author     Konstantin Pavlov <thresh@nginx.com>   2023-08-31 09:41:46 -0700
committer  Konstantin Pavlov <thresh@nginx.com>   2023-08-31 09:41:46 -0700
commit     c45c8919c7232eb20023484f6d1fc9f1f50395d8 (patch)
tree       cc12eb307c1611494948645e4b487fa06495c3d2
parent     88c90e1c351ab8c5bd487a5cd4b735014b08e271 (diff)
parent     9b22b6957bc87b3df002d0bc691fdae6a20abdac (diff)
download   unit-c45c8919c7232eb20023484f6d1fc9f1f50395d8.tar.gz
           unit-c45c8919c7232eb20023484f6d1fc9f1f50395d8.tar.bz2

Merged with the default branch. (tag: 1.31.0-1)
-rw-r--r--  .hgtags | 1
-rw-r--r--  .mailmap | 6
-rw-r--r--  CHANGES | 20
-rw-r--r--  NOTICE | 11
-rw-r--r--  README.md | 29
-rw-r--r--  auto/help | 4
-rw-r--r--  auto/make | 35
-rw-r--r--  auto/modules/conf | 4
-rw-r--r--  auto/modules/java | 6
-rw-r--r--  auto/modules/java_jar.sha512 | 26
-rw-r--r--  auto/modules/wasm | 207
-rw-r--r--  auto/njs | 6
-rw-r--r--  auto/options | 2
-rw-r--r--  auto/sources | 1
-rw-r--r--  auto/summary | 1
-rwxr-xr-x  configure | 1
-rw-r--r--  docs/changes.xml | 92
-rw-r--r--  docs/man/man8/unitd.8.in | 32
-rw-r--r--  docs/unit-openapi.yaml | 6334
-rw-r--r--  pkg/contrib/src/libunit-wasm/Makefile | 23
-rw-r--r--  pkg/contrib/src/libunit-wasm/version | 2
-rw-r--r--  pkg/contrib/src/njs/SHA512SUMS | 2
-rw-r--r--  pkg/contrib/src/njs/version | 2
-rw-r--r--  pkg/contrib/src/wasi-sysroot/Makefile | 17
-rw-r--r--  pkg/contrib/src/wasi-sysroot/SHA512SUMS | 1
-rw-r--r--  pkg/contrib/src/wasi-sysroot/version | 2
-rw-r--r--  pkg/contrib/src/wasmtime/Makefile | 30
-rw-r--r--  pkg/contrib/src/wasmtime/SHA512SUMS | 1
-rw-r--r--  pkg/contrib/src/wasmtime/version | 1
-rw-r--r--  pkg/deb/Makefile | 26
-rw-r--r--  pkg/deb/Makefile.wasm | 47
-rw-r--r--  pkg/deb/debian.module/copyright.unit-jsc11 | 11
-rw-r--r--  pkg/deb/debian.module/copyright.unit-jsc8 | 11
-rwxr-xr-x  pkg/deb/debian.module/rules.in | 1
-rw-r--r--  pkg/deb/debian/control.in | 4
-rw-r--r--  pkg/deb/debian/copyright | 11
-rw-r--r--  pkg/deb/debian/dirs | 1
-rw-r--r--  pkg/deb/debian/rules.in | 10
-rw-r--r--  pkg/docker/Dockerfile.go1.20 | 17
-rw-r--r--  pkg/docker/Dockerfile.go1.21 | 89
-rw-r--r--  pkg/docker/Dockerfile.jsc11 | 19
-rw-r--r--  pkg/docker/Dockerfile.minimal | 17
-rw-r--r--  pkg/docker/Dockerfile.node18 | 23
-rw-r--r--  pkg/docker/Dockerfile.node20 | 89
-rw-r--r--  pkg/docker/Dockerfile.perl5.36 | 17
-rw-r--r--  pkg/docker/Dockerfile.perl5.38 | 89
-rw-r--r--  pkg/docker/Dockerfile.php8.2 | 17
-rw-r--r--  pkg/docker/Dockerfile.python3.11 | 17
-rw-r--r--  pkg/docker/Dockerfile.ruby3.2 | 19
-rw-r--r--  pkg/docker/Dockerfile.wasm | 109
-rw-r--r--  pkg/docker/Makefile | 66
-rw-r--r--  pkg/docker/template.Dockerfile | 13
-rw-r--r--  pkg/rpm/Makefile | 10
-rw-r--r--  pkg/rpm/Makefile.wasm | 51
-rw-r--r--  pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc11 | 11
-rw-r--r--  pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc8 | 11
-rw-r--r--  pkg/rpm/unit.module.spec.in | 1
-rw-r--r--  pkg/rpm/unit.spec.in | 18
-rw-r--r--  src/nxt_application.c | 3
-rw-r--r--  src/nxt_application.h | 19
-rw-r--r--  src/nxt_conf_validation.c | 92
-rw-r--r--  src/nxt_h1proto.c | 2
-rw-r--r--  src/nxt_h1proto.h | 3
-rw-r--r--  src/nxt_http.h | 10
-rw-r--r--  src/nxt_http_request.c | 14
-rw-r--r--  src/nxt_http_rewrite.c | 14
-rw-r--r--  src/nxt_http_route.c | 17
-rw-r--r--  src/nxt_http_set_headers.c | 176
-rw-r--r--  src/nxt_http_static.c | 2
-rw-r--r--  src/nxt_http_variables.c | 391
-rw-r--r--  src/nxt_js.c | 33
-rw-r--r--  src/nxt_main.h | 2
-rw-r--r--  src/nxt_main_process.c | 55
-rw-r--r--  src/nxt_router.c | 13
-rw-r--r--  src/nxt_runtime.c | 31
-rw-r--r--  src/nxt_tstr.c | 18
-rw-r--r--  src/nxt_tstr.h | 6
-rw-r--r--  src/nxt_var.c | 196
-rw-r--r--  src/nxt_var.h | 29
-rw-r--r--  src/python/nxt_python_asgi.c | 48
-rw-r--r--  src/python/nxt_python_asgi_lifespan.c | 54
-rw-r--r--  src/python/nxt_python_asgi_str.c | 2
-rw-r--r--  src/python/nxt_python_asgi_str.h | 1
-rw-r--r--  src/test/nxt_unit_app_test.c | 4
-rw-r--r--  src/unit.pc.in | 11
-rw-r--r--  src/wasm/nxt_rt_wasmtime.c | 412
-rw-r--r--  src/wasm/nxt_wasm.c | 296
-rw-r--r--  src/wasm/nxt_wasm.h | 138
-rw-r--r--  test/conftest.py | 343
-rw-r--r--  test/python/chunked/wsgi.py | 18
-rw-r--r--  test/test_access_log.py | 434
-rw-r--r--  test/test_asgi_application.py | 638
-rw-r--r--  test/test_asgi_application_unix_abstract.py | 32
-rw-r--r--  test/test_asgi_lifespan.py | 175
-rw-r--r--  test/test_asgi_targets.py | 240
-rw-r--r--  test/test_asgi_websockets.py | 2095
-rw-r--r--  test/test_client_ip.py | 339
-rw-r--r--  test/test_configuration.py | 754
-rw-r--r--  test/test_forwarded_header.py | 508
-rw-r--r--  test/test_go_application.py | 249
-rw-r--r--  test/test_go_isolation.py | 533
-rw-r--r--  test/test_go_isolation_rootfs.py | 39
-rw-r--r--  test/test_http_header.py | 842
-rw-r--r--  test/test_java_application.py | 1795
-rw-r--r--  test/test_java_isolation_rootfs.py | 99
-rw-r--r--  test/test_java_websockets.py | 1983
-rw-r--r--  test/test_njs.py | 139
-rw-r--r--  test/test_njs_modules.py | 153
-rw-r--r--  test/test_node_application.py | 527
-rw-r--r--  test/test_node_es_modules.py | 62
-rw-r--r--  test/test_node_websockets.py | 2006
-rw-r--r--  test/test_perl_application.py | 439
-rw-r--r--  test/test_php_application.py | 1279
-rw-r--r--  test/test_php_basic.py | 225
-rw-r--r--  test/test_php_isolation.py | 144
-rw-r--r--  test/test_php_targets.py | 174
-rw-r--r--  test/test_proxy.py | 806
-rw-r--r--  test/test_proxy_chunked.py | 377
-rw-r--r--  test/test_python_application.py | 1310
-rw-r--r--  test/test_python_basic.py | 232
-rw-r--r--  test/test_python_environment.py | 303
-rw-r--r--  test/test_python_isolation.py | 315
-rw-r--r--  test/test_python_isolation_chroot.py | 47
-rw-r--r--  test/test_python_procman.py | 410
-rw-r--r--  test/test_python_targets.py | 178
-rw-r--r--  test/test_reconfigure.py | 70
-rw-r--r--  test/test_reconfigure_tls.py | 153
-rw-r--r--  test/test_respawn.py | 136
-rw-r--r--  test/test_return.py | 402
-rw-r--r--  test/test_rewrite.py | 350
-rw-r--r--  test/test_routing.py | 3470
-rw-r--r--  test/test_routing_tls.py | 45
-rw-r--r--  test/test_ruby_application.py | 630
-rw-r--r--  test/test_ruby_hooks.py | 127
-rw-r--r--  test/test_ruby_isolation.py | 69
-rw-r--r--  test/test_settings.py | 816
-rw-r--r--  test/test_static.py | 638
-rw-r--r--  test/test_static_chroot.py | 286
-rw-r--r--  test/test_static_fallback.py | 287
-rw-r--r--  test/test_static_mount.py | 217
-rw-r--r--  test/test_static_share.py | 137
-rw-r--r--  test/test_static_symlink.py | 136
-rw-r--r--  test/test_static_types.py | 335
-rw-r--r--  test/test_static_variables.py | 154
-rw-r--r--  test/test_status.py | 359
-rw-r--r--  test/test_status_tls.py | 43
-rw-r--r--  test/test_tls.py | 1019
-rw-r--r--  test/test_tls_conf_command.py | 171
-rw-r--r--  test/test_tls_session.py | 174
-rw-r--r--  test/test_tls_sni.py | 502
-rw-r--r--  test/test_tls_tickets.py | 309
-rw-r--r--  test/test_unix_abstract.py | 176
-rw-r--r--  test/test_upstreams_rr.py | 834
-rw-r--r--  test/test_usr1.py | 110
-rw-r--r--  test/test_variables.py | 809
-rw-r--r--  test/unit/applications/lang/go.py | 6
-rw-r--r--  test/unit/applications/lang/java.py | 9
-rw-r--r--  test/unit/applications/lang/node.py | 9
-rw-r--r--  test/unit/applications/lang/perl.py | 7
-rw-r--r--  test/unit/applications/lang/php.py | 7
-rw-r--r--  test/unit/applications/lang/python.py | 9
-rw-r--r--  test/unit/applications/lang/ruby.py | 7
-rw-r--r--  test/unit/applications/proto.py | 33
-rw-r--r--  test/unit/applications/tls.py | 18
-rw-r--r--  test/unit/applications/websockets.py | 4
-rw-r--r--  test/unit/check/check_prerequisites.py | 63
-rw-r--r--  test/unit/check/chroot.py | 44
-rw-r--r--  test/unit/check/discover_available.py | 47
-rw-r--r--  test/unit/check/go.py | 5
-rw-r--r--  test/unit/check/isolation.py | 37
-rw-r--r--  test/unit/check/njs.py | 3
-rw-r--r--  test/unit/check/node.py | 10
-rw-r--r--  test/unit/check/regex.py | 5
-rw-r--r--  test/unit/check/tls.py | 5
-rw-r--r--  test/unit/check/unix_abstract.py | 34
-rw-r--r--  test/unit/control.py | 4
-rw-r--r--  test/unit/http.py | 2
-rw-r--r--  test/unit/log.py | 104
-rw-r--r--  test/unit/option.py | 8
-rw-r--r--  test/unit/status.py | 4
-rw-r--r--  test/unit/utils.py | 23
-rw-r--r--  tools/README.md | 1
-rwxr-xr-x  tools/setup-unit | 270
-rwxr-xr-x  tools/unitc | 53
-rw-r--r--  version | 4
185 files changed, 26986 insertions, 16871 deletions
diff --git a/.hgtags b/.hgtags
index 9b35d72a..215cf854 100644
--- a/.hgtags
+++ b/.hgtags
@@ -71,3 +71,4 @@ fa0227b7f62691a186d752ace475868de49e9fce 1.29.1
e7b7f2bb04e8c6f4cbe6374fd6960d4465654215 1.29.1-1
2692a5823c403a4e209681943e32a4907317d14b 1.30.0
8a0b4338a15648792bcad47edb53f1b1c0badeb4 1.30.0-1
+3a9046dca2a6c51ee2df2cabdf69cb9a83e7a1e6 1.31.0
diff --git a/.mailmap b/.mailmap
index b96dcabc..d6405683 100644
--- a/.mailmap
+++ b/.mailmap
@@ -1,2 +1,4 @@
-<a.clayton@nginx.com> <andrew@digital-domain.net>
-<a.clayton@nginx.com> <a.clayton@f5.com>
+Alejandro Colomar <alx@nginx.com> <a.colomar@f5.com>
+Alejandro Colomar <alx@nginx.com> <alx.manpages@gmail.com>
+Andrew Clayton <a.clayton@nginx.com> <andrew@digital-domain.net>
+Andrew Clayton <a.clayton@nginx.com> <a.clayton@f5.com>
diff --git a/CHANGES b/CHANGES
index 36540f4e..a0e4410e 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,4 +1,24 @@
+Changes with Unit 1.31.0 31 Aug 2023
+
+ *) Change: if building with njs, version 0.8.0 or later is now required.
+
+ *) Feature: technology preview of WebAssembly application module.
+
+ *) Feature: "response_headers" option to manage headers in the action
+ and fallback.
+
+ *) Feature: HTTP response header variables.
+
+ *) Feature: ASGI lifespan state support. Thanks to synodriver.
+
+ *) Bugfix: ensure that $uri variable is not cached.
+
+ *) Bugfix: deprecated options were unavailable.
+
+ *) Bugfix: ASGI applications inaccessible over IPv6.
+
+
Changes with Unit 1.30.0 10 May 2023
*) Change: remove Unix domain listen sockets upon reconfiguration.
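For context, the `response_headers` option listed above is set inside a route action (or a fallback). A minimal sketch of applying it through the control API follows; the control socket path, route index, and header value are illustrative assumptions, not taken from this commit:

``` console
# Illustrative only: adds a response header to matches of the first route.
# The control socket path and the config layout are assumptions.
$ curl -X PUT --data-binary '{
      "share": "/www/static$uri",
      "response_headers": {
          "Cache-Control": "max-age=3600"
      }
  }' --unix-socket /var/run/control.unit.sock \
  http://localhost/config/routes/0/action
```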
diff --git a/NOTICE b/NOTICE
index 73274140..42d51c92 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,12 +1,15 @@
NGINX Unit.
- Copyright 2017-2022 NGINX, Inc.
+ Copyright 2017-2023 NGINX, Inc.
+ Copyright 2017-2023 Andrei Zeliankou
+ Copyright 2018-2023 Konstantin Pavlov
+ Copyright 2021-2023 Zhidao Hong
+ Copyright 2021-2023 Alejandro Colomar
+ Copyright 2022-2023 Andrew Clayton
+ Copyright 2022-2023 Liam Crilly
Copyright 2017-2022 Valentin V. Bartenev
Copyright 2017-2022 Max Romanov
- Copyright 2017-2022 Andrei Zeliankou
- Copyright 2018-2022 Konstantin Pavlov
- Copyright 2021-2022 Zhidao Hong
Copyright 2021-2022 Oisín Canty
Copyright 2017-2021 Igor Sysoev
Copyright 2017-2021 Andrei Belov
diff --git a/README.md b/README.md
index abe6ffba..badd2391 100644
--- a/README.md
+++ b/README.md
@@ -15,12 +15,12 @@ coherent solution with a focus on performance, low latency, and scalability. It
is intended as a universal building block for any web architecture regardless
of its complexity, from enterprise-scale deployments to your pet's homepage.
-Its native RESTful JSON API enables dynamic updates with zero interruptions
-and flexible configuration, while its out-of-the-box productivity reliably
-scales to production-grade workloads. We achieve that with a complex,
-asynchronous, multithreading architecture comprising multiple processes to
-ensure security and robustness while getting the most out of today's computing
-platforms.
+Its native [RESTful JSON API](#openapi-specification) enables dynamic
+updates with zero interruptions and flexible configuration, while its
+out-of-the-box productivity reliably scales to production-grade workloads. We
+achieve that with a complex, asynchronous, multithreading architecture
+comprising multiple processes to ensure security and robustness while getting
+the most out of today's computing platforms.
## Quick Installation
@@ -38,7 +38,7 @@ For details and available language packages, see the
### Docker
``` console
-$ docker pull docker.io/nginx/unit
+$ docker pull unit
```
For a description of image tags, see the
@@ -71,6 +71,11 @@ For details and available language packages, see the
## Running a Hello World App
+Unit runs apps in a
+[variety of languages](https://unit.nginx.org/howto/samples/).
+Let's consider a basic example,
+choosing PHP for no particular reason.
+
Suppose you saved a PHP script as `/www/helloworld/index.php`:
``` php
<?php echo "Hello, PHP on Unit!"; ?>
@@ -156,6 +161,16 @@ Unit's output should contain both snippets, neatly organized:
For full details of configuration management, see the
[docs](https://unit.nginx.org/configuration/#configuration-management).
+## OpenAPI Specification
+
+Our [OpenAPI specification](docs/unit-openapi.yaml) aims to simplify
+configuring and integrating NGINX Unit deployments and provide an authoritative
+source of knowledge about the control API.
+
+Although the specification is still in the early beta stage, it is a promising
+step forward for the NGINX Unit community. While working on it, we kindly ask
+you to experiment and provide feedback to help improve its functionality and
+usability.
## Community
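Tying the README excerpt above together, here is a minimal sketch of loading a configuration for the saved PHP script and then reading back a single nested option over the control API; the listener address and control socket path are assumptions for illustration:

``` console
# Illustrative only: listener port and control socket path are assumptions.
$ curl -X PUT --data-binary '{
      "listeners": { "*:8080": { "pass": "applications/helloworld" } },
      "applications": {
          "helloworld": { "type": "php", "root": "/www/helloworld/" }
      }
  }' --unix-socket /var/run/control.unit.sock http://localhost/config

# Every part of the configuration is addressable as its own endpoint:
$ curl --unix-socket /var/run/control.unit.sock \
       'http://localhost/config/listeners/*:8080/pass'
```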
diff --git a/auto/help b/auto/help
index f2307e9c..b6d9919f 100644
--- a/auto/help
+++ b/auto/help
@@ -20,6 +20,7 @@ cat << END
--modulesdir=DIR default: "\$libdir/unit/modules"
--datarootdir=DIR default: "\$prefix/share"
--mandir=DIR default: "\$datarootdir/man"
+ --pkgconfigdir=DIR default: "\$datarootdir/pkgconfig"
--localstatedir=DIR default: "\$prefix/var"
--statedir=DIR default: "\$localstatedir/lib/unit"
--runstatedir=DIR default: "\$localstatedir/run/unit"
@@ -75,4 +76,7 @@ cat << END
java OPTIONS configure Java module
run "./configure java --help" to see available options
+ wasm OPTIONS configure WebAssembly module
+ run "./configure wasm --help" to see available options
+
END
diff --git a/auto/make b/auto/make
index ecf31826..abfd41ad 100644
--- a/auto/make
+++ b/auto/make
@@ -94,7 +94,9 @@ $NXT_BUILD_DIR/lib/$NXT_LIB_SHARED: \$(NXT_LIB_OBJS)
$NXT_BUILD_DIR/lib/$NXT_LIB_STATIC: \$(NXT_LIB_OBJS)
$NXT_STATIC_LINK \$@ \$(NXT_LIB_OBJS)
-$NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC: \$(NXT_LIB_UNIT_OBJS)
+$NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC: \$(NXT_LIB_UNIT_OBJS) \\
+ $NXT_BUILD_DIR/share/pkgconfig/unit.pc \\
+ $NXT_BUILD_DIR/share/pkgconfig/unit-uninstalled.pc
$NXT_STATIC_LINK \$@ \$(NXT_LIB_UNIT_OBJS)
END
@@ -397,6 +399,10 @@ libunit-install: $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC
|| install -d \$(DESTDIR)$NXT_LIBDIR
install -p -m u=rw,go=r $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC \
\$(DESTDIR)$NXT_LIBDIR/
+ test -d \$(DESTDIR)$NXT_PKGCONFIGDIR \
+ || install -d \$(DESTDIR)$NXT_PKGCONFIGDIR
+ install -p -m u=rw,go=r $NXT_BUILD_DIR/share/pkgconfig/unit.pc \
+ \$(DESTDIR)$NXT_PKGCONFIGDIR/
test -d \$(DESTDIR)$NXT_INCLUDEDIR \
|| install -d \$(DESTDIR)$NXT_INCLUDEDIR
install -p -m u=rw,go=r src/nxt_unit.h \
@@ -414,6 +420,8 @@ libunit-install: $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC
libunit-uninstall:
rm -f \$(DESTDIR)$NXT_LIBDIR/$NXT_LIB_UNIT_STATIC
@rmdir -p \$(DESTDIR)$NXT_LIBDIR 2>/dev/null || true
+ rm -f \$(DESTDIR)$NXT_PKGCONFIGDIR/unit.pc
+ @rmdir -p \$(DESTDIR)$NXT_PKGCONFIGDIR 2>/dev/null || true
rm -f \$(DESTDIR)$NXT_INCLUDEDIR/nxt_unit.h \
\$(DESTDIR)$NXT_INCLUDEDIR/nxt_unit_field.h \
\$(DESTDIR)$NXT_INCLUDEDIR/nxt_unit_request.h \
@@ -428,6 +436,31 @@ libunit-uninstall:
END
+# pkg-config files
+
+cat << END >> $NXT_MAKEFILE
+
+$NXT_BUILD_DIR/share/pkgconfig/unit.pc: src/unit.pc.in
+ sed -e "s|@PREFIX@|$NXT_PREFIX|" \\
+ -e "s|@LIBDIR@|$NXT_LIBDIR|" \\
+ -e "s|@CFLAGS@|-I$NXT_INCLUDEDIR|" \\
+ -e "s|@VERSION@|\$(NXT_VERSION)|" \\
+ -e "s|@EXTRA_LIBS@|$NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS|" \\
+ -e "s|@CONFARGS@|$(echo $NXT_CONFIGURE_OPTIONS | sed -e 's| -pie||' -e 's| --njs||')|" \\
+ -e "s|@MODULESDIR@|$NXT_MODULESDIR|" \\
+ < src/unit.pc.in > \$@
+
+$NXT_BUILD_DIR/share/pkgconfig/unit-uninstalled.pc: src/unit.pc.in
+ sed -e "s|@PREFIX@|$(pwd)/$NXT_BUILD_DIR|" \\
+ -e "s|@LIBDIR@|$(pwd)/$NXT_BUILD_DIR/lib|" \\
+ -e "s|@CFLAGS@|-I$(pwd)/src -I$(pwd)$NXT_BUILD_DIR/include|" \\
+ -e "s|@VERSION@|\$(NXT_VERSION)|" \\
+ -e "s|@EXTRA_LIBS@|$NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS|" \\
+ -e "s|@CONFARGS@|$(echo $NXT_CONFIGURE_OPTIONS | sed -e 's| -pie||' -e 's| --njs||')|" \\
+ < src/unit.pc.in > \$@
+
+END
+
# Makefile.
# *.dSYM is MacOSX Clang debug information.
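The pkg-config rules above make it possible to compile against the installed libunit without hard-coding paths. A sketch, assuming a program `app.c` that embeds libunit via `nxt_unit.h` and the default `build` tree for the uninstalled variant:

``` console
# Illustrative only: app.c is a placeholder for code using libunit.
$ cc app.c $(pkg-config --cflags --libs unit) -o app

# The unit-uninstalled.pc file serves the same purpose from a build tree.
$ PKG_CONFIG_PATH=build/share/pkgconfig pkg-config --cflags unit-uninstalled
```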
diff --git a/auto/modules/conf b/auto/modules/conf
index 7e004703..31be751f 100644
--- a/auto/modules/conf
+++ b/auto/modules/conf
@@ -33,6 +33,10 @@ case "$nxt_module" in
. auto/modules/java
;;
+ wasm)
+ . auto/modules/wasm
+ ;;
+
*)
echo
echo $0: error: invalid module \"$nxt_module\".
diff --git a/auto/modules/java b/auto/modules/java
index d87f93c5..7c39eb37 100644
--- a/auto/modules/java
+++ b/auto/modules/java
@@ -238,7 +238,7 @@ cat << END > $NXT_JAVA_JARS
static const char *nxt_java_system_jars[] = {
END
-NXT_TOMCAT_VERSION=9.0.70
+NXT_TOMCAT_VERSION=9.0.75
NXT_JAR_VERSION=$NXT_TOMCAT_VERSION
@@ -284,7 +284,7 @@ static const char *nxt_java_unit_jars[] = {
"$NXT_UNIT_JAR",
END
-NXT_JAR_VERSION=9.4.49.v20220914
+NXT_JAR_VERSION=9.4.51.v20230217
NXT_JAR_NAMESPACE=org/eclipse/jetty/
NXT_JAR_NAME=jetty-util
@@ -297,7 +297,7 @@ NXT_JAR_NAME=jetty-http
. auto/modules/java_get_jar
NXT_JAR_NAME=classgraph
-NXT_JAR_VERSION=4.8.151
+NXT_JAR_VERSION=4.8.158
NXT_JAR_NAMESPACE=io/github/classgraph/
. auto/modules/java_get_jar
diff --git a/auto/modules/java_jar.sha512 b/auto/modules/java_jar.sha512
index d3e9016c..da08f786 100644
--- a/auto/modules/java_jar.sha512
+++ b/auto/modules/java_jar.sha512
@@ -1,14 +1,14 @@
-4b47eabc83f3f672a7e91af6ae97bbdbc6f01ed7149540cb06b0f530f45a95d025cc7807a6640982d23d2da50bd973ad788a4c8fdfa025da7cf93c560abbe61e classgraph-4.8.151.jar
+00dc1aedae7cb6600b4b27ac7ec0234981a23a8c6f03dc1881304b7d999d94f0fdaa51e0008288740d9890b34f41462e8ed82c76f36a18bd45eabbb8084ec8d4 classgraph-4.8.158.jar
ab441acf5551a7dc81c353eaccb3b3df9e89a48987294d19e39acdb83a5b640fcdff7414cee29f5b96eaa8826647f1d5323e185018fe33a64c402d69c73c9158 ecj-3.26.0.jar
-82c6985f0d7c76459bf0638fdc24f3692a11804a95845b1a45203dfcc1205ab7bf67934f6babf7eb2f2b87d637a8fcbd87eae297e4403511bf73f359b1957e09 jetty-http-9.4.49.v20220914.jar
-2f199729ad9b46fda968b4bfafd657971fc9d90371852f8ad7afdae6d5752d2b84648734eabb6ffbf084800253d1da97d4bb9ad60f799ee6ae38a80c2d881fc4 jetty-server-9.4.49.v20220914.jar
-e207d93ef5bc98ad2b1a43393231bdacfb3ab642b6197a8b72d819f8ad30357c4daa0a76a0459340563fcdee0fdfc111e719a2db5be778d6b1e10f1ccbe77fc9 jetty-util-9.4.49.v20220914.jar
-a2cd93ccaa58191475df9aa40a11c8b3f14f77e78b6b2dc9e5fbebf07297e318d60c5cc5aca37e61bd748456b01491a0e6702b9e4d3ec3ef43d9b1a93f9b733e tomcat-api-9.0.70.jar
-4b2b33f6bdcb3fbff6de7da6f7558e4a21335c5c08dbc2adba1be90ddcaa4be1ba053d9021a4891edef975759a562b46a58da6c5acc2209ae8b942e4058b7022 tomcat-el-api-9.0.70.jar
-7ee837f218220022bf2543e4b3191c0a948c7f8bbd4f2e7202cc29196e5f4a8264aee027bc3521b79775b1ab0b3f8a4bef8982be9c0b2c5f95b77f36d5e5930f tomcat-jasper-9.0.70.jar
-f92cdddd3aae8d1b0b861afc67344fc6544c413d78e2e810f804632e68a3667b2b1929ac4995b582af03774ad024632e820143cd53273e06a796484ce2f0a73e tomcat-jasper-el-9.0.70.jar
-5ec6985740e7a5873f56430b1f0fd6e55a625fac8f5618d846072117f5ed8ccc69665fd6ebde40381099cf42ab9525f5da3cd16dd0b50a267734bfdf7f2e168d tomcat-jsp-api-9.0.70.jar
-33cf08f10bad572c9e7085b3ba8e91b38a293f8838a39483b01d07d9c1b9d0e67492343e0523da24af47782ec4a5d639db49679d951ccbe1da9d1309346cc693 tomcat-juli-9.0.70.jar
-0c8ee46dc49828720cd431e4e6bcb2a9d7409b3bae3d3427640b159985a27de22181151c8fa15a1f44f607730977c4ae2512c63a19c070b92e38438ad0ba8138 tomcat-servlet-api-9.0.70.jar
-e882c47acdb9e5612a0810503cb8900570b68aec5dd33dd6439884b15723a67cbf982c9cf546e7cd6d67b731df3d64ec5347500ab8a987d7cb1e11a74f819325 tomcat-util-9.0.70.jar
-0a562e8a40e406966ae2be5587dcad0ceae3143b03ef9b9f7dd77c6a2db522c31ed82b9c38b4464f9f80c1d8ca418ce6a09f9fecb3e0209a962da01e2f9bd626 tomcat-util-scan-9.0.70.jar
+606d0446a948c7a349cba5415d079bd054e43a8a09727c8300865f38678c5101642ecafa777d5f979bde1bd520543a4cf83348fcd9a444e6681cf773eb3c5824 jetty-http-9.4.51.v20230217.jar
+e9d3f7104214a16435d077eb53d943130b3a85bf9e8a48b9e40a7ad063ca3cca69324e03f21202e66fc5fd864c3cb09cd1857eff8d682c69398b4d75c1a430c6 jetty-server-9.4.51.v20230217.jar
+12725e106000d6ef672c474e467d7c976c3913b2d39a92e7304cc30d0e0d1f92575227d2c2201b97ca50f39cba81ac8b79bccb2b32b63d758ac4e192173c3f62 jetty-util-9.4.51.v20230217.jar
+b802b595d796285653c3c1b3d870295767afc6d4f857d439d579b7261dc8d015c948cd900e93ebc0ad706d1c74600645d3a18caef4f040a7085ff0e9f6cb4e44 tomcat-api-9.0.75.jar
+90cc3c2847782b6854bc0549e7bdfda2e92feac726285718aa4083baef597e472e7f3712257790107fa9306263f645fcc17344a8209415d10a75f4f4cfa30b04 tomcat-el-api-9.0.75.jar
+c23a0db30c5d77a518c7cd0afebe6c59600b45e03651409788acb91fa652752960f6e21ac15a233b433dbe290028d00ac72c4cd2856880f4d4ba9ef227692d7a tomcat-jasper-9.0.75.jar
+06fb8c0d49945437900be1cc8fd8d8123a7bf6599e7755aa6c7963a65d46aea444661de532ce0e6c0573ecd12def2d5d1e7942ecace9c7c3a39e232373f3fd6b tomcat-jasper-el-9.0.75.jar
+7dbaacd7f0490c8f0062eb12cd244acc9c51ee72d1c60ad841ac61905cd46b5d477b01c97019ba6e93a6b27de3e3438c620fb156cec3d6ece36f37918a6ee5bd tomcat-jsp-api-9.0.75.jar
+05c15c5c6877b39aed4457e5ea7992819c28dc148a2442a149a8d5ee294a62042eebd2f3846acdd70b08d531d95cdcc8cfcd7b64fb8b046aa5639e7901505131 tomcat-juli-9.0.75.jar
+92a44f8970746976a63351d45f84c7963127bd21db0af834a7d38dcfb3c29450398cb4b466636dfb3d8e764093c612eb2938af22ac2098171bac201ad6bcc320 tomcat-servlet-api-9.0.75.jar
+6794c0d6d5780ca09fdbaf801c1475f227b799c809a46195c0fd1f9792303fb6d0aa6e49ac049337863bdb512c390334470210294364b7af25d86355f7fd0605 tomcat-util-9.0.75.jar
+e97c90c857a5c814518f3da10d8c09c900417421d81ad500ad338ac10c6b7dc8338b486d2338f5cebea6fa33c9803fd2c06cf35c44b1b5b720331943f2e22de3 tomcat-util-scan-9.0.75.jar
diff --git a/auto/modules/wasm b/auto/modules/wasm
new file mode 100644
index 00000000..1f388de6
--- /dev/null
+++ b/auto/modules/wasm
@@ -0,0 +1,207 @@
+# Copyright (C) Andrew Clayton
+# Copyright (C) F5, Inc.
+
+
+NXT_WASM_RUNTIME=wasmtime
+
+shift
+
+for nxt_option; do
+
+ case "$nxt_option" in
+ -*=*) value=`echo "$nxt_option" | sed -e 's/[-_a-zA-Z0-9]*=//'` ;;
+ *) value="" ;;
+ esac
+
+ case "$nxt_option" in
+
+ --runtime=*) NXT_WASM_RUNTIME="$value" ;;
+ --module=*) NXT_WASM_MODULE="$value" ;;
+ --include-path=*) NXT_WASM_INCLUDE_PATH="$value" ;;
+ --lib-path=*) NXT_WASM_LIB_PATH="$value" ;;
+ --rpath*) NXT_WASM_RPATH="$value" ;;
+
+ --help)
+ cat << END
+
+ --runtime=RUNTIME set the WASM runtime to use (default: wasmtime)
+ --module=NAME set Unit WASM module name (default: wasm)
+ --include-path=DIRECTORY set directory path to wasmtime includes
+ --lib-path=DIRECTORY set directory path to libwasmtime.so library
+ --rpath[=DIRECTORY] set the rpath (default: --lib-path)
+
+END
+ exit 0
+ ;;
+
+ *)
+ echo
+ echo $0: error: invalid wasm option \"$nxt_option\"
+ echo
+ exit 1
+ ;;
+ esac
+
+done
+
+
+if [ ! -f $NXT_AUTOCONF_DATA ]; then
+ echo
+ echo Please run common $0 before configuring module \"$nxt_module\".
+ echo
+ exit 1
+fi
+
+. $NXT_AUTOCONF_DATA
+
+NXT_WASM=wasm
+NXT_WASM_MODULE=${NXT_WASM_MODULE=${NXT_WASM##*/}}
+
+NXT_WASM_INCLUDE_PATH=${NXT_WASM_INCLUDE_PATH=}
+NXT_WASM_LIB_PATH=${NXT_WASM_LIB_PATH=}
+NXT_WASM_LDFLAGS=
+if [ "$NXT_WASM_RUNTIME" = "wasmtime" ]; then
+ NXT_WASM_LDFLAGS=-lwasmtime
+fi
+NXT_WASM_ADDITIONAL_FLAGS="-fno-strict-aliasing \
+ -Wno-missing-field-initializers \
+ -DNXT_HAVE_WASM_$(echo ${NXT_WASM_RUNTIME} | tr 'a-z' 'A-Z') \
+"
+
+# Set the RPATH/RUNPATH.
+#
+# We temporarily disable warning on unbound variables here as
+# NXT_WASM_RPATH may be legitimately unset, in which case we
+# don't set a RPATH.
+#
+# If NXT_WASM_RPATH is set but null then we set a RPATH of the
+# value of $NXT_WASM_LIB (--lib-path) otherwise use the value
+# provided.
+set +u
+if [ "${NXT_WASM_RPATH+set}" = set ]; then
+ if [ "$NXT_WASM_RPATH" = "" ]; then
+ NXT_WASM_RPATH=$NXT_WASM_LIB_PATH
+ fi
+
+ NXT_WASM_LDFLAGS="-Wl,-rpath,$NXT_WASM_RPATH $NXT_WASM_LDFLAGS"
+fi
+set -u
+
+$echo "configuring WASM module"
+$echo "configuring WASM module ..." >> $NXT_AUTOCONF_ERR
+
+nxt_found=no
+
+if [ "$NXT_WASM_RUNTIME" = "wasmtime" ]; then
+ nxt_feature="wasmtime"
+ nxt_feature_name=""
+ nxt_feature_run=no
+ nxt_feature_incs="-I${NXT_WASM_INCLUDE_PATH}"
+ nxt_feature_libs="-L${NXT_WASM_LIB_PATH} $NXT_WASM_LDFLAGS"
+ nxt_feature_test="
+ #include <wasm.h>
+ #include <wasi.h>
+ #include <wasmtime.h>
+
+ int main(void) {
+ wasm_config_t *c;
+
+ c = wasm_config_new();
+ wasm_config_delete(c);
+
+ return 0;
+ }"
+
+ . auto/feature
+fi
+
+if [ $nxt_found = no ]; then
+ $echo
+ $echo $0: error: no $NXT_WASM_RUNTIME found.
+ $echo
+ exit 1;
+fi
+
+
+if grep ^$NXT_WASM_MODULE: $NXT_MAKEFILE 2>&1 > /dev/null; then
+ $echo
+ $echo $0: error: duplicate \"$NXT_WASM_MODULE\" module configured.
+ $echo
+ exit 1;
+fi
+
+
+$echo " + WASM module: ${NXT_WASM_MODULE}.unit.so"
+
+. auto/cc/deps
+
+$echo >> $NXT_MAKEFILE
+
+NXT_WASM_MODULE_SRCS=" \
+ src/wasm/nxt_wasm.c \
+"
+
+if [ "$NXT_WASM_RUNTIME" = "wasmtime" ]; then
+ NXT_WASM_MODULE_SRCS="$NXT_WASM_MODULE_SRCS src/wasm/nxt_rt_wasmtime.c"
+fi
+
+
+# The wasm module object files.
+
+nxt_objs=$NXT_BUILD_DIR/src/nxt_unit.o
+
+for nxt_src in $NXT_WASM_MODULE_SRCS; do
+
+ nxt_obj=${nxt_src%.c}-$NXT_WASM_MODULE.o
+ nxt_dep=${nxt_src%.c}-$NXT_WASM_MODULE.dep
+ nxt_dep_flags=`nxt_gen_dep_flags`
+ nxt_dep_post=`nxt_gen_dep_post`
+ nxt_objs="$nxt_objs $NXT_BUILD_DIR/$nxt_obj"
+
+ cat << END >> $NXT_MAKEFILE
+
+$NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H
+ mkdir -p $NXT_BUILD_DIR/src/wasm
+ \$(CC) -c \$(CFLAGS) $NXT_WASM_ADDITIONAL_FLAGS \$(NXT_INCS) \\
+ -I$NXT_WASM_INCLUDE_PATH \\
+ $nxt_dep_flags \\
+ -o $NXT_BUILD_DIR/$nxt_obj $nxt_src
+ $nxt_dep_post
+
+-include $NXT_BUILD_DIR/$nxt_dep
+
+END
+
+done
+
+
+cat << END >> $NXT_MAKEFILE
+
+.PHONY: ${NXT_WASM_MODULE}
+.PHONY: ${NXT_WASM_MODULE}-install
+.PHONY: ${NXT_WASM_MODULE}-uninstall
+
+all: ${NXT_WASM_MODULE}
+
+${NXT_WASM_MODULE}: $NXT_BUILD_DIR/lib/unit/modules/${NXT_WASM_MODULE}.unit.so
+
+$NXT_BUILD_DIR/lib/unit/modules/${NXT_WASM_MODULE}.unit.so: $nxt_objs
+ \$(NXT_MODULE_LINK) -o \$@ \\
+ $nxt_objs -L${NXT_WASM_LIB_PATH} ${NXT_WASM_LDFLAGS} $NXT_LD_OPT
+
+
+install: ${NXT_WASM_MODULE}-install
+
+${NXT_WASM_MODULE}-install: ${NXT_WASM_MODULE} install-check
+ install -d \$(DESTDIR)$NXT_MODULESDIR
+ install -p $NXT_BUILD_DIR/lib/unit/modules/${NXT_WASM_MODULE}.unit.so \\
+ \$(DESTDIR)$NXT_MODULESDIR/
+
+
+uninstall: ${NXT_WASM_MODULE}-uninstall
+
+${NXT_WASM_MODULE}-uninstall:
+ rm -f \$(DESTDIR)$NXT_MODULESDIR/${NXT_WASM_MODULE}.unit.so
+ @rmdir -p \$(DESTDIR)$NXT_MODULESDIR 2>/dev/null || true
+
+END
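The new `auto/modules/wasm` script plugs into the usual two-step module configuration. A sketch of a wasmtime-based build, where the wasmtime install prefix is an assumed location:

``` console
# Illustrative only: /opt/wasmtime is an assumed install prefix for the
# wasmtime C API headers and libwasmtime.so.
$ ./configure
$ ./configure wasm --include-path=/opt/wasmtime/include \
                   --lib-path=/opt/wasmtime/lib --rpath
$ make wasm
```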
diff --git a/auto/njs b/auto/njs
index c0c43f19..c54a27c7 100644
--- a/auto/njs
+++ b/auto/njs
@@ -25,6 +25,10 @@ nxt_feature_incs="$NXT_NJS_CFLAGS $NXT_NJS_AUX_CFLAGS"
nxt_feature_libs="$NXT_NJS_LIBS $NXT_NJS_AUX_LIBS"
nxt_feature_test="#include <njs.h>
+ #if NJS_VERSION_NUMBER < 0x000800
+ # error NJS < 0.8.0 is not supported.
+ #endif
+
int main(void) {
njs_vm_t *vm;
njs_vm_opt_t opts;
@@ -40,7 +44,7 @@ nxt_feature_test="#include <njs.h>
if [ $nxt_found = no ]; then
$echo
- $echo $0: error: no NJS library found.
+ $echo $0: error: no NJS library \>= 0.8.0 found.
$echo
exit 1;
fi
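Accordingly, builds that enable scripting now need libnjs 0.8.0 or later at configure time. A brief sketch, assuming the njs headers and library are already discoverable on the system:

``` console
# Illustrative only: this fails with "no NJS library >= 0.8.0 found"
# when the installed njs is older than 0.8.0.
$ ./configure --njs
```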
diff --git a/auto/options b/auto/options
index 5487be7f..0550c699 100644
--- a/auto/options
+++ b/auto/options
@@ -73,6 +73,7 @@ do
;;
--datarootdir=*) NXT_DATAROOTDIR="$value" ;;
--mandir=*) NXT_MANDIR="$value" ;;
+ --pkgconfigdir=*) NXT_PKGCONFIGDIR="$value" ;;
--localstatedir=*) NXT_LOCALSTATEDIR="$value" ;;
--statedir=*) NXT_STATEDIR="$value" ;;
--state=*)
@@ -158,6 +159,7 @@ NXT_MODULESDIR="${NXT_MODULESDIR-"$NXT_LIBDIR/unit/modules"}"
NXT_DATAROOTDIR="${NXT_DATAROOTDIR-"$NXT_PREFIX/share"}"
NXT_MANDIR="${NXT_MANDIR-"$NXT_DATAROOTDIR/man"}"
+NXT_PKGCONFIGDIR="${NXT_PKGCONFIGDIR-"$NXT_DATAROOTDIR/pkgconfig"}"
NXT_LOCALSTATEDIR="${NXT_LOCALSTATEDIR-"$NXT_PREFIX/var"}"
NXT_STATEDIR="${NXT_STATEDIR-"$NXT_LOCALSTATEDIR/lib/unit"}"
diff --git a/auto/sources b/auto/sources
index f4a7170a..6ee4d87b 100644
--- a/auto/sources
+++ b/auto/sources
@@ -93,6 +93,7 @@ NXT_LIB_SRCS=" \
src/nxt_http_route.c \
src/nxt_http_route_addr.c \
src/nxt_http_rewrite.c \
+ src/nxt_http_set_headers.c \
src/nxt_http_return.c \
src/nxt_http_static.c \
src/nxt_http_proxy.c \
diff --git a/auto/summary b/auto/summary
index fabe3b10..3aa41669 100644
--- a/auto/summary
+++ b/auto/summary
@@ -11,6 +11,7 @@ Unit configuration summary:
sbin directory: ............ "$NXT_SBINDIR"
lib directory: ............. "$NXT_LIBDIR"
include directory: ......... "$NXT_INCLUDEDIR"
+ pkgconfig directory: ....... "$NXT_PKGCONFIGDIR"
man pages directory: ....... "$NXT_MANDIR"
modules directory: ......... "$NXT_MODULESDIR"
state directory: ........... "$NXT_STATEDIR"
diff --git a/configure b/configure
index 8f2bd358..2cb4d457 100755
--- a/configure
+++ b/configure
@@ -62,6 +62,7 @@ mkdir -p $NXT_BUILD_DIR/lib
mkdir -p $NXT_BUILD_DIR/lib/unit/modules
mkdir -p $NXT_BUILD_DIR/sbin
mkdir -p $NXT_BUILD_DIR/share/man/man8
+mkdir -p $NXT_BUILD_DIR/share/pkgconfig
mkdir -p $NXT_BUILD_DIR/src
mkdir -p $NXT_BUILD_DIR/src/test
mkdir -p $NXT_BUILD_DIR/var/lib/unit
diff --git a/docs/changes.xml b/docs/changes.xml
index 68257aa8..fca9ebcd 100644
--- a/docs/changes.xml
+++ b/docs/changes.xml
@@ -14,6 +14,98 @@
unit-ruby
unit-jsc-common unit-jsc8 unit-jsc10 unit-jsc11 unit-jsc13
unit-jsc14 unit-jsc15 unit-jsc16 unit-jsc17 unit-jsc18
+ unit-jsc19 unit-jsc20
+ unit-wasm"
+ ver="1.31.0" rev="1"
+ date="2023-08-31" time="18:00:00 +0300"
+ packager="Nginx Packaging &lt;nginx-packaging@f5.com&gt;">
+
+<change>
+<para>
+NGINX Unit updated to 1.31.0.
+</para>
+</change>
+
+</changes>
+
+
+<changes apply="unit" ver="1.31.0" rev="1"
+ date="2023-08-31" time="18:00:00 +0300"
+ packager="Nginx Packaging &lt;nginx-packaging@f5.com&gt;">
+
+<change type="change">
+<para>
+if building with njs, version 0.8.0 or later is now required.
+</para>
+</change>
+
+<change type="feature">
+<para>
+technology preview of WebAssembly application module.
+</para>
+</change>
+
+<change type="feature">
+<para>
+"response_headers" option to manage headers in the action and fallback.
+</para>
+</change>
+
+<change type="feature">
+<para>
+HTTP response header variables.
+</para>
+</change>
+
+<change type="feature">
+<para>
+ASGI lifespan state support. Thanks to synodriver.
+</para>
+</change>
+
+<change type="bugfix">
+<para>
+ensure that $uri variable is not cached.
+</para>
+</change>
+
+<change type="bugfix">
+<para>
+deprecated options were unavailable.
+</para>
+</change>
+
+<change type="bugfix">
+<para>
+ASGI applications inaccessible over IPv6.
+</para>
+</change>
+
+</changes>
+
+
+<changes apply="unit-wasm" ver="1.31.0" rev="1"
+ date="2023-08-14" time="15:00:00 -0700"
+ packager="Nginx Packaging &lt;nginx-packaging@f5.com&gt;">
+
+<change>
+<para>
+Initial release of WASM module for NGINX Unit.
+</para>
+</change>
+
+</changes>
+
+
+<changes apply="unit-php
+ unit-python unit-python2.7
+ unit-python3.4 unit-python3.5 unit-python3.6 unit-python3.7
+ unit-python3.8 unit-python3.9 unit-python3.10 unit-python3.11
+ unit-go
+ unit-perl
+ unit-ruby
+ unit-jsc-common unit-jsc8 unit-jsc10 unit-jsc11 unit-jsc13
+ unit-jsc14 unit-jsc15 unit-jsc16 unit-jsc17 unit-jsc18
unit-jsc19 unit-jsc20"
ver="1.30.0" rev="1"
date="2023-05-10" time="18:00:00 +0300"
diff --git a/docs/man/man8/unitd.8.in b/docs/man/man8/unitd.8.in
index a43e671f..1c2093da 100644
--- a/docs/man/man8/unitd.8.in
+++ b/docs/man/man8/unitd.8.in
@@ -1,4 +1,18 @@
-.\" (C) 2017-2021, NGINX, Inc.
+.\" (C) 2017-2023, NGINX, Inc.
+.\" (C) 2017-2023 Andrei Zeliankou
+.\" (C) 2018-2023 Konstantin Pavlov
+.\" (C) 2021-2023 Zhidao Hong
+.\" (C) 2021-2023 Alejandro Colomar
+.\" (C) 2022-2023 Andrew Clayton
+.\" (C) 2022-2023 Liam Crilly
+.\" (C) 2017-2022 Valentin V. Bartenev
+.\" (C) 2017-2022 Max Romanov
+.\" (C) 2021-2022 Oisín Canty
+.\" (C) 2017-2021 Igor Sysoev
+.\" (C) 2017-2021 Andrei Belov
+.\" (C) 2019-2021 Tiago Natel de Moura
+.\" (C) 2019-2020 Axel Duch
+.\" (C) 2018-2019 Alexander Borisov
.\"
.Dd 2023-04-26
.Dt unitd 8
@@ -67,7 +81,23 @@ A general-purpose log for diagnostics and troubleshooting.
The socket address of Unit's control API.
.El
.Sh Copyright
+.nf
(C) 2017-2023, NGINX, Inc.
+(C) 2017-2023 Andrei Zeliankou
+(C) 2018-2023 Konstantin Pavlov
+(C) 2021-2023 Zhidao Hong
+(C) 2021-2023 Alejandro Colomar
+(C) 2022-2023 Andrew Clayton
+(C) 2022-2023 Liam Crilly
+(C) 2017-2022 Valentin V. Bartenev
+(C) 2017-2022 Max Romanov
+(C) 2021-2022 Oisín Canty
+(C) 2017-2021 Igor Sysoev
+(C) 2017-2021 Andrei Belov
+(C) 2019-2021 Tiago Natel de Moura
+(C) 2019-2020 Axel Duch
+(C) 2018-2019 Alexander Borisov
+.fi
.Pp
SPDX-License-Identifier: Apache-2.0
.Sh See also
diff --git a/docs/unit-openapi.yaml b/docs/unit-openapi.yaml
new file mode 100644
index 00000000..0301326d
--- /dev/null
+++ b/docs/unit-openapi.yaml
@@ -0,0 +1,6334 @@
+openapi: 3.0.0
+info:
+ title: "NGINX Unit 1.30.0"
+ description: "NGINX Unit is a lightweight and versatile application runtime
+ that provides the essential components for your web application as a
+ single open-source server: running application code, serving static assets,
+ handling TLS and request routing.
+
+ \n\n**Important**: Unit's API is designed to expose any part of its
+ configuration as an addressable endpoint. Suppose a JSON
+ object is stored at `/config/listeners/`:\n\n
+
+ ```json
+ {
+ \"*:8080\": {
+ \"pass\": \"applications/wp_emea_dev\"
+ }
+ }
+ ```\n
+
+ Here, `/config/listeners/*:8080` and `/config/listeners/*:8080/pass`
+ are also endpoints. Generally, object options are addressable by
+ their names, array items—by their indexes (`/array/0/`).
+
+
+ \n\n**Note**: By default, Unit is configured through a UNIX domain
+ socket. To use this specification with OpenAPI tools interactively,
+ [start](https://unit.nginx.org/howto/source/#source-startup) Unit
+ with a TCP port as the control socket."
+
+ contact:
+ name: "Unit project"
+ email: "unit-owner@nginx.org"
+ url: "https://unit.nginx.org/"
+
+ license:
+ name: "Apache 2.0"
+ url: "https://www.apache.org/licenses/LICENSE-2.0.html"
+
+ version: 0.2.0
+
+servers:
+ - url: http://{server}:{port}
+ variables:
+ server:
+ default: "localhost"
+
+ port:
+ default: "8080"
+
+# -- PATHS --
+
+paths:
+ /certificates:
+ summary: "Endpoint for the `certificates` object"
+ get:
+ operationId: getCerts
+ summary: "Retrieve the certificates object"
+ description: "Retrieves the entire `/certificates` section that represents
+ Unit's [stored certificates](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ responses:
+ "200":
+ description: "OK; the `certificates` object exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/cert"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/cert"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}:
+ summary: "Endpoint for the certificate bundle object"
+ get:
+ operationId: getCertBundle
+ summary: "Retrieve the certificate bundle object"
+ description: "Retrieves the bundle description that represents
+ Unit's [stored certificate bundle]
+ (https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate bundle object exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/certBundle"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/certBundle"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: putCertBundle
+ summary: "Create or overwrite the actual certificate bundle"
+ description: "Creates or overwrites the [stored certificate bundle]
+ (https://unit.nginx.org/certificates/) in Unit."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/bundleName"
+
+ requestBody:
+ required: true
+ content:
+ application/octet-stream:
+ schema:
+ type: string
+ format: binary
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ /certificates/{bundleName}/key:
+ summary: "Endpoint for the certificate bundle key"
+ get:
+ operationId: getCertBundleKey
+ summary: "Retrieve the certificate bundle key type"
+ description: "Retrieves the bundle key type from a
+ [stored certificate bundle](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate bundle key type exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ Key:
+ value: "RSA (4096 bits)"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain:
+ summary: "Endpoint for the certificate bundle chain"
+ get:
+ operationId: getCertBundleChain
+ summary: "Retrieve the certificate bundle chain"
+ description: "Retrieves the bundle chain from a
+ [stored certificate bundle](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate bundle chain exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/certBundleChain"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/certBundleChain"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}:
+ summary: "Endpoint for the certificate object in the chain array"
+ get:
+ operationId: getCertBundleChainCert
+ summary: "Retrieve certificate object from the chain array"
+ description: "Retrieves the individual certificate from a
+ [stored certificate bundle](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/certBundleChainCert"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/certBundleChainCert"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/subject:
+ summary: "Endpoint for the certificate subject object"
+ get:
+ operationId: getCertBundleChainCertSubj
+ summary: "Retrieve the subject from the certificate object"
+ description: "Retrieves the subject from a
+ [stored certificate](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate subject exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/certBundleChainCertSubj"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/certBundleChainCertSubj"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/subject/common_name:
+ summary: "Endpoint for the certificate's common name"
+ get:
+ operationId: getCertBundleChainCertSubjCN
+ summary: "Retrieve the common name from the certificate subject"
+ description: "Retrieves the common name from a
+ [stored certificate's subject](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate subject's common name exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ CN:
+ value: "example.com"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/subject/country:
+ summary: "Endpoint for the certificate's country of issue"
+ get:
+ operationId: getCertBundleChainCertSubjCountry
+ summary: "Retrieve the country code from the certificate subject"
+ description: "Retrieves the country code from a
+ [stored certificate's subject](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate subject's country code exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ Country:
+ value: "US"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/subject/state_or_province:
+ summary: "Endpoint for the certificate's state or province of issue"
+ get:
+ operationId: getCertBundleChainCertSubjState
+ summary: "Retrieve the state or province code from the
+ certificate subject"
+
+ description: "Retrieves the state or province code from a
+ [stored certificate's subject](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate subject's state or province code
+ exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ StateProvince:
+ value: "CA"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/subject/organization:
+ summary: "Endpoint for the certificate's designated organization"
+ get:
+ operationId: getCertBundleChainCertSubjOrg
+ summary: "Retrieve the organization name from the certificate subject"
+ description: "Retrieves the organization name from a
+ [stored certificate's subject](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate subject's organization name exists
+ in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ Org:
+ value: "Acme, Inc."
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/subject/alt_names:
+ summary: "Endpoint for the certificate's alternative names"
+ get:
+ operationId: getCertBundleChainCertSubjAltArray
+ summary: "Retrieve the alternative names array from the
+ certificate subject"
+
+ description: "Retrieves the alternative names array from a
+ [stored certificate's subject](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate subject's alternative names array
+ exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/stringArray"
+
+ examples:
+ AltNames:
+ value:
+ - "example.com"
+ - "www.example.com"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/subject/alt_names/{arrayIndex2}:
+ summary: "Endpoint for the certificate's alternative name"
+ get:
+ operationId: getCertBundleChainCertSubjAlt
+ summary: "Retrieve an alternative name from the certificate subject"
+ description: "Retrieves an alternative name from a
+ [stored certificate's subject](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/arrayIndex2"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate subject's alternative name exists
+ in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ AltName:
+ value: "example.com"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/issuer:
+ summary: "Endpoint for the certificate issuer object"
+ get:
+ operationId: getCertBundleChainCertIssuer
+ summary: "Retrieve the issuer object from the certificate object"
+ description: "Retrieves the issuer object from a
+ [stored certificate](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate issuer object exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/certBundleChainCertIssuer"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/certBundleChainCertIssuer"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/issuer/common_name:
+ summary: "Endpoint for the certificate issuer's common name"
+ get:
+ operationId: getCertBundleChainCertIssuerCN
+ summary: "Retrieve the common name from the certificate issuer"
+ description: "Retrieves the common name from a
+ [stored certificate's issuer](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate issuer's common name exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ CN:
+ value: "intermediate.ca.example.com"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/issuer/country:
+ summary: "Endpoint for the certificate issuer's country of issue"
+ get:
+ operationId: getCertBundleChainCertissuerCountry
+ summary: "Retrieve the country code from the certificate issuer"
+ description: "Retrieves the country code from a
+ [stored certificate's issuer](https://unit.nginx.org/certificates/)."
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate issuer's country code exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ Country:
+ value: "US"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/issuer/state_or_province:
+ summary: "Endpoint for the certificate issuer's state or province of issue"
+ get:
+ operationId: getCertBundleChainCertIssuerState
+ summary: "Retrieve the state or province code from the certificate issuer"
+ description: "Retrieves the state or province code from a
+ [stored certificate's issuer](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate issuer's state or province code
+ exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ StateProvince:
+ value: "CA"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/issuer/organization:
+ summary: "Endpoint for the certificate issuer's designated organization"
+ get:
+ operationId: getCertBundleChainCertIssuerOrg
+ summary: "Retrieve the organization name from the certificate issuer"
+ description: "Retrieves the organization name from a
+ [stored certificate's issuer](https://unit.nginx.org/certificates/)."
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate issuer's organization name exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ Org:
+ value: "Acme Certification Authority"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/validity:
+ summary: "Endpoint for the certificate validity object"
+ get:
+ operationId: getCertBundleChainCertValid
+ summary: "Retrieve the validity object from the certificate object"
+ description: "Retrieves the validity object from a
+ [stored certificate](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the certificate validity object exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/certBundleChainCertValidity"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/certBundleChainCertValidity"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/validity/since:
+ summary: "Endpoint for the certificate validity's starting time"
+ get:
+ operationId: getCertBundleChainCertValidSince
+ summary: "Retrieve the starting time of certificate validity"
+ description: "Retrieves the starting time of a
+ [stored certificate's validity](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the starting time of certificate validity exists
+ in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ DateTime:
+ value: "Sep 18 19:46:19 2022 GMT"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /certificates/{bundleName}/chain/{arrayIndex}/validity/until:
+ summary: "Endpoint for the certificate validity's ending time"
+ get:
+ operationId: getCertBundleChainCertValidUntil
+ summary: "Retrieve the ending time of certificate validity"
+ description: "Retrieves the ending time of a
+ [stored certificate's validity](https://unit.nginx.org/certificates/)."
+
+ tags:
+ - certificates
+
+ parameters:
+ - $ref: "#/components/parameters/arrayIndex"
+ - $ref: "#/components/parameters/bundleName"
+
+ responses:
+ "200":
+ description: "OK; the ending time of certificate validity exists
+ in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ DateTime:
+ value: "Sep 18 19:46:19 2022 GMT"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config:
+ summary: "Endpoint for the `config` object"
+ get:
+ operationId: getConfig
+ summary: "Retrieve the config"
+ description: "Retrieves the `config` object that represents Unit's
+ [configuration](https://unit.nginx.org/configuration)."
+
+ tags:
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `config` object exists in the
+ configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/config"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/config"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateConfig
+ summary: "Create or overwrite the config"
+ description: "Creates or overwrites the entire `config` object."
+ tags:
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/config"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/config"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteConfig
+ summary: "Delete the config object"
+ description: "Deletes the entire `config` object."
+ tags:
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/access_log:
+ summary: "Endpoint for the `access_log` object"
+ get:
+ operationId: getAccessLog
+ summary: "Retrieve the access log"
+ description: "Retrieves the `access_log` entity that represents Unit's
+ [access log](https://unit.nginx.org/configuration/#access-log)."
+
+ tags:
+ - access log
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `access_log` entity exists in the
+ configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configAccessLog"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configAccessLogBasic"
+
+ example2:
+ $ref: "#/components/examples/configAccessLogComplex"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateAccessLog
+ summary: "Create or overwrite the access log"
+ description: "Creates or overwrites the entire `access_log` entity."
+ tags:
+ - access log
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configAccessLog"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configAccessLogBasic"
+
+ example2:
+ $ref: "#/components/examples/configAccessLogComplex"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteAccessLog
+ summary: "Delete the access log"
+ description: "Deletes the entire `access_log` section."
+ tags:
+ - access log
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/access_log/format:
+ summary: "Endpoint for the `format` access log option"
+ get:
+ operationId: getAccessLogFormat
+ summary: "Retrieve the access log format option"
+ description: "Retrieves the `format` option that represents Unit's
+ [access log format](https://unit.nginx.org/configuration/#access-log)
+ in the `access_log` object."
+
+ tags:
+ - access log
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `format` option exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ format:
+ value: '$remote_addr - - [$time_local] "$request_line" $status
+ $body_bytes_sent "$header_referer" "$header_user_agent"'
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateAccessLogFormat
+ summary: "Create or overwrite the access log format"
+ description: "Creates or overwrites the `format` option in the
+ `access_log` object."
+
+ tags:
+ - access log
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ format:
+ value: '$remote_addr - - [$time_local] "$request_line" $status
+ $body_bytes_sent "$header_referer" "$header_user_agent"'
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteAccessLogFormat
+ summary: "Delete the access log format"
+ description: "Deletes the `format` option from the `access_log` object."
+ tags:
+ - access log
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/access_log/path:
+ summary: "Endpoint for the `path` access log option"
+ get:
+ operationId: getAccessLogPath
+ summary: "Retrieve the access log path option"
+ description: "Retrieves the `path` option that represents Unit's
+ [access log path](https://unit.nginx.org/configuration/#access-log)
+ in the `access_log` object."
+
+ tags:
+ - access log
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `path` option exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ path:
+ value: "/var/log/unit/access.log"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateAccessLogPath
+ summary: "Create or overwrite the access log path"
+ description: "Creates or overwrites the `path` option in the `access_log`
+ object."
+
+ tags:
+ - access log
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ path:
+ value: "/var/log/unit/access.log"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteAccessLogPath
+ summary: "Delete the access log path"
+ description: "Deletes the `path` option from the `access_log` object."
+ tags:
+ - access log
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/applications:
+ summary: "Endpoint for the `applications` object in the configuration"
+ get:
+ operationId: getApplications
+ summary: "Retrieve the applications object"
+ description: "Retrieves the `applications` object that represents Unit's
+ [applications](https://unit.nginx.org/configuration/#applications)."
+ tags:
+ - applications
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `applications` object exists in the
+ configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configApplications"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configApplications"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateApplications
+ summary: "Overwrite the applications object"
+ description: "Overwrites the `applications` object in the configuration."
+ tags:
+ - applications
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configApplications"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configApplications"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteApplications
+ summary: "Delete the applications object"
+ description: "Deletes the `applications` object from the configuration."
+ tags:
+ - applications
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/applications/{appName}:
+ summary: "Endpoint for an application object in the configuration"
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ get:
+ operationId: getApplication
+ summary: "Retrieve an application object"
+ description: "Retrieves the `{appName}` object that represents an
+ [application](https://unit.nginx.org/configuration/#applications)
+ in Unit's control API."
+
+ tags:
+ - applications
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `{appName}` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configApplication"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configApplication"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateApplication
+ summary: "Create or overwrite the application object"
+ description: "Creates or overwrites the `{appName}` object in the
+ configuration."
+
+ tags:
+ - applications
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configApplication"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configApplication"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteApplication
+ summary: "Delete the application object"
+ description: "Deletes the `{appName}` object from the configuration."
+ tags:
+ - applications
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /control/applications/{appName}/restart:
+ summary: "Endpoint for the `applications/{appName}/restart` option"
+ get:
+ operationId: getAppRestart
+ summary: "Restart the {appName} application"
+ description: "Tells Unit to [restart]
+ (https://unit.nginx.org/configuration/#process-management)
+ the application identified by `{appName}`."
+
+ tags:
+ - apps
+ - control
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ responses:
+ "200":
+ description: "OK; the `{appName}` application was gracefully
+ restarted."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/jsonSuccessMessage"
+
+ example:
+ success: "Ok"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners:
+ summary: "Endpoint for the `listeners` object"
+ get:
+ operationId: getListeners
+ summary: "Retrieve all the listeners"
+ description: "Retrieves the `listeners` object whose options represent
+ individual [listeners](https://unit.nginx.org/configuration/#listeners);
+ each is a unique combination of a host IP address (or a `*` wildcard that
+ matches any host IP address) and a port."
+
+ tags:
+ - listeners
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `listeners` object exists in the configuration."
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListeners"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListeners"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListeners
+ summary: "Create or overwrite all the listeners"
+ description: "Creates or overwrites the entire `listeners` section with
+ an object whose options represent individual listeners; each is a unique
+ combination of a host IP address (or a `*` wildcard that matches any host
+ IP address) and a port."
+
+ tags:
+ - listeners
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListeners"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerSimple"
+
+ example2:
+ $ref: "#/components/examples/configListeners"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListeners
+ summary: "Delete all the listeners"
+ description: "Deletes the entire `listeners` section."
+ tags:
+ - listeners
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}:
+ summary: "Endpoint for a `listeners/{listenerName}` object that
+ represents a listener"
+
+ get:
+ operationId: getListener
+ summary: "Retrieve a listener object"
+ description: "Retrieves the `{listenerName}` object that configures a
+ [listener](https://unit.nginx.org/configuration/#listeners)."
+
+ tags:
+ - listeners
+ - config
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `{listenerName}` object exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListener"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerSimple"
+
+ example2:
+ $ref: "#/components/examples/configListenerComplex"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListener
+ summary: "Create or overwrite a listener object"
+ description: "Creates or overwrites the `{listenerName}` object."
+ tags:
+ - listeners
+ - config
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListener"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerSimple"
+
+ example2:
+ $ref: "#/components/examples/configListenerComplex"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListener
+ summary: "Delete a listener object"
+ description: "Deletes the `{listenerName}` object."
+ tags:
+ - listeners
+ - config
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/pass:
+ summary: "Endpoint for the `listeners/{listenerName}/pass` option"
+ get:
+ operationId: getListenerPass
+ summary: "Retrieve the pass option in a listener"
+ description: "Retrieves the `pass` option that configures the destination
+ where the `{listenerName}` listener object
+ [passes its requests](https://unit.nginx.org/configuration/#listeners)."
+
+ tags:
+ - listeners
+ - config
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `pass` option exists in the configuration."
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerPassApp"
+
+ example2:
+ $ref: "#/components/examples/configListenerPassRoute"
+
+ example3:
+ $ref: "#/components/examples/configListenerPassAppTarget"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerPass
+ summary: "Update the pass option in a listener"
+ description: "Overwrites the `pass` option."
+ tags:
+ - listeners
+ - config
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerPassApp"
+
+ example2:
+ $ref: "#/components/examples/configListenerPassRoute"
+
+ example3:
+ $ref: "#/components/examples/configListenerPassAppTarget"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ /config/listeners/{listenerName}/tls:
+ summary: "Endpoint for the `listeners/{listenerName}/tls` object"
+ get:
+ operationId: getListenerTls
+ summary: "Retrieve the tls object in a listener"
+ description: "Retrieves the `tls` object that configures [TLS settings]
+ (https://unit.nginx.org/configuration/#ssl-tls-configuration)
+ for the `{listenerName}` listener."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `tls` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTls"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTls"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerTls
+ summary: "Create or overwrite the tls object in a listener"
+ description: "Creates or overwrites the entire `tls` object."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTls"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTls"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerTls
+ summary: "Delete the tls object in a listener"
+ description: "Deletes the `tls` object."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/tls/conf_commands:
+ summary: "Endpoint for the `listeners/{listenerName}/tls/conf_commands`
+ object"
+
+ get:
+ operationId: listListenerTlsConfCommands
+ summary: "Retrieve the conf_commands object in a listener"
+ description: "Retrieves the `conf_commands` object that sets
+ [TLS configuration commands]
+ (https://unit.nginx.org/configuration/#ssl-tls-configuration)
+ for the `{listenerName}` listener."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `conf_commands` object exists in the
+ configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTlsConfCommands"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsConfCommands"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerTlsConfCommands
+ summary: "Create or overwrite the conf_commands object in a listener"
+ description: "Creates or overwrites the entire `conf_commands` object."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTlsConfCommands"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsConfCommands"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerTlsConfCommands
+ summary: "Delete the conf_commands object in a listener"
+ description: "Deletes the `conf_commands` object."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/tls/session:
+ summary: "Endpoint for the `listeners/{listenerName}/tls/session` object"
+ get:
+ operationId: getListenerTlsSession
+ summary: "Retrieve the session object in a listener"
+ description: "Retrieves the `session` object that configures
+ [TLS session settings]
+ (https://unit.nginx.org/configuration/#ssl-tls-configuration)
+ for the `{listenerName}` listener."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `session` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTlsSession"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsSession"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerTlsSession
+ summary: "Create or overwrite the session object in a listener"
+ description: "Creates or overwrites the entire `session` object."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTlsSession"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsSession"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerTlsSession
+ summary: "Delete the session object in a listener"
+ description: "Deletes the `session` object."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/tls/session/tickets:
+ summary: "Endpoint for the `listeners/{listenerName}/session/tickets`
+ object"
+
+ get:
+ operationId: listListenerTlsSessionTickets
+ summary: "Retrieve the tickets option in a listener"
+ description: "Retrieves the `tickets` option that lists
+ [TLS session ticket keys]
+ (https://unit.nginx.org/configuration/#ssl-tls-configuration)
+ used with the `{listenerName}` listener."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `tickets` option exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTlsSessionTickets"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsBool"
+
+ example2:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsString"
+
+ example3:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsArray"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ post:
+ operationId: insertListenerTlsSessionTicket
+ summary: "Add a new tickets array item in a listener"
+ description: "Adds a new session ticket key to the end of the `tickets`
+ string array that lists [session ticket keys]
+ (https://unit.nginx.org/configuration/#ssl-tls-configuration)
+ used with the `{listenerName}` listener."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsString"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ put:
+ operationId: updateListenerTlsSessionTickets
+ summary: "Create or overwrite the tickets option in a listener"
+ description: "Creates or overwrites the entire `tickets` option with a
+ boolean, string, or string array that configures [session ticket keys]
+ (https://unit.nginx.org/configuration/#ssl-tls-configuration)
+ used with the `{listenerName}` listener."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTlsSessionTickets"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsBool"
+
+ example2:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsString"
+
+ example3:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsArray"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerTlsSessionTickets
+ summary: "Delete the tickets option in a listener"
+ description: "Deletes the `tickets` option."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex}:
+ summary: "Endpoint for the `listeners/{listenerName}/tls/certificate`
+ object"
+
+ get:
+ operationId: getListenerTlsSessionTicket
+ summary: "Retrieve a ticket array item in a listener"
+ description: "Retrieves the `{arrayIndex}`th item from the `tickets`
+ string array."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ responses:
+ "200":
+ description: "OK; the ticket key at `{arrayIndex}` exists in the
+ configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsString"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerTlsSessionTicket
+ summary: "Create or overwrite a ticket array item in a listener"
+ description: "Overwrites a single `tickets` string array item identified
+ by `{arrayIndex}`."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsSessionTicketsString"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerTlsSessionTicket
+ summary: "Delete a ticket array item in a listener"
+ description: "Deletes an item from the `tickets` string array."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/tls/certificate:
+ summary: "Endpoint for the `listeners/{listenerName}/tls/certificate`
+ object"
+
+ get:
+ operationId: listListenerTlsCertificates
+ summary: "Retrieve the certificate option in a listener"
+ description: "Retrieves the `certificate` option that lists [certificate
+ bundles](https://unit.nginx.org/configuration/#certificate-management)
+ used with the `{listenerName}` listener."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `certificate` option exists in the
+ configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerTlsCertificate"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsCertificateArray"
+
+ example2:
+ $ref: "#/components/examples/configListenerTlsCertificateString"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ post:
+ operationId: insertListenerTlsCertificate
+ summary: "Add a new certificate array item in a listener"
+ description: "Adds a new certificate bundle name to the end of the
+ `certificate` string array."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsCertificateString"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ put:
+ operationId: updateListenerTlsCertificates
+ summary: "Create or overwrite the certificate option in a listener"
+ description: "Creates or overwrites the entire `certificate` option."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsCertificateArray"
+
+ example2:
+ $ref: "#/components/examples/configListenerTlsCertificateString"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerTlsCertificates
+ summary: "Delete the certificate option in a listener"
+ description: "Deletes the `certificate` option."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/tls/certificate/{arrayIndex}:
+ summary: "Endpoint for the `listeners/{listenerName}/tls/certificate`
+ string array item"
+
+ get:
+ operationId: getListenerTlsCertificate
+ summary: "Retrieve a certificate array item in a listener"
+ description: "Retrieves the `{arrayIndex}`th item from the `certificate`
+ string array."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ responses:
+ "200":
+ description: "OK; the certificate bundle at `{arrayIndex}` exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsCertificateString"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerTlsCertificate
+ summary: "Update a certificate array item in a listener"
+ description: "Overwrites a single `certificate` string array item
+ identified by `{arrayIndex}`."
+
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerTlsCertificateString"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerTlsCertificate
+ summary: "Delete a certificate array item in a listener"
+ description: "Deletes an item from the `certificate` string array."
+ tags:
+ - listeners
+ - config
+ - tls
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/forwarded:
+ summary: "Endpoint for the `listeners/{listenerName}/forwarded` object"
+ get:
+ operationId: getListenerForwarded
+ summary: "Retrieve the forwarded object in a listener"
+ description: "Retrieves the `forwarded` object that configures
+ [originating IP identification]
+ (https://unit.nginx.org/configuration/#ip-protocol-forwarding)
+ for the `{listenerName}` listener."
+
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `forwarded` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerForwarded"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwarded"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerForwarded
+ summary: "Create or overwrite the forwarded object in a listener"
+ description: "Creates or overwrites the entire `forwarded` object."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configListenerForwarded"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwarded"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerForwarded
+ summary: "Delete the forwarded object in a listener"
+ description: "Deletes the `forwarded` object."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/forwarded/client_ip:
+ summary: "Endpoint for the `listeners/{listenerName}/forwarded/client_ip`
+ option"
+
+ get:
+ operationId: getListenerForwardedClientIp
+ summary: "Retrieve the client_ip option in a listener"
+ description: "Retrieves the `client_ip` option that configures the headers
+ expected by the `{listenerName}` listener for
+ [originating IP identification]
+ (https://unit.nginx.org/configuration/#originating-ip-identification)."
+
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `client_ip` option exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedClientIp"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerForwardedClientIp
+ summary: "Create or overwrite the client_ip option in a listener"
+ description: "Creates or overwrites the `client_ip` option."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedClientIp"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ /config/listeners/{listenerName}/forwarded/protocol:
+ summary: "Endpoint for the `listeners/{listenerName}/forwarded/protocol`
+ option"
+
+ get:
+ operationId: getListenerForwardedProtocol
+ summary: "Retrieve the protocol option in a listener"
+ description: "Retrieves the `protocol` option that configures the protocol
+ expected by the `{listenerName}` listener for
+ [originating IP identification]
+ (https://unit.nginx.org/configuration/#originating-ip-identification)."
+
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `protocol` option exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+ enum:
+ - "http"
+ - "https"
+ - "on"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedProtocol"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerForwardedProtocol
+ summary: "Create or overwrite the protocol option in a listener"
+ description: "Creates or overwrites the `protocol` option."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+ enum:
+ - "http"
+ - "https"
+ - "on"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedProtocol"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ /config/listeners/{listenerName}/forwarded/recursive:
+ summary: "Endpoint for the `listeners/{listenerName}/forwarded/recursive`
+ option"
+
+ get:
+ operationId: getListenerForwardedRecursive
+ summary: "Retrieve the recursive option in a listener"
+ description: "Retrieves the `recursive` option that controls how the
+ `{listenerName}` listener uses [originating IP identification]
+ (https://unit.nginx.org/configuration/#originating-ip-identification)."
+
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `recursive` option exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: boolean
+
+ examples:
+ example1:
+ summary: "Enables recursive header field traversal"
+ value: true
+
+ example2:
+ summary: "Disables recursive header field traversal"
+ value: false
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerForwardedRecursive
+ summary: "Create or overwrite the recursive option in a listener"
+ description: "Creates or overwrites the `recursive` option."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: boolean
+
+ examples:
+ example1:
+ summary: "Enables recursive header field traversal"
+ value: true
+
+ example2:
+ summary: "Disables recursive header field traversal"
+ value: false
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerForwardedRecursive
+ summary: "Delete the recursive object in a listener"
+ description: "Deletes the `recursive` object."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/forwarded/source:
+ summary: "Endpoint for the `listeners/{listenerName}/forwarded/source`
+ object"
+
+ get:
+ operationId: listListenerForwardedSources
+ summary: "Retrieve the source option in a listener"
+ description: "Retrieves the `source` option that defines address patterns
+ for trusted addresses, used by the `{listenerName}` listener for
+ [originating IP identification]
+ (https://unit.nginx.org/configuration/#originating-ip-identification)."
+
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ description: "OK; the `source` option exists in the configuration."
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedSourceArray"
+
+ example2:
+ $ref: "#/components/examples/configListenerForwardedSourceString"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ post:
+ operationId: insertListenerForwardedSource
+ summary: "Add a new source array item in a listener"
+ description: "Adds a new source bundle name to the end of the `source`
+ string array defines address patterns for trusted addresses, used by
+ the `{listenerName}` listener for [originating IP identification]
+ (https://unit.nginx.org/configuration/#originating-ip-identification)."
+
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedSourceString"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ put:
+ operationId: updateListenerForwardedSources
+ summary: "Create or overwrite the source option in a listener"
+ description: "Creates or overwrites the entire `source` option."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedSourceArray"
+
+ example2:
+ $ref: "#/components/examples/configListenerForwardedSourceString"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerForwardedSources
+ summary: "Delete the source option in a listener"
+ description: "Deletes the `source` option."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/listeners/{listenerName}/forwarded/source/{arrayIndex}:
+ summary: "Endpoint for the `listeners/{listenerName}/forwarded/source`
+ string array item"
+
+ get:
+ operationId: getListenerForwardedSource
+ summary: "Retrieve a source array item in a listener"
+ description: "Retrieves the `{arrayIndex}`th item from the `source`
+ string array."
+
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ responses:
+ "200":
+ description: "OK; the address pattern at `{arrayIndex}` exists in the
+ configuration."
+
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedSourceString"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateListenerForwardedSource
+ summary: "Update a source array item in a listener"
+ description: "Overwrites a single `source` string array item identified
+ by `{arrayIndex}`."
+
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: string
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configListenerForwardedSourceString"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteListenerForwardedSource
+ summary: "Delete a source array item in a listener"
+ description: "Deletes an item from the `source` string array."
+ tags:
+ - listeners
+ - config
+ - xff
+
+ parameters:
+ - $ref: "#/components/parameters/listenerName"
+ - $ref: "#/components/parameters/arrayIndex"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/routes:
+ summary: "Endpoint for the `routes` entity in the configuration"
+ get:
+ operationId: getRoutes
+ summary: "Retrieve the routes entity"
+ description: "Retrieves the `routes` entity that represents Unit's
+ [routes](https://unit.nginx.org/configuration/#routes)."
+
+ tags:
+ - config
+ - routes
+
+ responses:
+ "200":
+ description: "OK; the `routes` entity exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configRoutes"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configRoutes"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateRoutes
+ summary: "Overwrite the routes entity"
+ description: "Overwrites the `routes` entity in the configuration."
+ tags:
+ - config
+ - routes
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configRoutes"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configRoutes"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteRoutes
+ summary: "Delete the routes entity"
+ description: "Deletes the `routes` entity from the configuration."
+ tags:
+ - config
+ - routes
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings:
+ summary: "Endpoint for the `settings` object in the configuration"
+ get:
+ operationId: getSettings
+ summary: "Retrieve the settings object"
+ description: "Retrieves the `settings` object that represents Unit's
+ [global settings](https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `settings` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettings"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettings"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettings
+ summary: "Create or overwrite the settings object"
+ description: "Creates or overwrites the `settings` object in the
+ configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettings"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettings"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettings
+ summary: "Delete the settings object"
+ description: "Deletes the `settings` object from the configuration."
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http:
+ summary: "Endpoint for the `http` object in `settings`"
+
+ get:
+ operationId: getSettingsHttp
+ summary: "Retrieve the http object from settings"
+ description: "Retrieves the `http` object that represents Unit's
+ [HTTP settings](https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `http` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettingsHttp"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettingsHttp"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttp
+ summary: "Create or overwrite the http object"
+ description: "Creates or overwrites the `http` object in the
+ configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettingsHttp"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettingsHttp"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttp
+ summary: "Delete the http object"
+ description: "Deletes the `http` object from the configuration."
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/body_read_timeout:
+ summary: "Endpoint for the `body_read_timeout` option in `http`"
+ get:
+ operationId: getSettingsHttpBodyReadTimeout
+ summary: "Retrieve the body_read_timeout option from http settings"
+ description: "Retrieves the `body_read_timeout` option that represents
+ Unit's [request body read timeout]
+ (https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `body_read_timeout` option exists in the
+ configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ BodyReadTimeout:
+ value: 30
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpBodyReadTimeout
+ summary: "Create or overwrite the body_read_timeout option"
+ description: "Creates or overwrites the `body_read_timeout` option in
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ BodyReadTimeout:
+ value: 30
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpBodyReadTimeout
+ summary: "Delete the body_read_timeout option"
+ description: "Deletes the `body_read_timeout` option from the
+ configuration."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/discard_unsafe_fields:
+ summary: "Endpoint for the `discard_unsafe_fields` option in `http`"
+ get:
+ operationId: getSettingsDiscardUnsafeFields
+ summary: "Retrieve the discard_unsafe_fields option from http settings"
+ description: "Retrieves the `discard_unsafe_fields` option that represents
+ Unit's [header processing behavior]
+ (https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `discard_unsafe_fields` option exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: boolean
+
+ examples:
+ DiscardUnsafeFields:
+ value: true
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsDiscardUnsafeFields
+ summary: "Create or overwrite the discard_unsafe_fields option"
+ description: "Creates or overwrites the `discard_unsafe_fields` option
+ in the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: boolean
+
+ examples:
+ DiscardUnsafeFields:
+ value: true
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsDiscardUnsafeFields
+ summary: "Delete the discard_unsafe_fields option"
+ description: "Deletes the `discard_unsafe_fields` option from
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/header_read_timeout:
+ summary: "Endpoint for the `header_read_timeout` option in `http`"
+ get:
+ operationId: getSettingsHttpHeaderReadTimeout
+ summary: "Retrieve the header_read_timeout option from http settings"
+ description: "Retrieves the `header_read_timeout` option that represents
+ Unit's [request headers read timeout]
+ (https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `header_read_timeout` option exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ HeaderReadTimeout:
+ value: 30
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpHeaderReadTimeout
+ summary: "Create or overwrite the header_read_timeout option"
+ description: "Creates or overwrites the `header_read_timeout` option
+ in the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ HeaderReadTimeout:
+ value: 30
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpHeaderReadTimeout
+ summary: "Delete the header_read_timeout option"
+ description: "Deletes the `header_read_timeout` option from
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/idle_timeout:
+ summary: "Endpoint for the `idle_timeout` option in `http`"
+ get:
+ operationId: getSettingsHttpIdleTimeout
+ summary: "Retrieve the idle_timeout option from http settings"
+ description: "Retrieves the `idle_timeout` option that represents
+ Unit's [keep-alive idling timeout]
+ (https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `idle_timeout` option exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ IdleTimeout:
+ value: 180
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpIdleTimeout
+ summary: "Create or overwrite the idle_timeout option"
+ description: "Creates or overwrites the `idle_timeout` option in
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ IdleTimeout:
+ value: 180
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpIdleTimeout
+ summary: "Delete the idle_timeout option"
+ description: "Deletes the `idle_timeout` option from the configuration."
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/log_route:
+ summary: "Endpoint for the `log_route` option in `http`"
+ get:
+ operationId: getSettingsLogRoute
+ summary: "Retrieve the log_route option from http settings"
+ description: "Retrieves the `log_route` option that controls
+ Unit's [router logging]
+ (https://unit.nginx.org/troubleshooting/#router-log)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `log_route` option exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: boolean
+
+ examples:
+ LogRoute:
+ value: true
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsLogRoute
+ summary: "Create or overwrite the log_route option"
+ description: "Creates or overwrites the `log_route` option
+ in the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: boolean
+
+ examples:
+ LogRoute:
+ value: true
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsLogRoute
+ summary: "Delete the log_route option"
+ description: "Deletes the `log_route` option from
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/max_body_size:
+ summary: "Endpoint for the `max_body_size` option in `http`"
+ get:
+ operationId: getSettingsHttpMaxBodySize
+ summary: "Retrieve the max_body_size option from http settings"
+ description: "Retrieves the `max_body_size` option that represents
+ Unit's [request body size limit]
+ (https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `max_body_size` option exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ MaxBodySize:
+ value: 8388608
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpMaxBodySize
+ summary: "Create or overwrite the max_body_size option"
+ description: "Creates or overwrites the `max_body_size` option in
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ MaxBodySize:
+ value: 8388608
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpMaxBodySize
+ summary: "Delete the max_body_size option"
+ description: "Deletes the `max_body_size` option from the configuration."
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/send_timeout:
+ summary: "Endpoint for the `send_timeout` option in `http`"
+ get:
+ operationId: getSettingsHttpSendTimeout
+ summary: "Retrieve the send_timeout option from http settings"
+ description: "Retrieves the `send_timeout` option that represents
+ Unit's [response send timeout]
+ (https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `send_timeout` option exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ SendTimeout:
+ value: 30
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpSendTimeout
+ summary: "Create or overwrite the send_timeout option"
+ description: "Creates or overwrites the `send_timeout` option in
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ SendTimeout:
+ value: 30
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpSendTimeout
+ summary: "Delete the send_timeout option"
+ description: "Deletes the `send_timeout` option from the configuration."
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/server_version:
+ summary: "Endpoint for the `server_version` option in `http`"
+ get:
+ operationId: getSettingsServerVersion
+ summary: "Retrieve the server_version option from http settings"
+ description: "Retrieves the `server_version` option that controls
+ Unit's [Server header field versioning]
+ (https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `server_version` option exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: boolean
+
+ examples:
+ ServerVersion:
+ value: true
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpServerVersion
+ summary: "Create or overwrite the server_version option"
+ description: "Creates or overwrites the `server_version` option
+ in the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: boolean
+
+ examples:
+ ServerVersion:
+ value: true
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpServerVersion
+ summary: "Delete the server_version option"
+ description: "Deletes the `server_version` option from
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
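+
+ # Not part of the spec: assuming the same default control socket, the boolean
+ # payload below would hide the version in Unit's `Server` header field:
+ #
+ #   curl -X PUT -d 'false' --unix-socket /var/run/control.unit.sock \
+ #        http://localhost/config/settings/http/server_version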
+
+ /config/settings/http/static:
+ summary: "Endpoint for the `static` object in `http`"
+ get:
+ operationId: getSettingsHttpStatic
+ summary: "Retrieve the static object from http settings"
+ description: "Retrieves the `static` object that represents
+ Unit's [static content settings]
+ (https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `static` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettingsHttpStatic"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettingsHttpStatic"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpStatic
+ summary: "Create or overwrite the static object"
+ description: "Creates or overwrites the `static` object in
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettingsHttpStatic"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettingsHttpStatic"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpStatic
+ summary: "Delete the static object"
+ description: "Deletes the `static` object from the configuration."
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/static/mime_types:
+ summary: "Endpoint for the `mime_types` object in `static`"
+ get:
+ operationId: getSettingsHttpStaticMimeTypes
+ summary: "Retrieve the mime_types object from static settings"
+ description: "Retrieves the `mime_types` object that represents Unit's
+ [MIME type settings](https://unit.nginx.org/configuration/#settings)."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the `mime_types` object exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettingsHttpStaticMimeTypes"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettingsHttpStaticMimeTypes"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpStaticMimeTypes
+ summary: "Create or overwrite the mime_types object"
+ description: "Creates or overwrites the `mime_types` object in
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettingsHttpStaticMimeTypes"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettingsHttpStaticMimeTypes"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpStaticMimeTypes
+ summary: "Delete the mime_types object"
+ description: "Deletes the `mime_types` object from the configuration."
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /config/settings/http/static/mime_types/{mimeType}:
+ summary: "Endpoint for a MIME type option in `mime_types`"
+ parameters:
+ - $ref: "#/components/parameters/mimeType"
+
+ get:
+ operationId: getSettingsHttpStaticMimeType
+ summary: "Retrieve the MIME type option from MIME type settings"
+ description: "Retrieves the MIME type option that represents a
+ [MIME type](https://unit.nginx.org/configuration/#settings)
+ supported by Unit."
+
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ description: "OK; the MIME type option exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettingsHttpStaticMimeType"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettingsHttpStaticMimeType"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ put:
+ operationId: updateSettingsHttpStaticMimeType
+ summary: "Create or overwrite the MIME type option"
+ description: "Creates or overwrites the MIME type option in
+ the configuration."
+
+ tags:
+ - settings
+ - config
+
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/configSettingsHttpStaticMimeType"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/configSettingsHttpStaticMimeType"
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkUpdated"
+
+ "400":
+ $ref: "#/components/responses/responseBadRequest"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ "500":
+ $ref: "#/components/responses/responseInternalError"
+
+ delete:
+ operationId: deleteSettingsHttpStaticMimeType
+ summary: "Delete the MIME type option"
+ description: "Deletes the MIME type option from the configuration."
+ tags:
+ - settings
+ - config
+
+ responses:
+ "200":
+ $ref: "#/components/responses/responseOkDeleted"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
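+
+ # Not part of the spec: a sketch of defining a custom MIME type through the
+ # endpoint above; the type and extensions are arbitrary examples, and the
+ # percent-encoded path segment stands in for {mimeType}:
+ #
+ #   curl -X PUT -d '[".md", ".markdown"]' \
+ #        --unix-socket /var/run/control.unit.sock \
+ #        http://localhost/config/settings/http/static/mime_types/text%2Fmarkdown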
+
+ /status:
+ summary: "Endpoint for the `status` object"
+ get:
+ operationId: getStatus
+ summary: "Retrieve the status object"
+ description: "Retrieves the entire `/status` section that represents
+ Unit's [usage statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `status` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/status"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/status"
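+
+ # Not part of the spec: the usage statistics above are read-only, so the
+ # entire object is fetched with a plain GET (default socket path assumed):
+ #
+ #   curl --unix-socket /var/run/control.unit.sock http://localhost/status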
+
+ /status/connections:
+ summary: "Endpoint for the `connections` status object"
+ get:
+ operationId: getStatusConnections
+ summary: "Retrieve the connections status object"
+ description: "Retrieves the `connections` status object that represents
+ Unit's [connection statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `connections` object exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/statusConnections"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/statusConnections"
+
+ /status/connections/accepted:
+ summary: "Endpoint for the `accepted` connections number"
+ get:
+ operationId: getStatusConnectionsAccepted
+ summary: "Retrieve the accepted connections number"
+ description: "Retrieves the `accepted` connections number that represents
+ Unit's [connection statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `accepted` number exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Accepted:
+ value: 1067
+
+ /status/connections/active:
+ summary: "Endpoint for the `active` connections number"
+ get:
+ operationId: getStatusConnectionsActive
+ summary: "Retrieve the active connections number"
+ description: "Retrieves the `active` connections number that represents
+ Unit's [connection statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `active` number exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Active:
+ value: 13
+
+ /status/connections/idle:
+ summary: "Endpoint for the `idle` connections number"
+ get:
+ operationId: getStatusConnectionsIdle
+ summary: "Retrieve the idle connections number"
+ description: "Retrieves the `idle` connections number that represents
+ Unit's [connection statistics](https://unit.nginx.org/usagestats/)."
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `idle` number exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Idle:
+ value: 4
+
+ /status/connections/closed:
+ summary: "Endpoint for the `closed` connections number"
+ get:
+ operationId: getStatusConnectionsClosed
+ summary: "Retrieve the closed connections number"
+ description: "Retrieves the `closed` connections number that represents
+ Unit's [connection statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `closed` number exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Closed:
+ value: 1050
+
+ /status/requests:
+ summary: "Endpoint for the `requests` status object"
+ get:
+ operationId: getStatusRequests
+ summary: "Retrieve the requests status object"
+ description: "Retrieves the `requests` status object that represents
+ Unit's instance [request statistics]
+ (https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `requests` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/statusRequests"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/statusRequests"
+
+ /status/requests/total:
+ summary: "Endpoint for the `total` requests number"
+ get:
+ operationId: getStatusRequestsTotal
+ summary: "Retrieve the total requests number"
+ description: "Retrieves the `total` requests number that represents Unit's
+ instance [request statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `total` number exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Total:
+ value: 1307
+
+ /status/applications:
+ summary: "Endpoint for the `applications` status object"
+ get:
+ operationId: getStatusApplications
+ summary: "Retrieve the applications status object"
+ description: "Retrieves the `applications` status object that represents
+ Unit's per-app
+ [process and request statistics](https://unit.nginx.org/usagestats/)."
+ tags:
+ - status
+
+ responses:
+ "200":
+ description: "OK; the `applications` object exists in
+ the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/statusApplications"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/statusApplications"
+
+ /status/applications/{appName}:
+ summary: "Endpoint for the app status object"
+ get:
+ operationId: getStatusApplicationsApp
+ summary: "Retrieve the app status object"
+ description: "Retrieves the app status object that represents
+ Unit's per-app
+ [process and request statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ responses:
+ "200":
+ description: "OK; the app object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/statusApplicationsApp"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/statusApplicationsApp"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /status/applications/{appName}/processes:
+ summary: "Endpoint for the `processes` app status object"
+ get:
+ operationId: getStatusApplicationsAppProcesses
+ summary: "Retrieve the processes app status object"
+ description: "Retrieves the `processes` app status object that represents
+ Unit's per-app
+ [process statistics](https://unit.nginx.org/usagestats/)."
+ tags:
+ - status
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ responses:
+ "200":
+ description: "OK; the `processes` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/statusApplicationsAppProcesses"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/statusApplicationsAppProcesses"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /status/applications/{appName}/processes/running:
+ summary: "Endpoint for the `running` processes number"
+ get:
+ operationId: getStatusApplicationsAppProcessesRunning
+ summary: "Retrieve the running processes app status number"
+ description: "Retrieves the `running` processes number that represents
+ Unit's per-app
+ [process statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ responses:
+ "200":
+ description: "OK; the `running` number exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Running:
+ value: 9
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /status/applications/{appName}/processes/starting:
+ summary: "Endpoint for the `starting` processes number"
+ get:
+ operationId: getStatusApplicationsAppProcessesStarting
+ summary: "Retrieve the starting processes app status number"
+ description: "Retrieves the `starting` processes number that represents
+ Unit's per-app
+ [process statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ responses:
+ "200":
+ description: "OK; the `starting` number exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Starting:
+ value: 1
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /status/applications/{appName}/processes/idle:
+ summary: "Endpoint for the `idle` processes number"
+ get:
+ operationId: getStatusApplicationsAppProcessesIdle
+ summary: "Retrieve the idle processes app status number"
+ description: "Retrieves the `idle` processes number that represents
+ Unit's per-app
+ [process statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ responses:
+ "200":
+ description: "OK; the `idle` number exists in the configuration."
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Idle:
+ value: 0
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /status/applications/{appName}/requests:
+ summary: "Endpoint for the `requests` app status object"
+ get:
+ operationId: getStatusApplicationsAppRequests
+ summary: "Retrieve the requests app status object"
+ description: "Retrieves the `requests` app status object that represents
+ Unit's per-app
+ [request statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ responses:
+ "200":
+ description: "OK; the `requests` object exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/statusApplicationsAppRequests"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/statusApplicationsAppRequests"
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
+
+ /status/applications/{appName}/requests/active:
+ summary: "Endpoint for the `active` requests number"
+ get:
+ operationId: getStatusApplicationsAppRequestsActive
+ summary: "Retrieve the active requests app status number"
+ description: "Retrieves the `active` requests number that represents
+ Unit's per-app
+ [request statistics](https://unit.nginx.org/usagestats/)."
+
+ tags:
+ - status
+
+ parameters:
+ - $ref: "#/components/parameters/appName"
+
+ responses:
+ "200":
+ description: "OK; the `active` number exists in the configuration."
+
+ content:
+ application/json:
+ schema:
+ type: integer
+
+ examples:
+ Active:
+ value: 15
+
+ "404":
+ $ref: "#/components/responses/responseNotFound"
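+
+ # Not part of the spec: any leaf of the status tree can be queried directly.
+ # For an app named `wp`, as in the examples further down, the request
+ #
+ #   curl --unix-socket /var/run/control.unit.sock \
+ #        http://localhost/status/applications/wp/requests/active
+ #
+ # returns a bare integer such as 15.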
+
+components:
+ # -- PARAMETERS --
+
+ parameters:
+ appName:
+ in: path
+ description: "An application's name in the configuration."
+ name: appName
+ required: true
+ schema:
+ type: string
+
+ arrayIndex:
+ in: path
+ description: "A zero-based index in a configuration array."
+ name: arrayIndex
+ required: true
+ schema:
+ type: integer
+
+ arrayIndex2:
+ in: path
+ description: "A zero-based index in a configuration array."
+ name: arrayIndex2
+ required: true
+ schema:
+ type: integer
+
+ bundleName:
+ in: path
+ description: "A certificate bundle's name."
+ name: bundleName
+ required: true
+ schema:
+ type: string
+
+ listenerName:
+ in: path
+ description: "Listener name; a unique combination of a host IP address
+ (or a `*` wildcard to match any host IP address), followed by a colon
+ and a port number, such as `127.0.0.1:80` or `*:443`."
+
+ name: listenerName
+ required: true
+ schema:
+ type: string
+
+ mimeType:
+ in: path
+ description: "A MIME type name, such as `text/x-code` or
+ `application/json`."
+ name: mimeType
+ required: true
+ schema:
+ type: string
+
+ # -- EXAMPLES --
+
+ examples:
+ # -- RESPONSE EXAMPLES --
+
+ errorInvalidJson:
+ summary: "400 error response"
+ value:
+ error: "Invalid JSON."
+ detail: 'A valid JSON value is expected here. It must be either a
+ literal (null, true, or false), a number, a string (in double quotes
+ ""), an array (with brackets []), or an object (with braces {}).'
+
+ location:
+ offset: 0
+ line: 1
+ column: 0
+
+ errorValueDoesntExist:
+ summary: "404 error response"
+ value:
+ error: "Value doesn't exist."
+
+ errorInternalError:
+ summary: "500 error response"
+ value:
+ error: "Failed to apply new configuration."
+
+ successReconfigurationDone:
+ summary: "Success response"
+ value:
+ success: "Reconfiguration done."
+
+ # -- CONFIGURATION EXAMPLES --
+
+ # /certificates
+ cert:
+ summary: "Certificate bundle or bundles"
+ value:
+ bundle:
+ key: "RSA (4096 bits)"
+ chain:
+ - subject:
+ common_name: "example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+ alt_names:
+ - "example.com"
+ - "www.example.com"
+
+ issuer:
+ common_name: "intermediate.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+
+ validity:
+ since: "Feb 22 22:45:55 2023 GMT"
+ until: "Feb 21 22:45:55 2026 GMT"
+
+ - subject:
+ common_name: "intermediate.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme Certification Authority"
+
+ issuer:
+ common_name: "root.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme Certification Authority"
+
+ validity:
+ since: "Sep 18 19:46:19 2022 GMT"
+ until: "Jun 15 19:46:19 2025 GMT"
+
+ # /certificates/{bundleName}
+ certBundle:
+ summary: "Single certificate bundle"
+ value:
+ key: "RSA (4096 bits)"
+ chain:
+ - subject:
+ common_name: "example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+ alt_names:
+ - "example.com"
+ - "www.example.com"
+
+ issuer:
+ common_name: "intermediate.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+
+ validity:
+ since: "Feb 22 22:45:55 2023 GMT"
+ until: "Feb 21 22:45:55 2026 GMT"
+
+ - subject:
+ common_name: "intermediate.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme Certification Authority"
+
+ issuer:
+ common_name: "root.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme Certification Authority"
+
+ validity:
+ since: "Sep 18 19:46:19 2022 GMT"
+ until: "Jun 15 19:46:19 2025 GMT"
+
+ # /certificates/{bundleName}/chain
+ certBundleChain:
+ summary: "Certificate chain"
+ value:
+ - subject:
+ common_name: "example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+ alt_names:
+ - "example.com"
+ - "www.example.com"
+
+ issuer:
+ common_name: "intermediate.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+
+ validity:
+ since: "Feb 22 22:45:55 2023 GMT"
+ until: "Feb 21 22:45:55 2026 GMT"
+
+ - subject:
+ common_name: "intermediate.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme Certification Authority"
+
+ issuer:
+ common_name: "root.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme Certification Authority"
+
+ validity:
+ since: "Sep 18 19:46:19 2022 GMT"
+ until: "Jun 15 19:46:19 2025 GMT"
+
+ # /certificates/{bundleName}/chain/{arrayIndex}
+ certBundleChainCert:
+ summary: "Single certificate"
+ value:
+ subject:
+ common_name: "example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+ alt_names:
+ - "example.com"
+ - "www.example.com"
+
+ issuer:
+ common_name: "intermediate.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+
+ validity:
+ since: "Feb 22 22:45:55 2023 GMT"
+ until: "Feb 21 22:45:55 2026 GMT"
+
+ # /certificates/{bundleName}/chain/{arrayIndex}/issuer
+ certBundleChainCertIssuer:
+ summary: "Certificate's issuer"
+ value:
+ common_name: "intermediate.ca.example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+
+ # /certificates/{bundleName}/chain/{arrayIndex}/subject
+ certBundleChainCertSubj:
+ summary: "Certificate's subject"
+ value:
+ common_name: "example.com"
+ country: "US"
+ state_or_province: "CA"
+ organization: "Acme, Inc."
+ alt_names:
+ - "example.com"
+ - "www.example.com"
+
+ # /certificates/{bundleName}/chain/{arrayIndex}/validity
+ certBundleChainCertValidity:
+ summary: "Certificate's validity"
+ value:
+ since: "Feb 22 22:45:55 2023 GMT"
+ until: "Feb 21 22:45:55 2026 GMT"
+
+ # /config
+ config:
+ summary: "The entire /config section of the API"
+ value:
+ access_log: "/var/log/unit/access.log"
+
+ applications:
+ nodejsapp:
+ type: "external"
+ working_directory: "/www/app/node-app/"
+ executable: "app.js"
+ user: "www"
+ group: "www"
+ arguments:
+ - "--tmp-files"
+ - "/tmp/node-cache"
+
+ pythonapp:
+ type: "python 3.11"
+ processes: 16
+ working_directory: "/www/app/python-app/"
+ path: "blog"
+ module: "blog.wsgi"
+ user: "www"
+ group: "www"
+ stderr: "stderr.log"
+ isolation:
+ rootfs: "/www/"
+
+ routes:
+ local:
+ - action:
+ share: "/www/local/"
+
+ global:
+ - match:
+ host: "backend.example.com"
+
+ action:
+ pass: "applications/pythonapp"
+
+ - action:
+ pass: "applications/nodejsapp"
+
+ listeners:
+ 127.0.0.1:8080:
+ pass: "routes/local"
+
+ "*:443":
+ pass: "routes/global"
+ tls:
+ certificate: "bundle"
+ conf_commands:
+ ciphersuites: "TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256"
+ minprotocol: "TLSv1.3"
+
+ session:
+ cache_size: 10240
+ timeout: 60
+ tickets:
+ - "IAMkP16P8OBuqsijSDGKTpmxrzfFNPP4EdRovXH2mqstXsodPC6MqIce5NlMzHLP"
+ - "Ax4bv/JvMWoQG+BfH0feeM9Qb32wSaVVKOj1+1hmyU8ORMPHnf3Tio8gLkqm2ifC"
+
+ forwarded:
+ client_ip: "X-Forwarded-For"
+ recursive: false
+ source:
+ - "192.0.2.0/24"
+ - "198.51.100.0/24"
+
+ settings:
+ http:
+ body_read_timeout: 30
+ discard_unsafe_fields: true
+ header_read_timeout: 30
+ idle_timeout: 180
+ log_route: true
+ max_body_size: 8388608
+ send_timeout: 30
+ server_version: false
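+
+ # Not part of the spec: a complete configuration like the example above can
+ # be applied in one step by uploading its JSON equivalent to /config;
+ # `config.json` here is a hypothetical local file with that payload:
+ #
+ #   curl -X PUT -d @config.json --unix-socket /var/run/control.unit.sock \
+ #        http://localhost/config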
+
+ # /config/access_log
+ configAccessLogBasic:
+ summary: "Basic access_log string"
+ value: "/var/log/unit/access.log"
+
+ # /config/access_log
+ configAccessLogComplex:
+ summary: "Complex access_log object"
+ value:
+ path: "/var/log/unit/access.log"
+ format: '$remote_addr - - [$time_local] "$request_line" $status
+ $body_bytes_sent "$header_referer" "$header_user_agent"'
+
+ # /config/applications/{appName}
+ configApplication:
+ summary: "Individual Unit application"
+ value:
+ type: "python 3.11"
+ processes: 16
+ working_directory: "/www/app/python-app/"
+ path: "blog"
+ module: "blog.wsgi"
+ user: "www"
+ group: "www"
+ stderr: "stderr.log"
+ stdout: "stdout.log"
+ isolation:
+ rootfs: "/www/"
+
+ # /config/applications
+ configApplications:
+ summary: "Entire Unit applications section"
+ value:
+ nodejsapp:
+ type: "external"
+ working_directory: "/www/app/node-app/"
+ executable: "app.js"
+ user: "www"
+ group: "www"
+ arguments:
+ - "--tmp-files"
+ - "/tmp/node-cache"
+
+ pythonapp:
+ type: "python 3.11"
+ processes: 16
+ working_directory: "/www/app/python-app/"
+ path: "blog"
+ module: "blog.wsgi"
+ user: "www"
+ group: "www"
+ stderr: "stderr.log"
+ isolation:
+ rootfs: "/www/"
+
+ # /config/listeners
+ configListeners:
+ summary: "Multiple listeners"
+ value:
+ 127.0.0.1:8080:
+ pass: "applications/wp_emea_dev"
+ "*:443":
+ pass: "applications/php_app/target"
+ tls:
+ certificate: "bundle"
+ conf_commands:
+ ciphersuites: "TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256"
+ minprotocol: "TLSv1.3"
+ session:
+ cache_size: 10240
+ timeout: 60
+ tickets:
+ - "IAMkP16P8OBuqsijSDGKTpmxrzfFNPP4EdRovXH2mqstXsodPC6MqIce5NlMzHLP"
+ - "Ax4bv/JvMWoQG+BfH0feeM9Qb32wSaVVKOj1+1hmyU8ORMPHnf3Tio8gLkqm2ifC"
+ forwarded:
+ client_ip: "X-Forwarded-For"
+ recursive: false
+ source:
+ - "192.0.2.0/24"
+ - "198.51.100.0/24"
+
+ # /config/listeners/{listenerName}
+ configListenerSimple:
+ summary: "Simple listener object"
+ value:
+ pass: "applications/wp_emea_dev"
+
+ # /config/listeners/{listenerName}
+ configListenerComplex:
+ summary: "Elaborate listener object"
+ value:
+ pass: "applications/php_app/target"
+ tls:
+ certificate: "bundle"
+ conf_commands:
+ ciphersuites: "TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256"
+ minprotocol: "TLSv1.3"
+ session:
+ cache_size: 10240
+ timeout: 60
+ tickets:
+ - "IAMkP16P8OBuqsijSDGKTpmxrzfFNPP4EdRovXH2mqstXsodPC6MqIce5NlMzHLP"
+ - "Ax4bv/JvMWoQG+BfH0feeM9Qb32wSaVVKOj1+1hmyU8ORMPHnf3Tio8gLkqm2ifC"
+ forwarded:
+ client_ip: "X-Forwarded-For"
+ recursive: false
+ protocol: "http"
+ source:
+ - "192.0.2.0/24"
+ - "198.51.100.0/24"
+
+ # /config/listeners/{listenerName}/forwarded
+ configListenerForwarded:
+ summary: "Originating IP identification configuration object"
+ value:
+ client_ip: "X-Forwarded-For"
+ recursive: false
+ source:
+ - "192.0.2.0/24"
+ - "198.51.100.0/24"
+
+ # /config/listeners/{listenerName}/forwarded/source
+ configListenerForwardedSourceArray:
+ summary: "Array of source address patterns"
+ value:
+ - "192.0.2.0/24"
+ - "198.51.100.0/24"
+
+ # /config/listeners/{listenerName}/forwarded/source
+ configListenerForwardedSourceString:
+ summary: "Single source address pattern"
+ value: "192.0.2.0/24"
+
+ # /config/listeners/{listenerName}/forwarded/client_ip
+ configListenerForwardedClientIp:
+ summary: "Client IP headers expected by a listener"
+ value: "X-Forwarded-For"
+
+ # /config/listeners/{listenerName}/forwarded/protocol
+ configListenerForwardedProtocol:
+ summary: "Protocol header expected by a listener"
+ value: "http"
+
+ # /config/listeners/{listenerName}/pass
+ configListenerPassApp:
+ summary: "Application destination in a listener"
+ value: "applications/wp_emea_dev"
+
+ # /config/listeners/{listenerName}/pass
+ configListenerPassRoute:
+ summary: "Route destination in a listener"
+ value: "routes/staticsite"
+
+ # /config/listeners/{listenerName}/pass
+ configListenerPassAppTarget:
+ summary: "App target destination in a listener"
+ value: "applications/php_app/index_target"
+
+ # /config/listeners/{listenerName}/tls
+ configListenerTls:
+ summary: "TLS object in a listener"
+ value:
+ certificate: "bundle"
+ conf_commands:
+ ciphersuites: "TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256"
+ minprotocol: "TLSv1.3"
+ session:
+ cache_size: 10240
+ timeout: 60
+ tickets:
+ - "IAMkP16P8OBuqsijSDGKTpmxrzfFNPP4EdRovXH2mqstXsodPC6MqIce5NlMzHLP"
+ - "Ax4bv/JvMWoQG+BfH0feeM9Qb32wSaVVKOj1+1hmyU8ORMPHnf3Tio8gLkqm2ifC"
+
+ # /config/listeners/{listenerName}/tls/certificate
+ configListenerTlsCertificateArray:
+ summary: "Array of certificate bundle names"
+ value:
+ - bundle_old
+ - bundle_new
+
+ # /config/listeners/{listenerName}/tls/certificate
+ configListenerTlsCertificateString:
+ summary: "Single certificate bundle name"
+ value: bundle
+
+ # /config/listeners/{listenerName}/tls/conf_commands
+ configListenerTlsConfCommands:
+ summary: "TLS configuration commands in an object"
+ value:
+ ciphersuites: "TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256"
+ minprotocol: "TLSv1.3"
+
+ # /config/listeners/{listenerName}/tls/session
+ configListenerTlsSession:
+ summary: "Session settings object"
+ value:
+ cache_size: 10240
+ timeout: 60
+ tickets:
+ - "IAMkP16P8OBuqsijSDGKTpmxrzfFNPP4EdRovXH2mqstXsodPC6MqIce5NlMzHLP"
+ - "Ax4bv/JvMWoQG+BfH0feeM9Qb32wSaVVKOj1+1hmyU8ORMPHnf3Tio8gLkqm2ifC"
+
+ # /config/listeners/{listenerName}/tls/session/tickets
+ configListenerTlsSessionTicketsBool:
+ summary: "Boolean value that enables or disables random tickets"
+ value: true
+
+ # /config/listeners/{listenerName}/tls/session/tickets
+ configListenerTlsSessionTicketsString:
+ summary: "Single session ticket key"
+ value: "IAMkP16P8OBuqsijSDGKTpmxrzfFNPP4EdRovXH2mqstXsodPC6MqIce5NlMzHLP"
+
+ # /config/listeners/{listenerName}/tls/session/tickets
+ configListenerTlsSessionTicketsArray:
+ summary: "Multiple session ticket keys"
+ value:
+ - "IAMkP16P8OBuqsijSDGKTpmxrzfFNPP4EdRovXH2mqstXsodPC6MqIce5NlMzHLP"
+ - "Ax4bv/JvMWoQG+BfH0feeM9Qb32wSaVVKOj1+1hmyU8ORMPHnf3Tio8gLkqm2ifC"
+
+ # /config/routes
+ configRoutes:
+ summary: "Routes array"
+ value:
+ - action:
+ pass: "applications/${host}_php_handler"
+
+ match:
+ arguments:
+ mode: "strict"
+ fullAccess: "true"
+
+ uri: "~^/data/www/.*\\.php(/.*)?$"
+
+ - action:
+ share: "/www/data$uri"
+
+ match:
+ headers:
+ - User-Agent: "curl*"
+
+ source:
+ - "!192.168.1.1"
+ - "!10.1.1.0/16"
+ - "192.168.1.0/24"
+ - "2001:0db8::/32"
+
+ - action:
+ return: 301
+ location: "https://www.example.com"
+
+ # /config/settings
+ configSettings:
+ summary: "Global settings"
+ value:
+ http:
+ body_read_timeout: 30
+ discard_unsafe_fields: true
+ header_read_timeout: 30
+ idle_timeout: 180
+ log_route: true
+ max_body_size: 8388608
+ send_timeout: 30
+ server_version: false
+ static:
+ mime_types:
+ "text/x-code":
+ - ".c"
+ - ".h"
+
+ # /config/settings/http
+ configSettingsHttp:
+ summary: "HTTP settings"
+ value:
+ body_read_timeout: 30
+ discard_unsafe_fields: true
+ header_read_timeout: 30
+ idle_timeout: 180
+ log_route: true
+ max_body_size: 8388608
+ send_timeout: 30
+ server_version: false
+ static:
+ mime_types:
+ "text/x-code":
+ - ".c"
+ - ".h"
+
+ # /config/settings/http/static
+ configSettingsHttpStatic:
+ summary: "Static content settings"
+ value:
+ mime_types:
+ "text/x-code":
+ - ".c"
+ - ".h"
+
+ # /config/settings/http/static/mime_types/{optionName}
+ configSettingsHttpStaticMimeType:
+ summary: "Individual MIME type"
+ value:
+ - ".c"
+ - ".h"
+
+ # /config/settings/http/static/mime_types
+ configSettingsHttpStaticMimeTypes:
+ summary: "MIME types recognized by Unit"
+ value:
+ "text/x-code":
+ - ".c"
+ - ".h"
+
+ # /status
+ status:
+ summary: "Regular status object"
+ value:
+ connections:
+ accepted: 1067
+ active: 13
+ idle: 4
+ closed: 1050
+ requests:
+ total: 1307
+ applications:
+ wp:
+ processes:
+ running: 9
+ starting: 1
+ idle: 0
+ requests:
+ active: 15
+
+ # /status/connections
+ statusConnections:
+ summary: "Regular connections status object"
+ value:
+ accepted: 1067
+ active: 13
+ idle: 4
+ closed: 1050
+
+ # /status/applications
+ statusApplications:
+ summary: "Regular applications status object"
+ value:
+ wp:
+ processes:
+ running: 9
+ starting: 1
+ idle: 0
+ requests:
+ active: 15
+
+ # /status/applications/{appName}
+ statusApplicationsApp:
+ summary: "Regular app status object"
+ value:
+ processes:
+ running: 9
+ starting: 1
+ idle: 0
+ requests:
+ active: 15
+
+ # /status/applications/{appName}/processes
+ statusApplicationsAppProcesses:
+ summary: "Regular app processes status object"
+ value:
+ running: 9
+ starting: 1
+ idle: 0
+
+ # /status/applications/{appName}/requests
+ statusApplicationsAppRequests:
+ summary: "Regular app requests status object"
+ value:
+ active: 15
+
+ # /status/requests
+ statusRequests:
+ summary: "Regular requests status object"
+ value:
+ total: 1307
+
+ # -- RESPONSES --
+
+ responses:
+ responseOkDeleted:
+ description: "OK; the value was deleted."
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/jsonSuccessMessage"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/successReconfigurationDone"
+
+ responseOkUpdated:
+ description: "OK; the value was updated."
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/jsonSuccessMessage"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/successReconfigurationDone"
+
+ responseBadRequest:
+ description: "Bad Request; invalid JSON payload was provided.
+ This may occur if the payload supplied doesn't match the JSON schema for
+ the respective configuration section."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/jsonErrorMessage"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/errorInvalidJson"
+
+ responseNotFound:
+ description: "Not Found; the value does not exist in the configuration.
+ This may occur if any part of the path is non-existent."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/jsonErrorMessage"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/errorValueDoesntExist"
+
+ responseInternalError:
+ description: "Internal server error; the configuration wasn't applied.
+ This may occur with misconfigured paths, wrong permissions, etc."
+
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/jsonErrorMessage"
+
+ examples:
+ example1:
+ $ref: "#/components/examples/errorInternalError"
+
+ # -- SCHEMAS --
+
+ schemas:
+ # -- GENERIC REUSABLE OBJECTS --
+
+ stringArray:
+ type: array
+ description: "An array of strings."
+ items:
+ type: string
+
+ stringOrStringArray:
+ description: "A string or an array of strings."
+ oneOf:
+ - type: string
+ - $ref: "#/components/schemas/stringArray"
+
+ jsonSuccessMessage:
+ type: object
+ description: "JSON message on success."
+ additionalProperties:
+ type: string
+
+ jsonErrorMessage:
+ type: object
+ description: "JSON message on error."
+ additionalProperties:
+ type: string
+
+ # Configuration sections as data types; hugely reliant on each other
+
+ # /certificates
+ cert:
+ type: object
+ description: "An object whose options represent certificate bundles."
+ additionalProperties:
+ $ref: "#/components/schemas/certBundle"
+
+ # /certificates/{bundleName}
+ certBundle:
+ type: object
+ description: "An object whose options represent a certificate bundle."
+ properties:
+ key:
+ type: string
+ description: "Certificate bundle's key type, for example RSA or ECDSA."
+
+ chain:
+ $ref: "#/components/schemas/certBundleChain"
+
+ # /certificates/{bundleName}/chain
+ certBundleChain:
+ type: array
+ description: "An array whose items represent certificates in a bundle."
+ items:
+ $ref: "#/components/schemas/certBundleChainCert"
+
+ # /certificates/{bundleName}/chain/{certIndex}
+ certBundleChainCert:
+ type: object
+ description: "An object that represents an individual certificate."
+ properties:
+ subject:
+ $ref: "#/components/schemas/certBundleChainCertSubj"
+
+ issuer:
+ $ref: "#/components/schemas/certBundleChainCertIssuer"
+
+ validity:
+ $ref: "#/components/schemas/certBundleChainCertValidity"
+
+ # /certificates/{bundleName}/chain/{certIndex}/subject
+ certBundleChainCertSubj:
+ type: object
+ description: "An object that represents a certificate's subject."
+ properties:
+ common_name:
+ type: string
+
+ country:
+ type: string
+
+ state_or_province:
+ type: string
+
+ organization:
+ type: string
+
+ alt_names:
+ $ref: "#/components/schemas/stringArray"
+
+ # /certificates/{bundleName}/chain/{certIndex}/issuer
+ certBundleChainCertIssuer:
+ type: object
+ description: "An object that represents a certificate's issuer."
+ properties:
+ common_name:
+ type: string
+
+ country:
+ type: string
+
+ state_or_province:
+ type: string
+
+ organization:
+ type: string
+
+ # /certificates/{bundleName}/chain/{certIndex}/validity
+ certBundleChainCertValidity:
+ type: object
+ description: "An object that represents the validity of a certificate."
+ properties:
+ since:
+ type: string
+
+ until:
+ type: string
+
+ # /config
+ config:
+ type: object
+ description: "The entire /config section of the API."
+ properties:
+ access_log:
+ $ref: "#/components/schemas/configAccessLog"
+
+ applications:
+ $ref: "#/components/schemas/configApplications"
+
+ routes:
+ $ref: "#/components/schemas/configRoutes"
+
+ listeners:
+ $ref: "#/components/schemas/configListeners"
+
+ settings:
+ $ref: "#/components/schemas/configSettings"
+
+ # /config/access_log
+ configAccessLog:
+ description: "Configures the access log."
+ anyOf:
+ - type: string
+ - $ref: "#/components/schemas/configAccessLogObject"
+
+ # /config/access_log
+ configAccessLogObject:
+ description: "Configures the access log."
+ type: object
+ properties:
+ format:
+ type: string
+ description: "Sets the log format. Besides arbitrary text, it can
+ contain any variables Unit supports."
+
+ default: '$remote_addr - - [$time_local] "$request_line" $status
+ $body_bytes_sent "$header_referer" "$header_user_agent"'
+
+ path:
+ type: string
+ description: "Pathname of the access log file."
+
+ # /config/applications
+ configApplications:
+ type: object
+ description: "An object whose options define individual applications."
+ additionalProperties:
+ $ref: "#/components/schemas/configApplication"
+
+ # /config/applications/{appName}
+ configApplication:
+ type: object
+ description: "An object that defines an individual application."
+ anyOf:
+ - $ref: "#/components/schemas/configApplicationExternal"
+ - $ref: "#/components/schemas/configApplicationJava"
+ - $ref: "#/components/schemas/configApplicationPerl"
+ - $ref: "#/components/schemas/configApplicationPHP"
+ - $ref: "#/components/schemas/configApplicationPython"
+ - $ref: "#/components/schemas/configApplicationRuby"
+
+ discriminator:
+ propertyName: type
+ mapping:
+ external: "#/components/schemas/configApplicationExternal"
+ java: "#/components/schemas/configApplicationJava"
+ perl: "#/components/schemas/configApplicationPerl"
+ php: "#/components/schemas/configApplicationPHP"
+ python: "#/components/schemas/configApplicationPython"
+ ruby: "#/components/schemas/configApplicationRuby"
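+
+ # Not part of the spec: the discriminator above dispatches on `type`, so a
+ # minimal object such as
+ #
+ #   { "type": "php", "root": "/www/app/" }
+ #
+ # is validated against configApplicationPHP; the path is an arbitrary example,
+ # and real `type` values may also carry a version, as in "php 8.2".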
+
+ # ABSTRACT BASE SCHEMA, NOT PRESENT IN THE CONFIGURATION; STORES COMMON OPTIONS
+ configApplicationCommon:
+ type: object
+ description: "Common application object options."
+ required:
+ - type
+
+ properties:
+ type:
+ type: string
+ description: "Application type and language version."
+ enum: [external, java, perl, php, python, ruby]
+
+ environment:
+ type: object
+ description: "Environment variables to be passed to the app."
+ additionalProperties:
+ type: string
+
+ group:
+ type: string
+ description: "Group name that runs the app process."
+
+ isolation:
+ type: object
+ description: "Manages the isolation of an application process."
+ properties:
+ automount:
+ type: object
+ description: "Controls mount behavior if rootfs is enabled."
+ properties:
+ language_deps:
+ type: boolean
+ description: "Controls whether the language runtime
+ dependencies are automounted."
+
+ default: true
+
+ procfs:
+ type: boolean
+ description: "Controls whether the procfs is automounted."
+ default: true
+
+ tmpfs:
+ type: boolean
+ description: "Controls whether the tmpfs is automounted."
+ default: true
+
+ cgroup:
+ type: object
+ description: "Defines the app’s cgroup."
+ required:
+ - path
+
+ properties:
+ path:
+ type: string
+ description: "Configures absolute or relative path of the app
+ in the cgroups v2 hierarchy."
+
+ gidmap:
+ type: array
+ description: "Array of group ID mapping objects."
+ items:
+ type: object
+ description: "Group ID mapping object."
+ required:
+ - container
+ - host
+ - size
+
+ properties:
+ container:
+ type: integer
+ description: "Starts the group ID mapping range in the
+ app’s namespace."
+
+ host:
+ type: integer
+ description: "Starts the group ID mapping range in the
+ OS namespace."
+
+ size:
+ type: integer
+ description: "Size of the ID range in both namespaces."
+
+ namespaces:
+ type: object
+ properties:
+ cgroup:
+ type: boolean
+ description: "Creates a new cgroup namespace for the app."
+ default: false
+
+ credential:
+ type: boolean
+ description: "Creates a new user namespace for the app."
+ default: false
+
+ mount:
+ type: boolean
+ description: "Creates a new mount namespace for the app."
+ default: false
+
+ network:
+ type: boolean
+ description: "Creates a new network namespace for the app."
+ default: false
+
+ pid:
+ type: boolean
+ description: "Creates a new PID namespace for the app."
+ default: false
+
+ uname:
+ type: boolean
+ description: "Creates a new UTS namespace for the app."
+ default: false
+
+ rootfs:
+ type: string
+ description: "Pathname of the directory to be used as the new
+ file system root for the app."
+
+ uidmap:
+ type: array
+ description: "Array of user ID mapping objects."
+ items:
+ type: object
+ description: "User ID mapping object."
+ required:
+ - container
+ - host
+ - size
+
+ properties:
+ container:
+ type: integer
+ description: "Starts the user ID mapping range in the
+ app’s namespace."
+
+ host:
+ type: integer
+ description: "Starts the user ID mapping range in the
+ OS namespace."
+
+ size:
+ type: integer
+ description: "Size of the ID range in both namespaces."
+
+ limits:
+ type: object
+ description: "Governs the life cycle of an application process."
+ properties:
+ requests:
+ type: integer
+ description: "Maximum number of requests an app process
+ can serve."
+
+ timeout:
+ type: integer
+ description: "Request timeout in seconds."
+
+ processes:
+ description: "Governs the behavior of app processes."
+ anyOf:
+ - type: integer
+ - type: object
+ properties:
+ idle_timeout:
+ type: integer
+ description: "Number of seconds Unit waits before terminating an
+ idle process that exceeds the `spare` minimum."
+
+ max:
+ type: integer
+ description: "Maximum number of application processes that
+ Unit maintains (busy and idle)."
+
+ default: 1
+
+ spare:
+ type: integer
+ description: "Minimum number of idle processes that Unit tries
+ to maintain for an app."
+
+ default: 1
+
+ user:
+ type: string
+ description: "Username that runs the app process."
+
+ stderr:
+ type: string
+ description: "Filename where Unit redirects the app's stderr stream."
+
+ stdout:
+ type: string
+ description: "Filename where Unit redirects the app's stdout stream."
+
+ working_directory:
+ type: string
+ description: "The app’s working directory."
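+
+ # Not part of the spec: per the anyOf above, `processes` accepts either a bare
+ # integer or an object; both sketches below use arbitrary example values:
+ #
+ #   "processes": 4
+ #   "processes": { "max": 10, "spare": 2, "idle_timeout": 20 }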
+
+ configApplicationExternal:
+ description: "Go or Node.js application on Unit."
+ allOf:
+ - $ref: "#/components/schemas/configApplicationCommon"
+ - type: object
+ required:
+ - executable
+
+ properties:
+ executable:
+ type: string
+ description: "Pathname of the app, absolute or relative
+ to `working_directory`."
+
+ arguments:
+ description: "Command-line arguments to be passed to the app."
+ $ref: "#/components/schemas/stringArray"
+
+ configApplicationJava:
+ description: "Java application on Unit."
+ allOf:
+ - $ref: "#/components/schemas/configApplicationCommon"
+ - type: object
+ required:
+ - webapp
+
+ properties:
+ webapp:
+ type: string
+ description: "Pathname of the application’s .war file
+ (packaged or unpackaged)."
+
+ classpath:
+ description: "Paths to your app’s required libraries
+ (may point to directories or individual .jar files)."
+
+ $ref: "#/components/schemas/stringArray"
+
+ options:
+ description: "JVM runtime options."
+ $ref: "#/components/schemas/stringArray"
+
+ thread_stack_size:
+ type: integer
+ description: "Stack size of a worker thread in bytes."
+
+ threads:
+ type: integer
+ description: "Number of worker threads per app process."
+ default: 1
+
+ configApplicationPerl:
+ description: "Perl application on Unit."
+ allOf:
+ - $ref: "#/components/schemas/configApplicationCommon"
+ - type: object
+ required:
+ - script
+
+ properties:
+ script:
+ type: string
+ description: "PSGI script path."
+
+ thread_stack_size:
+ type: integer
+ description: "Stack size of a worker thread in bytes."
+
+ threads:
+ type: integer
+ description: "Number of worker threads per app process."
+ default: 1
+
+ configApplicationPHP:
+ description: "PHP application on Unit."
+ allOf:
+ - $ref: "#/components/schemas/configApplicationCommon"
+ - type: object
+ required:
+ - root
+
+ properties:
+ root:
+ type: string
+ description: "Base directory of the app’s file structure."
+
+ index:
+ type: string
+ description: "Filename added to URI paths that point to
+ directories if no `script` is set."
+
+ default: "index.php"
+
+ options:
+ type: object
+ description: "Defines the php.ini location and options."
+ properties:
+ admin:
+ type: object
+ description: "Extra directives set in PHP_INI_SYSTEM mode."
+
+ additionalProperties:
+ type: string
+
+ file:
+ type: string
+ description: "Pathname of the php.ini file."
+
+ user:
+ type: object
+ description: "Extra directives set in PHP_INI_USER mode."
+ additionalProperties:
+ type: string
+
+ script:
+ type: string
+ description: "Filename of a `root`-based PHP script that serves
+ all requests to the app."
+
+ targets:
+ type: object
+ description: "Application sections with custom `root`, `script`,
+ and `index` values."
+
+ additionalProperties:
+ type: object
+ required:
+ - root
+
+ properties:
+ root:
+ type: string
+ description: "Base directory of the target’s
+ file structure."
+
+ index:
+ type: string
+ description: "Filename added to URI paths that point to
+ directories if no `script` is set."
+
+ default: "index.php"
+
+ script:
+ type: string
+ description: "Filename of a `root`-based PHP script that
+ serves all requests to the target."
+
+ configApplicationPython:
+ description: "Python application on Unit."
+ allOf:
+ - $ref: "#/components/schemas/configApplicationCommon"
+ - type: object
+ required:
+ - module
+
+ properties:
+ module:
+ type: string
+ description: "App’s module name."
+
+ callable:
+ type: string
+ description: "Name of the `module`-based callable that Unit runs
+ as the app."
+
+ default: "application"
+
+ home:
+ type: string
+ description: "Path to the app’s virtual environment, absolute or
+ relative to `working_directory`."
+
+ path:
+ description: "Additional Python module lookup paths."
+ anyOf:
+ - type: string
+ - $ref: "#/components/schemas/stringArray"
+
+ prefix:
+ type: string
+ description: "SCRIPT_NAME context value for WSGI or the
+ root_path context value for ASGI."
+
+ protocol:
+ description: "Hints Unit that the app uses a certain interface."
+ enum:
+ - "asgi"
+ - "wsgi"
+
+ targets:
+ type: object
+ description: "App sections with custom `module` and
+ `callable` values."
+
+ additionalProperties:
+ type: object
+ required:
+ - module
+
+ properties:
+ module:
+ type: string
+ description: "Target's module name."
+
+ callable:
+ type: string
+ description: "Name of the `module`-based callable that Unit
+ runs as the target."
+
+ default: "application"
+
+ prefix:
+ type: string
+ description: "SCRIPT_NAME context value for WSGI or the
+ root_path context value for ASGI."
+
+ thread_stack_size:
+ type: integer
+ description: "Stack size of a worker thread in bytes."
+
+ threads:
+ type: integer
+ description: "Number of worker threads per app process."
+ default: 1
+
+ configApplicationRuby:
+ description: "Ruby application on Unit."
+ allOf:
+ - $ref: "#/components/schemas/configApplicationCommon"
+ - type: object
+ required:
+ - script
+
+ properties:
+ script:
+ type: string
+ description: "Rack script pathname, including the .ru extension."
+
+ hooks:
+ type: string
+ description: "Pathname of the .rb file setting the event hooks
+ invoked during the app’s lifecycle."
+
+ threads:
+ type: integer
+ description: "Number of worker threads per app process."
+ default: 1
+
+ #/config/routes
+ configRoutes:
+ description: "Configures the routes."
+ anyOf:
+ - $ref: "#/components/schemas/configRouteArray"
+ - $ref: "#/components/schemas/configRoutesObject"
+
+ #/config/routes/{routeName} or /config/routes
+ configRouteArray:
+ type: array
+ description: "An array whose items define individual route steps."
+ items:
+ $ref: "#/components/schemas/configRouteStep"
+
+ #/config/routes
+ configRoutesObject:
+ type: object
+ description: "An object whose options define individual routes."
+ additionalProperties:
+ description: "Individual route arrays."
+ $ref: "#/components/schemas/configRouteArray"
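+
+ # Not part of the spec: /config/routes accepts either shape described above;
+ # two brief sketches with arbitrary names and destinations:
+ #
+ #   [ { "action": { "pass": "applications/app" } } ]
+ #   { "main": [ { "action": { "pass": "applications/app" } } ] }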
+
+ #/config/routes/{stepIndex}
+ #/config/routes/{routeName}/{stepIndex}
+ configRouteStep:
+ type: object
+ description: "An object whose options define a step's
+ conditions and action."
+
+ required:
+ - action
+
+ properties:
+ action:
+ description: "Defines how matching requests are handled."
+ $ref: "#/components/schemas/configRouteStepAction"
+
+ match:
+ description: "Defines the step’s conditions to be matched."
+ $ref: "#/components/schemas/configRouteStepMatch"
+
+ #/config/routes/{stepIndex}/match
+ #/config/routes/{routeName}/{stepIndex}/match
+ configRouteStepMatch:
+ type: object
+ description: "An object whose options define a step's conditions."
+ properties:
+ arguments:
+ description: "Arguments supplied with the request’s query string."
+ anyOf:
+ - $ref: "#/components/schemas/configRouteStepMatchObject"
+ - $ref: "#/components/schemas/configRouteStepMatchObjectArray"
+
+ cookies:
+ description: "Cookies supplied with the request."
+ anyOf:
+ - $ref: "#/components/schemas/configRouteStepMatchObject"
+ - $ref: "#/components/schemas/configRouteStepMatchObjectArray"
+
+ destination:
+ description: "Target IP address and optional port of the request."
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ headers:
+ description: "Header fields supplied with the request."
+ anyOf:
+ - $ref: "#/components/schemas/configRouteStepMatchObject"
+ - $ref: "#/components/schemas/configRouteStepMatchObjectArray"
+
+ host:
+ description: "Host header field."
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ method:
+ description: "Method from the request line."
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ query:
+ description: "Query string."
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ scheme:
+ description: "URI scheme. Accepts only two patterns,
+ either `http` or `https`."
+
+ enum:
+ - "http"
+ - "https"
+
+ source:
+ description: "Source IP address and optional port of the request."
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ uri:
+ description: "Request target."
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ #/config/routes/{stepIndex}/match/[arguments|cookies|headers]
+ #/config/routes/{routeName}/{stepIndex}/match/[arguments|cookies|headers]
+ configRouteStepMatchObject:
+ type: object
+ description: "An object whose options define a set of conditions."
+ additionalProperties:
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ #/config/routes/{stepIndex}/match/[arguments|cookies|headers]
+ #/config/routes/{routeName}/{stepIndex}/match/[arguments|cookies|headers]
+ configRouteStepMatchObjectArray:
+ type: array
+ description: "An array whose items define sets of conditions."
+ items:
+ $ref: "#/components/schemas/configRouteStepMatchObject"
+
+ #/config/routes/{stepIndex}/action
+ #/config/routes/{routeName}/{stepIndex}/action
+ configRouteStepAction:
+ type: object
+ description: "An object whose options define a step's action."
+ oneOf:
+ - $ref: "#/components/schemas/configRouteStepActionPass"
+ - $ref: "#/components/schemas/configRouteStepActionReturn"
+ - $ref: "#/components/schemas/configRouteStepActionShare"
+
+ #/config/routes/{stepIndex}/action/pass
+ #/config/routes/{routeName}/{stepIndex}/action/pass
+ configRouteStepActionPass:
+ type: object
+ description: "An object whose single option defines a step's pass action."
+ required:
+ - pass
+ properties:
+ pass:
+ type: string
+ description: "Destination to which the action passes
+ incoming requests."
+
+ #/config/routes/{stepIndex}/action/return
+ #/config/routes/{routeName}/{stepIndex}/action/return
+ configRouteStepActionReturn:
+ type: object
+ description: "An object whose single option defines a step's
+ return action."
+
+ required:
+ - return
+
+ properties:
+ return:
+ type: integer
+ description: "Defines the HTTP response status code to be returned."
+
+ location:
+ type: string
+ description: "URI; used if the return value implies redirection."
+
+ #/config/routes/{stepIndex}/action/share
+ #/config/routes/{routeName}/{stepIndex}/action/share
+ configRouteStepActionShare:
+ type: object
+ description: "An object whose single option defines a step's
+ share action."
+
+ required:
+ - share
+
+ properties:
+ share:
+ description: "Lists file paths that are tried until a file is found."
+ $ref: "#/components/schemas/stringOrStringArray"
+
+ index:
+ type: string
+ description: "Filename; tried if `share` is a directory."
+ default: "index.html"
+
+ fallback:
+ description: "Used if the request can’t be served by `share` or `index`."
+ $ref: "#/components/schemas/configRouteStepAction"
+
+ types:
+ description: "Used to filter the shared files."
+ $ref: "#/components/schemas/stringArray"
+
+ chroot:
+ type: string
+ description: "Directory pathname that restricts the shareable paths."
+
+ follow_symlinks:
+ type: boolean
+ description: "Turns on and off symbolic link resolution."
+ default: true
+
+ traverse_mounts:
+ type: boolean
+ description: "Turns on and off mount point resolution."
+ default: true
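+
+ # Not part of the spec: a share action that falls back to an app when no file
+ # matches could look like this (paths and names are arbitrary examples):
+ #
+ #   { "share": "/www/static$uri", "fallback": { "pass": "applications/app" } }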
+
+ # /config/listeners/
+ configListeners:
+ type: object
+ description: "An object whose options are listeners."
+ additionalProperties:
+ $ref: "#/components/schemas/configListener"
+
+ # /config/listeners/{listenerName}
+ configListener:
+ type: object
+ description: "An individual listener."
+ properties:
+ tls:
+ $ref: "#/components/schemas/configListenerTls"
+ forwarded:
+ $ref: "#/components/schemas/configListenerForwarded"
+ pass:
+ type: string
+ description: "Destination to which the listener passes
+ incoming requests."
+
+ # /config/listeners/{listenerName}/tls/certificate
+ configListenerTlsCertificate:
+ description: "Refers to one or more certificate bundles uploaded earlier."
+ anyOf:
+ - type: string
+ - $ref: "#/components/schemas/stringArray"
+
+ # /config/listeners/{listenerName}/tls/conf_commands
+ configListenerTlsConfCommands:
+ type: object
+ description: "Defines the SSL configuration commands to be set for
+ the listener."
+ additionalProperties:
+ type: string
+
+ # /config/listeners/{listenerName}/tls
+ configListenerTls:
+ type: object
+ description: "Defines SSL/TLS settings for the listener."
+ required:
+ - certificate
+
+ properties:
+ conf_commands:
+ $ref: "#/components/schemas/configListenerTlsConfCommands"
+
+ session:
+ $ref: "#/components/schemas/configListenerTlsSession"
+
+ certificate:
+ $ref: "#/components/schemas/configListenerTlsCertificate"
+
+ # /config/listeners/{listenerName}/tls/session
+ configListenerTlsSession:
+ type: object
+ description: "Configures the TLS session cache and tickets for
+ the listener."
+
+ properties:
+ cache_size:
+ type: integer
+ description: "Number of sessions in the TLS session cache."
+ default: 0
+
+ timeout:
+ type: integer
+ description: "Session timeout for the TLS session cache in seconds."
+ default: 300
+
+ tickets:
+ $ref: "#/components/schemas/configListenerTlsSessionTickets"
+
+ # /config/listeners/{listenerName}/tls/session/tickets
+ configListenerTlsSessionTickets:
+ description: "Configures TLS session tickets."
+ anyOf:
+ - type: boolean
+ - type: string
+ - $ref: "#/components/schemas/stringArray"
+
+ default: false
+
+ # /config/listeners/{listenerName}/forwarded
+ configListenerForwarded:
+ type: object
+ description: "Configures client IP address and protocol replacement."
+ required:
+ - source
+
+ properties:
+ client_ip:
+ type: string
+ description: "Defines the HTTP header fields to expect in the request;
+ uses the `X-Forwarded-For` format."
+
+ source:
+ description: "Defines address-based patterns for trusted addresses."
+ anyOf:
+ - type: string
+ - $ref: "#/components/schemas/stringArray"
+
+ recursive:
+ type: boolean
+ description: "Controls how the `client_ip` fields are traversed."
+ default: false
+
+ protocol:
+ description: "Defines the relevant HTTP header field to expect in the
+ request; uses the `X-Forwarded-Proto` format."
+
+ enum:
+ - "http"
+ - "https"
+ - "on"
+
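+ # Illustrative "forwarded" object per this schema; the trusted networks
+ # are hypothetical:
+ #
+ #   "forwarded": {
+ #     "client_ip": "X-Forwarded-For",
+ #     "source": ["10.0.0.0/8", "172.16.0.0/12"],
+ #     "recursive": true
+ #   }
+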
+ # /config/settings
+ configSettings:
+ type: object
+ description: "An object whose single option represents global
+ Unit settings."
+
+ properties:
+ http:
+ description: "Represents global HTTP settings in Unit."
+ $ref: "#/components/schemas/configSettingsHttp"
+
+ # /config/settings/http
+ configSettingsHttp:
+ type: object
+ description: "An object whose options represent global HTTP settings
+ in Unit."
+
+ properties:
+ body_read_timeout:
+ type: integer
+ description: "Maximum number of seconds to read data from the body of
+ a client’s request."
+
+ default: 30
+
+ discard_unsafe_fields:
+ type: boolean
+ description: "If `true`, Unit only processes header names made of
+ alphanumerics and hyphens."
+
+ default: true
+
+ header_read_timeout:
+ type: integer
+ description: "Maximum number of seconds to read the header of a
+ client’s request."
+
+ default: 30
+
+ idle_timeout:
+ type: integer
+ description: "Maximum number of seconds between requests in a
+ keep-alive connection."
+
+ default: 180
+
+ log_route:
+ type: boolean
+ description: "Enables or disables router logging."
+ default: false
+
+ max_body_size:
+ type: integer
+ description: "Maximum number of bytes in the body of a
+ client’s request."
+
+ default: 8388608
+
+ send_timeout:
+ type: integer
+ description: "Maximum number of seconds to transmit data as a
+ response to the client."
+ default: 30
+
+ server_version:
+ type: boolean
+ description: "Enables or disables version numbers in Unit's `Server`
+ header fields."
+
+ default: true
+
+ static:
+ description: "Configures static asset handling."
+ $ref: "#/components/schemas/configSettingsHttpStatic"
+
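+ # Illustrative "settings" object using a few of the options above;
+ # the values are arbitrary, not recommendations:
+ #
+ #   "settings": {
+ #     "http": {
+ #       "header_read_timeout": 10,
+ #       "max_body_size": 16777216,
+ #       "server_version": false
+ #     }
+ #   }
+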
+ # /config/settings/http/static
+ configSettingsHttpStatic:
+ type: object
+ description: "An object whose single option defines specific MIME types."
+ properties:
+ mime_types:
+ $ref: "#/components/schemas/configSettingsHttpStaticMimeTypes"
+
+ # /config/settings/http/static/mime_types
+ configSettingsHttpStaticMimeTypes:
+ type: object
+ description: "An object whose options define individual MIME types."
+ additionalProperties:
+ $ref: "#/components/schemas/configSettingsHttpStaticMimeType"
+
+ # /config/settings/http/static/mime_types/{mimeType}
+ configSettingsHttpStaticMimeType:
+ description: "An entity that defines an individual MIME type by
+ listing file extensions."
+
+ anyOf:
+ - type: string
+ - $ref: "#/components/schemas/stringArray"
+
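+ # Illustrative "mime_types" object per the schemas above; the mappings
+ # are arbitrary examples:
+ #
+ #   "mime_types": {
+ #     "text/markdown": ".md",
+ #     "text/plain": [".log", ".txt"]
+ #   }
+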
+ # /status
+ status:
+ description: "Represents Unit's usage statistics."
+ type: object
+ properties:
+ connections:
+ $ref: "#/components/schemas/statusConnections"
+
+ requests:
+ $ref: "#/components/schemas/statusRequests"
+
+ applications:
+ $ref: "#/components/schemas/statusApplications"
+
+ # /status/applications
+ statusApplications:
+ description: "Lists Unit's application process and request statistics."
+ type: object
+ additionalProperties:
+ $ref: "#/components/schemas/statusApplicationsApp"
+
+ # /status/applications/{appName}
+ statusApplicationsApp:
+ description: "Represents Unit's per-app process and request statistics."
+ type: object
+ properties:
+ processes:
+ $ref: "#/components/schemas/statusApplicationsAppProcesses"
+
+ requests:
+ $ref: "#/components/schemas/statusApplicationsAppRequests"
+
+ # /status/applications/{appName}/processes
+ statusApplicationsAppProcesses:
+ description: "Represents Unit's per-app process statistics."
+ type: object
+ properties:
+ running:
+ type: integer
+ description: "Currently running app processes."
+
+ starting:
+ type: integer
+ description: "Currently starting app processes."
+
+ idle:
+ type: integer
+ description: "Currently idle app processes."
+
+ # /status/applications/{appName}/requests
+ statusApplicationsAppRequests:
+ description: "Represents Unit's per-app request statistics."
+ type: object
+ properties:
+ active:
+ type: integer
+ description: "Active app requests."
+
+ # /status/requests
+ statusRequests:
+ description: "Represents Unit's per-instance request statistics."
+ type: object
+ properties:
+ total:
+ type: integer
+ description: "Total non-API requests during the instance’s lifetime."
+
+ # /status/connections
+ statusConnections:
+ description: "Represents Unit's per-instance connection statistics."
+ type: object
+ properties:
+ accepted:
+ type: integer
+ description: "Total accepted connections during the
+ instance’s lifetime."
+
+ active:
+ type: integer
+ description: "Currently active connections for the instance."
+
+ idle:
+ type: integer
+ description: "Currently idle connections for the instance."
+
+ closed:
+ type: integer
+ description: "Total closed connections during
+ the instance’s lifetime."
+
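+ # Non-normative example of a /status response matching these schemas;
+ # the numbers and the app name are made up:
+ #
+ #   {
+ #     "connections": {"accepted": 1067, "active": 13, "idle": 4, "closed": 1050},
+ #     "requests": {"total": 1307},
+ #     "applications": {
+ #       "blogs": {
+ #         "processes": {"running": 2, "starting": 0, "idle": 1},
+ #         "requests": {"active": 3}
+ #       }
+ #     }
+ #   }
+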
+# -- TAGS --
+
+tags:
+ - name: access log
+ description: Everything about the access log in the /config section
+ externalDocs:
+ url: https://unit.nginx.org/configuration/#access-log
+
+ - name: apps
+ description: Everything about applications
+ externalDocs:
+ url: https://unit.nginx.org/configuration/#applications
+
+ - name: certificates
+ description: Everything about the /certificates section in Unit's control API
+ externalDocs:
+ url: https://unit.nginx.org/certificates/
+
+ - name: config
+ description: Everything about the /config section in Unit's control API
+ externalDocs:
+ url: https://unit.nginx.org/configuration/
+
+ - name: control
+ description: Everything about the /control section in Unit's control API
+ externalDocs:
+ url: https://unit.nginx.org/controlapi/
+
+ - name: listeners
+ description: Everything about listeners in the /config section
+ externalDocs:
+ url: https://unit.nginx.org/configuration/#listeners
+
+ - name: routes
+ description: Everything about routes in the /config section
+ externalDocs:
+ url: https://unit.nginx.org/configuration/#routes
+
+ - name: settings
+ description: Everything about the global settings in the /config section
+ externalDocs:
+ url: https://unit.nginx.org/configuration/#settings
+
+ - name: status
+ description: Everything about the /status section in Unit's control API
+ externalDocs:
+ url: https://unit.nginx.org/usagestats/
+
+ - name: tls
+ description: Everything about SSL/TLS in Unit's control API
+ externalDocs:
+ url: https://unit.nginx.org/certificates/
+
+ - name: xff
+ description: Everything about X-Forwarded-* handling in Unit's control API
+ externalDocs:
+ url: https://unit.nginx.org/configuration/#ip-protocol-forwarding
+
+externalDocs:
+ description: "Find us on GitHub"
+ url: "https://github.com/nginx/unit"
diff --git a/pkg/contrib/src/libunit-wasm/Makefile b/pkg/contrib/src/libunit-wasm/Makefile
new file mode 100644
index 00000000..51c24456
--- /dev/null
+++ b/pkg/contrib/src/libunit-wasm/Makefile
@@ -0,0 +1,23 @@
+# libunit-wasm
+
+include $(dir $(abspath $(lastword $(MAKEFILE_LIST))))/version
+LIBUNIT_WASM_URL := https://github.com/nginx/unit-wasm.git
+
+PKGS += libunit-wasm
+
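+# The library is compiled against the WASI sysroot provided by the
+# wasi-sysroot contrib package, hence the dependency declared below.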
+DEPS_libunit-wasm = wasi-sysroot $(DEPS_wasi-sysroot)
+
+$(TARBALLS)/libunit-wasm-$(LIBUNIT_WASM_GITHASH).tar.xz:
+ $(call download_git,$(LIBUNIT_WASM_URL),,$(LIBUNIT_WASM_GITHASH))
+
+.sum-libunit-wasm: libunit-wasm-$(LIBUNIT_WASM_GITHASH).tar.xz
+ $(call check_githash,$(LIBUNIT_WASM_GITHASH))
+ touch $@
+
+libunit-wasm: libunit-wasm-$(LIBUNIT_WASM_GITHASH).tar.xz
+ $(UNPACK)
+ $(MOVE)
+
+.libunit-wasm: libunit-wasm
+ cd $< && CFLAGS= make WASI_SYSROOT=$(TOPSRC)wasi-sysroot V=1 libunit-wasm
+ touch $@
diff --git a/pkg/contrib/src/libunit-wasm/version b/pkg/contrib/src/libunit-wasm/version
new file mode 100644
index 00000000..7ca15f98
--- /dev/null
+++ b/pkg/contrib/src/libunit-wasm/version
@@ -0,0 +1,2 @@
+LIBUNIT_WASM_VERSION := 0.1.0
+LIBUNIT_WASM_GITHASH := d6ed6a219b31a58526721f96195c80061d41ce54
diff --git a/pkg/contrib/src/njs/SHA512SUMS b/pkg/contrib/src/njs/SHA512SUMS
index c94e5638..ad8e180c 100644
--- a/pkg/contrib/src/njs/SHA512SUMS
+++ b/pkg/contrib/src/njs/SHA512SUMS
@@ -1 +1 @@
-9cac2ced65bbfd712f7797f2bfa3fb20509a7e7bd68e8621d5fad32270f6d20a015d707665222559a72f525618bc91e09986a7bedce28af5f0fec9c20be41452 njs-0.7.12.tar.gz
+200f3ae1e1909f0d8086e2fbfbd6b8654e596f3ad2e4cf4d863e201cfcb2f86a419fa9061067cbededf6a8c792c1a5ecf60c3a4c983af044c179bb9fe619eea5 njs-0.8.0.tar.gz
diff --git a/pkg/contrib/src/njs/version b/pkg/contrib/src/njs/version
index 64999f82..8c9ee6ba 100644
--- a/pkg/contrib/src/njs/version
+++ b/pkg/contrib/src/njs/version
@@ -1 +1 @@
-NJS_VERSION := 0.7.12
+NJS_VERSION := 0.8.0
diff --git a/pkg/contrib/src/wasi-sysroot/Makefile b/pkg/contrib/src/wasi-sysroot/Makefile
new file mode 100644
index 00000000..fcfb8df3
--- /dev/null
+++ b/pkg/contrib/src/wasi-sysroot/Makefile
@@ -0,0 +1,17 @@
+# wasi-sysroot
+
+include $(dir $(abspath $(lastword $(MAKEFILE_LIST))))/version
+WASI_SYSROOT_URL := https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-$(WASI_SYSROOT_VERSION_MAJOR)/wasi-sysroot-$(WASI_SYSROOT_VERSION_MAJOR).$(WASI_SYSROOT_VERSION_MINOR).tar.gz
+
+PKGS += wasi-sysroot
+
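+# The sysroot ships prebuilt; it only needs to be downloaded, verified, and
+# unpacked, so no build step follows the unpack rule below.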
+$(TARBALLS)/wasi-sysroot-$(WASI_SYSROOT_VERSION_MAJOR).$(WASI_SYSROOT_VERSION_MINOR).tar.gz:
+ $(call download_pkg,$(WASI_SYSROOT_URL),wasi-sysroot)
+
+.sum-wasi-sysroot: wasi-sysroot-$(WASI_SYSROOT_VERSION_MAJOR).$(WASI_SYSROOT_VERSION_MINOR).tar.gz
+
+wasi-sysroot: wasi-sysroot-$(WASI_SYSROOT_VERSION_MAJOR).$(WASI_SYSROOT_VERSION_MINOR).tar.gz .sum-wasi-sysroot
+ $(UNPACK)
+
+.wasi-sysroot: wasi-sysroot
+ touch $@
diff --git a/pkg/contrib/src/wasi-sysroot/SHA512SUMS b/pkg/contrib/src/wasi-sysroot/SHA512SUMS
new file mode 100644
index 00000000..a1e71fff
--- /dev/null
+++ b/pkg/contrib/src/wasi-sysroot/SHA512SUMS
@@ -0,0 +1 @@
+ad4ad629d02f01f3d2eb977dd0bc43091b0f11ed1b5dd9fdb3580e4cf49c132f6cb4982ae80eabf638f0d08d0c4c7df40cceb2be8f9d2c29abc35b8564ffda42 wasi-sysroot-20.0.tar.gz
diff --git a/pkg/contrib/src/wasi-sysroot/version b/pkg/contrib/src/wasi-sysroot/version
new file mode 100644
index 00000000..919c7098
--- /dev/null
+++ b/pkg/contrib/src/wasi-sysroot/version
@@ -0,0 +1,2 @@
+WASI_SYSROOT_VERSION_MAJOR := 20
+WASI_SYSROOT_VERSION_MINOR := 0
diff --git a/pkg/contrib/src/wasmtime/Makefile b/pkg/contrib/src/wasmtime/Makefile
new file mode 100644
index 00000000..11797fee
--- /dev/null
+++ b/pkg/contrib/src/wasmtime/Makefile
@@ -0,0 +1,30 @@
+# wasmtime
+
+include $(dir $(abspath $(lastword $(MAKEFILE_LIST))))/version
+WASMTIME_URL := https://github.com/bytecodealliance/wasmtime/releases/download/v$(WASMTIME_VERSION)/wasmtime-v$(WASMTIME_VERSION)-src.tar.gz
+
+PKGS += wasmtime
+
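+# Use the host cargo if it is found on PATH; otherwise expanding $(CARGO)
+# in the build recipe below aborts with an explanatory error.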
+ifeq ($(shell which cargo >/dev/null 2>&1 || echo FAIL),)
+CARGO = cargo
+else
+CARGO = $(error Cargo (Rust package manager) not found)
+endif
+
+ifeq ($(shell uname -s),Linux)
+WASMTIME_ARGS=-Clink-arg=-Wl,-soname,libwasmtime.so
+endif
+
+$(TARBALLS)/wasmtime-v$(WASMTIME_VERSION)-src.tar.gz:
+ $(call download_pkg,$(WASMTIME_URL),wasmtime)
+
+.sum-wasmtime: wasmtime-v$(WASMTIME_VERSION)-src.tar.gz
+
+wasmtime: wasmtime-v$(WASMTIME_VERSION)-src.tar.gz .sum-wasmtime
+ $(UNPACK)
+ $(MOVE)
+
+.wasmtime: wasmtime
+ cd $< && $(CARGO) rustc --release -p wasmtime-c-api -- $(WASMTIME_ARGS)
+ cp $</crates/c-api/wasm-c-api/include/wasm.h $</crates/c-api/include/
+ touch $@
diff --git a/pkg/contrib/src/wasmtime/SHA512SUMS b/pkg/contrib/src/wasmtime/SHA512SUMS
new file mode 100644
index 00000000..35e0e47f
--- /dev/null
+++ b/pkg/contrib/src/wasmtime/SHA512SUMS
@@ -0,0 +1 @@
+4b67ba0742da0558efffe1dbde5512dc5f0201fad25f1027d277758e76778b2add11528dbe3f5b7759f2386859b52aea3a0526abaa481c2ed91eb56c5a531b49 wasmtime-v11.0.1-src.tar.gz
diff --git a/pkg/contrib/src/wasmtime/version b/pkg/contrib/src/wasmtime/version
new file mode 100644
index 00000000..1debf1ff
--- /dev/null
+++ b/pkg/contrib/src/wasmtime/version
@@ -0,0 +1 @@
+WASMTIME_VERSION := 11.0.1
diff --git a/pkg/deb/Makefile b/pkg/deb/Makefile
index 044b7226..f82441c6 100644
--- a/pkg/deb/Makefile
+++ b/pkg/deb/Makefile
@@ -32,6 +32,7 @@ include Makefile.jsc17
include Makefile.jsc18
include Makefile.jsc19
include Makefile.jsc20
+include Makefile.wasm
endif
# Ubuntu 22.10
@@ -47,6 +48,7 @@ include Makefile.jsc11
include Makefile.jsc17
include Makefile.jsc18
include Makefile.jsc19
+include Makefile.wasm
endif
# Ubuntu 22.04
@@ -61,6 +63,7 @@ include Makefile.jsc-common
include Makefile.jsc11
include Makefile.jsc17
include Makefile.jsc18
+include Makefile.wasm
endif
# Ubuntu 21.10
@@ -77,6 +80,7 @@ include Makefile.jsc11
include Makefile.jsc16
include Makefile.jsc17
include Makefile.jsc18
+include Makefile.wasm
endif
# Ubuntu 20.04
@@ -89,6 +93,7 @@ include Makefile.perl
include Makefile.ruby
include Makefile.jsc-common
include Makefile.jsc11
+include Makefile.wasm
endif
# Ubuntu 18.04
@@ -104,6 +109,19 @@ include Makefile.ruby
include Makefile.jsc-common
include Makefile.jsc8
include Makefile.jsc11
+include Makefile.wasm
+endif
+
+# Debian 12
+ifeq ($(CODENAME),bookworm)
+include Makefile.php
+include Makefile.python311
+include Makefile.go
+include Makefile.perl
+include Makefile.ruby
+include Makefile.jsc-common
+include Makefile.jsc17
+include Makefile.wasm
endif
# Debian 12
@@ -127,6 +145,7 @@ include Makefile.perl
include Makefile.ruby
include Makefile.jsc-common
include Makefile.jsc11
+include Makefile.wasm
endif
# Debian 10
@@ -139,13 +158,16 @@ include Makefile.perl
include Makefile.ruby
include Makefile.jsc-common
include Makefile.jsc11
+include Makefile.wasm
endif
CONFIGURE_ARGS_COMMON=\
--prefix=/usr \
--statedir=/var/lib/unit \
--control="unix:/var/run/control.unit.sock" \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -232,7 +254,7 @@ debuild/unit_$(VERSION).orig.tar.gz: | debuild/$(SRCDIR)/debian
unit: check-build-depends-unit debuild/unit_$(VERSION).orig.tar.gz debuild/$(SRCDIR)/debian/changelog
@echo "===> Building $@ package"
- cd debuild/$(SRCDIR) && debuild -us -uc
+ cd debuild/$(SRCDIR) && debuild --preserve-envvar PATH --preserve-envvar RUSTUP_HOME -us -uc
mkdir -p debs
find debuild/ -maxdepth 1 -type f -exec cp {} debs/ \;
ln -s debuild/$(SRCDIR)/build $@
@@ -305,7 +327,7 @@ endif
unit-%: check-build-depends-% | debuild-%
@echo "===> Building $@ package"
- cd debuild-$*/$(SRCDIR) && debuild -us -uc
+ cd debuild-$*/$(SRCDIR) && debuild --preserve-envvar PATH --preserve-envvar RUSTUP_HOME -us -uc
mkdir -p debs
find debuild-$*/ -maxdepth 1 -type f -exec cp {} debs/ \;
ln -s debuild-$*/$(SRCDIR)/build $@
diff --git a/pkg/deb/Makefile.wasm b/pkg/deb/Makefile.wasm
new file mode 100644
index 00000000..da028f19
--- /dev/null
+++ b/pkg/deb/Makefile.wasm
@@ -0,0 +1,47 @@
+MODULES+= wasm
+MODULE_SUFFIX_wasm= wasm
+
+MODULE_SUMMARY_wasm= WASM module for NGINX Unit
+
+MODULE_VERSION_wasm= $(VERSION)
+MODULE_RELEASE_wasm= 1
+
+MODULE_CONFARGS_wasm= wasm --include-path=\$$(CURDIR)/pkg/contrib/wasmtime/crates/c-api/include --lib-path=\$$(CURDIR)/pkg/contrib/wasmtime/target/release
+MODULE_MAKEARGS_wasm= wasm
+MODULE_INSTARGS_wasm= wasm-install
+
+MODULE_SOURCES_wasm=
+
+BUILD_DEPENDS_wasm=
+MODULE_BUILD_DEPENDS_wasm=
+MODULE_DEPENDS_wasm=
+
+BUILD_DEPENDS+= $(BUILD_DEPENDS_wasm)
+
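+# Prebuild step: compile the bundled wasmtime C API from pkg/contrib before
+# the wasm module itself is configured and built.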
+define MODULE_PREBUILD_wasm
+ \$$(MAKE) -C pkg/contrib .wasmtime
+endef
+export MODULE_PREBUILD_wasm
+
+define MODULE_PREINSTALL_wasm
+endef
+export MODULE_PREINSTALL_wasm
+
+define MODULE_POSTINSTALL_wasm
+ mkdir -p debian/unit-wasm/usr/lib/\$$(dpkg-architecture -q DEB_HOST_MULTIARCH)/
+ install -m 755 -p pkg/contrib/wasmtime/target/release/libwasmtime.so debian/unit-wasm/usr/lib/\$$(dpkg-architecture -q DEB_HOST_MULTIARCH)/
+endef
+export MODULE_POSTINSTALL_wasm
+
+define MODULE_POST_wasm
+cat <<BANNER
+----------------------------------------------------------------------
+
+The $(MODULE_SUMMARY_wasm) has been installed.
+
+Online documentation is available at https://unit.nginx.org
+
+----------------------------------------------------------------------
+BANNER
+endef
+export MODULE_POST_wasm
diff --git a/pkg/deb/debian.module/copyright.unit-jsc11 b/pkg/deb/debian.module/copyright.unit-jsc11
index b2e4a117..e11b64d3 100644
--- a/pkg/deb/debian.module/copyright.unit-jsc11
+++ b/pkg/deb/debian.module/copyright.unit-jsc11
@@ -1,12 +1,15 @@
NGINX Unit.
- Copyright 2017-2022 NGINX, Inc.
+ Copyright 2017-2023 NGINX, Inc.
+ Copyright 2017-2023 Andrei Zeliankou
+ Copyright 2018-2023 Konstantin Pavlov
+ Copyright 2021-2023 Zhidao Hong
+ Copyright 2021-2023 Alejandro Colomar
+ Copyright 2022-2023 Andrew Clayton
+ Copyright 2022-2023 Liam Crilly
Copyright 2017-2022 Valentin V. Bartenev
Copyright 2017-2022 Max Romanov
- Copyright 2017-2022 Andrei Zeliankou
- Copyright 2018-2022 Konstantin Pavlov
- Copyright 2021-2022 Zhidao Hong
Copyright 2021-2022 Oisín Canty
Copyright 2017-2021 Igor Sysoev
Copyright 2017-2021 Andrei Belov
diff --git a/pkg/deb/debian.module/copyright.unit-jsc8 b/pkg/deb/debian.module/copyright.unit-jsc8
index 1dab9cce..1d267021 100644
--- a/pkg/deb/debian.module/copyright.unit-jsc8
+++ b/pkg/deb/debian.module/copyright.unit-jsc8
@@ -1,12 +1,15 @@
NGINX Unit.
- Copyright 2017-2022 NGINX, Inc.
+ Copyright 2017-2023 NGINX, Inc.
+ Copyright 2017-2023 Andrei Zeliankou
+ Copyright 2018-2023 Konstantin Pavlov
+ Copyright 2021-2023 Zhidao Hong
+ Copyright 2021-2023 Alejandro Colomar
+ Copyright 2022-2023 Andrew Clayton
+ Copyright 2022-2023 Liam Crilly
Copyright 2017-2022 Valentin V. Bartenev
Copyright 2017-2022 Max Romanov
- Copyright 2017-2022 Andrei Zeliankou
- Copyright 2018-2022 Konstantin Pavlov
- Copyright 2021-2022 Zhidao Hong
Copyright 2021-2022 Oisín Canty
Copyright 2017-2021 Igor Sysoev
Copyright 2017-2021 Andrei Belov
diff --git a/pkg/deb/debian.module/rules.in b/pkg/deb/debian.module/rules.in
index 861a9c00..7814fbfd 100755
--- a/pkg/deb/debian.module/rules.in
+++ b/pkg/deb/debian.module/rules.in
@@ -17,6 +17,7 @@ BASEDIR = $(CURDIR)
config.env.%:
dh_testdir
+%%MODULE_PREBUILD%%
mkdir -p $(BUILDDIR_$*)
cp -Pa $(CURDIR)/auto $(BUILDDIR_$*)/
cp -Pa $(CURDIR)/configure $(BUILDDIR_$*)/
diff --git a/pkg/deb/debian/control.in b/pkg/deb/debian/control.in
index 579f41e3..bc757233 100644
--- a/pkg/deb/debian/control.in
+++ b/pkg/deb/debian/control.in
@@ -6,7 +6,9 @@ Build-Depends: debhelper (>= 11),
linux-libc-dev,
libssl-dev,
libpcre2-dev,
- pkg-config
+ pkg-config,
+ clang,
+ llvm
Standards-Version: 4.1.4
Homepage: https://unit.nginx.org
diff --git a/pkg/deb/debian/copyright b/pkg/deb/debian/copyright
index 487c92c5..692ae2e0 100644
--- a/pkg/deb/debian/copyright
+++ b/pkg/deb/debian/copyright
@@ -1,12 +1,15 @@
NGINX Unit.
- Copyright 2017-2022 NGINX, Inc.
+ Copyright 2017-2023 NGINX, Inc.
+ Copyright 2017-2023 Andrei Zeliankou
+ Copyright 2018-2023 Konstantin Pavlov
+ Copyright 2021-2023 Zhidao Hong
+ Copyright 2021-2023 Alejandro Colomar
+ Copyright 2022-2023 Andrew Clayton
+ Copyright 2022-2023 Liam Crilly
Copyright 2017-2022 Valentin V. Bartenev
Copyright 2017-2022 Max Romanov
- Copyright 2017-2022 Andrei Zeliankou
- Copyright 2018-2022 Konstantin Pavlov
- Copyright 2021-2022 Zhidao Hong
Copyright 2021-2022 Oisín Canty
Copyright 2017-2021 Igor Sysoev
Copyright 2017-2021 Andrei Belov
diff --git a/pkg/deb/debian/dirs b/pkg/deb/debian/dirs
index 2568bac1..552cdf7c 100644
--- a/pkg/deb/debian/dirs
+++ b/pkg/deb/debian/dirs
@@ -1,4 +1,5 @@
usr/bin
usr/sbin
usr/lib/unit
+usr/include/unit
var/lib/unit
diff --git a/pkg/deb/debian/rules.in b/pkg/deb/debian/rules.in
index 0d7cf830..55a4ebec 100644
--- a/pkg/deb/debian/rules.in
+++ b/pkg/deb/debian/rules.in
@@ -25,6 +25,11 @@ njs:
cd pkg/contrib && make .njs
touch $@
+libunit-wasm:
+ dh_testdir
+ cd pkg/contrib && make .libunit-wasm
+ touch $@
+
config.env.%: njs
dh_testdir
mkdir -p $(BUILDDIR_$*)
@@ -92,7 +97,7 @@ build-arch: build-arch.unit build-arch.unit_debug
dh_testdir
touch $@
-build: build-arch build-indep
+build: build-arch build-indep libunit-wasm
dh_testdir
touch $@
@@ -123,6 +128,9 @@ install: build do.tests
install -m 644 README.md $(INSTALLDIR)/usr/share/doc/unit/
install -m 644 CONTRIBUTING.md $(INSTALLDIR)/usr/share/doc/unit/
install -m 644 NOTICE $(INSTALLDIR)/usr/share/doc/unit/
+ mkdir -p $(INSTALLDIR_dev)/usr/include/unit
+ install -m644 $(CURDIR)/pkg/contrib/libunit-wasm/src/c/libunit-wasm.a $(INSTALLDIR_dev)/usr/lib/$(DEB_HOST_MULTIARCH)/libunit-wasm.a
+ install -m644 $(CURDIR)/pkg/contrib/libunit-wasm/src/c/include/unit/unit-wasm.h $(INSTALLDIR_dev)/usr/include/unit/
binary-indep: build install
dh_testdir
diff --git a/pkg/docker/Dockerfile.go1.20 b/pkg/docker/Dockerfile.go1.20
index 50b4d5b3..98f6d92f 100644
--- a/pkg/docker/Dockerfile.go1.20
+++ b/pkg/docker/Dockerfile.go1.20
@@ -1,19 +1,21 @@
FROM golang:1.20-bullseye
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (go1.20)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
-LABEL org.opencontainers.image.version="1.30.0"
+LABEL org.opencontainers.image.version="1.31.0"
RUN set -ex \
&& savedAptMark="$(apt-mark showmanual)" \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
- && hg clone -u 1.30.0-1 https://hg.nginx.org/unit \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,6 +45,7 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && /bin/true \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
&& ./configure go --go-path=$GOPATH \
&& make -j $NCPU go-install-src libunit-install \
@@ -49,7 +54,7 @@ RUN set -ex \
&& ./configure go --go-path=$GOPATH \
&& make -j $NCPU go-install-src libunit-install \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
@@ -57,7 +62,7 @@ RUN set -ex \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
&& /bin/true \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/docker/Dockerfile.go1.21 b/pkg/docker/Dockerfile.go1.21
new file mode 100644
index 00000000..18d3cc7b
--- /dev/null
+++ b/pkg/docker/Dockerfile.go1.21
@@ -0,0 +1,89 @@
+FROM golang:1.21-bullseye
+
+LABEL org.opencontainers.image.title="Unit (go1.21)"
+LABEL org.opencontainers.image.description="Official build of Unit for Docker."
+LABEL org.opencontainers.image.url="https://unit.nginx.org"
+LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
+LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
+LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
+LABEL org.opencontainers.image.version="1.31.0"
+
+RUN set -ex \
+ && savedAptMark="$(apt-mark showmanual)" \
+ && apt-get update \
+ && apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
+ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
+ && cd unit \
+ && NCPU="$(getconf _NPROCESSORS_ONLN)" \
+ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
+ && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \
+ && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \
+ && CONFIGURE_ARGS_MODULES="--prefix=/usr \
+ --statedir=/var/lib/unit \
+ --control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
+ --pid=/var/run/unit.pid \
+ --logdir=/var/log \
+ --log=/var/log/unit.log \
+ --tmpdir=/var/tmp \
+ --user=unit \
+ --group=unit \
+ --openssl \
+ --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \
+ && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \
+ --njs" \
+ && make -j $NCPU -C pkg/contrib .njs \
+ && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \
+ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
+ && make -j $NCPU unitd \
+ && install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \
+ && make clean \
+ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \
+ && make -j $NCPU unitd \
+ && install -pm755 build/sbin/unitd /usr/sbin/unitd \
+ && make clean \
+ && /bin/true \
+ && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
+ && ./configure go --go-path=$GOPATH \
+ && make -j $NCPU go-install-src libunit-install \
+ && make clean \
+ && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \
+ && ./configure go --go-path=$GOPATH \
+ && make -j $NCPU go-install-src libunit-install \
+ && cd \
+ && rm -rf /usr/src/unit \
+ && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
+ ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
+ done \
+ && apt-mark showmanual | xargs apt-mark auto > /dev/null \
+ && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
+ && /bin/true \
+ && mkdir -p /var/lib/unit/ \
+ && mkdir -p /docker-entrypoint.d/ \
+ && groupadd --gid 999 unit \
+ && useradd \
+ --uid 999 \
+ --gid unit \
+ --no-create-home \
+ --home /nonexistent \
+ --comment "unit user" \
+ --shell /bin/false \
+ unit \
+ && apt-get update \
+ && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
+ && apt-get purge -y --auto-remove build-essential \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /requirements.apt \
+ && ln -sf /dev/stdout /var/log/unit.log
+
+COPY docker-entrypoint.sh /usr/local/bin/
+COPY welcome.* /usr/share/unit/welcome/
+
+STOPSIGNAL SIGTERM
+
+ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
+EXPOSE 80
+CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"]
diff --git a/pkg/docker/Dockerfile.jsc11 b/pkg/docker/Dockerfile.jsc11
index bd987ae5..e7a057bd 100644
--- a/pkg/docker/Dockerfile.jsc11
+++ b/pkg/docker/Dockerfile.jsc11
@@ -1,19 +1,21 @@
FROM eclipse-temurin:11-jdk-jammy
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (jsc11)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
-LABEL org.opencontainers.image.version="1.30.0"
+LABEL org.opencontainers.image.version="1.31.0"
RUN set -ex \
&& savedAptMark="$(apt-mark showmanual)" \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
- && hg clone -u 1.30.0-1 https://hg.nginx.org/unit \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,6 +45,7 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && /bin/true \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
&& ./configure java --jars=/usr/share/unit-jsc-common/ \
&& make -j $NCPU java-shared-install java-install \
@@ -49,15 +54,15 @@ RUN set -ex \
&& ./configure java --jars=/usr/share/unit-jsc-common/ \
&& make -j $NCPU java-shared-install java-install \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
&& apt-mark showmanual | xargs apt-mark auto > /dev/null \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
- && /bin/true \
+ && rm -rf /root/.m2 \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/docker/Dockerfile.minimal b/pkg/docker/Dockerfile.minimal
index 06a85b22..8c5ce0d5 100644
--- a/pkg/docker/Dockerfile.minimal
+++ b/pkg/docker/Dockerfile.minimal
@@ -1,19 +1,21 @@
FROM debian:bullseye-slim
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (minimal)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
-LABEL org.opencontainers.image.version="1.30.0"
+LABEL org.opencontainers.image.version="1.31.0"
RUN set -ex \
&& savedAptMark="$(apt-mark showmanual)" \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
- && hg clone -u 1.30.0-1 https://hg.nginx.org/unit \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,6 +45,7 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && /bin/true \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
&& ./configure \
&& make -j $NCPU version \
@@ -49,7 +54,7 @@ RUN set -ex \
&& ./configure \
&& make -j $NCPU version \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
@@ -57,7 +62,7 @@ RUN set -ex \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
&& /bin/true \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/docker/Dockerfile.node18 b/pkg/docker/Dockerfile.node18
index b3fb46d3..735342dd 100644
--- a/pkg/docker/Dockerfile.node18
+++ b/pkg/docker/Dockerfile.node18
@@ -1,19 +1,21 @@
FROM node:18-bullseye
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (node18)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
-LABEL org.opencontainers.image.version="1.30.0"
+LABEL org.opencontainers.image.version="1.31.0"
RUN set -ex \
&& savedAptMark="$(apt-mark showmanual)" \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
- && hg clone -u 1.30.0-1 https://hg.nginx.org/unit \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,23 +45,24 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && npm -g install node-gyp \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
- && ./configure nodejs --node-gyp=/usr/local/lib/node_modules/npm/bin/node-gyp-bin/node-gyp \
+ && ./configure nodejs --node-gyp=/usr/local/bin/node-gyp \
&& make -j $NCPU node node-install libunit-install \
&& make clean \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \
- && ./configure nodejs --node-gyp=/usr/local/lib/node_modules/npm/bin/node-gyp-bin/node-gyp \
+ && ./configure nodejs --node-gyp=/usr/local/bin/node-gyp \
&& make -j $NCPU node node-install libunit-install \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
&& apt-mark showmanual | xargs apt-mark auto > /dev/null \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
- && /bin/true \
+ && rm -rf /root/.cache/ && rm -rf /root/.npm \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/docker/Dockerfile.node20 b/pkg/docker/Dockerfile.node20
new file mode 100644
index 00000000..f291ccfc
--- /dev/null
+++ b/pkg/docker/Dockerfile.node20
@@ -0,0 +1,89 @@
+FROM node:20-bullseye
+
+LABEL org.opencontainers.image.title="Unit (node20)"
+LABEL org.opencontainers.image.description="Official build of Unit for Docker."
+LABEL org.opencontainers.image.url="https://unit.nginx.org"
+LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
+LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
+LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
+LABEL org.opencontainers.image.version="1.31.0"
+
+RUN set -ex \
+ && savedAptMark="$(apt-mark showmanual)" \
+ && apt-get update \
+ && apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
+ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
+ && cd unit \
+ && NCPU="$(getconf _NPROCESSORS_ONLN)" \
+ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
+ && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \
+ && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \
+ && CONFIGURE_ARGS_MODULES="--prefix=/usr \
+ --statedir=/var/lib/unit \
+ --control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
+ --pid=/var/run/unit.pid \
+ --logdir=/var/log \
+ --log=/var/log/unit.log \
+ --tmpdir=/var/tmp \
+ --user=unit \
+ --group=unit \
+ --openssl \
+ --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \
+ && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \
+ --njs" \
+ && make -j $NCPU -C pkg/contrib .njs \
+ && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \
+ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
+ && make -j $NCPU unitd \
+ && install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \
+ && make clean \
+ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \
+ && make -j $NCPU unitd \
+ && install -pm755 build/sbin/unitd /usr/sbin/unitd \
+ && make clean \
+ && npm -g install node-gyp \
+ && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
+ && ./configure nodejs --node-gyp=/usr/local/bin/node-gyp \
+ && make -j $NCPU node node-install libunit-install \
+ && make clean \
+ && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \
+ && ./configure nodejs --node-gyp=/usr/local/bin/node-gyp \
+ && make -j $NCPU node node-install libunit-install \
+ && cd \
+ && rm -rf /usr/src/unit \
+ && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
+ ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
+ done \
+ && apt-mark showmanual | xargs apt-mark auto > /dev/null \
+ && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
+ && rm -rf /root/.cache/ && rm -rf /root/.npm \
+ && mkdir -p /var/lib/unit/ \
+ && mkdir -p /docker-entrypoint.d/ \
+ && groupadd --gid 999 unit \
+ && useradd \
+ --uid 999 \
+ --gid unit \
+ --no-create-home \
+ --home /nonexistent \
+ --comment "unit user" \
+ --shell /bin/false \
+ unit \
+ && apt-get update \
+ && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
+ && apt-get purge -y --auto-remove build-essential \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /requirements.apt \
+ && ln -sf /dev/stdout /var/log/unit.log
+
+COPY docker-entrypoint.sh /usr/local/bin/
+COPY welcome.* /usr/share/unit/welcome/
+
+STOPSIGNAL SIGTERM
+
+ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
+EXPOSE 80
+CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"]
diff --git a/pkg/docker/Dockerfile.perl5.36 b/pkg/docker/Dockerfile.perl5.36
index 2dc31e53..2db7506d 100644
--- a/pkg/docker/Dockerfile.perl5.36
+++ b/pkg/docker/Dockerfile.perl5.36
@@ -1,19 +1,21 @@
FROM perl:5.36-bullseye
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (perl5.36)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
-LABEL org.opencontainers.image.version="1.30.0"
+LABEL org.opencontainers.image.version="1.31.0"
RUN set -ex \
&& savedAptMark="$(apt-mark showmanual)" \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
- && hg clone -u 1.30.0-1 https://hg.nginx.org/unit \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,6 +45,7 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && /bin/true \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
&& ./configure perl \
&& make -j $NCPU perl-install \
@@ -49,7 +54,7 @@ RUN set -ex \
&& ./configure perl \
&& make -j $NCPU perl-install \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
@@ -57,7 +62,7 @@ RUN set -ex \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
&& /bin/true \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/docker/Dockerfile.perl5.38 b/pkg/docker/Dockerfile.perl5.38
new file mode 100644
index 00000000..bd653cb1
--- /dev/null
+++ b/pkg/docker/Dockerfile.perl5.38
@@ -0,0 +1,89 @@
+FROM perl:5.38-bullseye
+
+LABEL org.opencontainers.image.title="Unit (perl5.38)"
+LABEL org.opencontainers.image.description="Official build of Unit for Docker."
+LABEL org.opencontainers.image.url="https://unit.nginx.org"
+LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
+LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
+LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
+LABEL org.opencontainers.image.version="1.31.0"
+
+RUN set -ex \
+ && savedAptMark="$(apt-mark showmanual)" \
+ && apt-get update \
+ && apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
+ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
+ && cd unit \
+ && NCPU="$(getconf _NPROCESSORS_ONLN)" \
+ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
+ && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \
+ && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \
+ && CONFIGURE_ARGS_MODULES="--prefix=/usr \
+ --statedir=/var/lib/unit \
+ --control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
+ --pid=/var/run/unit.pid \
+ --logdir=/var/log \
+ --log=/var/log/unit.log \
+ --tmpdir=/var/tmp \
+ --user=unit \
+ --group=unit \
+ --openssl \
+ --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \
+ && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \
+ --njs" \
+ && make -j $NCPU -C pkg/contrib .njs \
+ && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \
+ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
+ && make -j $NCPU unitd \
+ && install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \
+ && make clean \
+ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \
+ && make -j $NCPU unitd \
+ && install -pm755 build/sbin/unitd /usr/sbin/unitd \
+ && make clean \
+ && /bin/true \
+ && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
+ && ./configure perl \
+ && make -j $NCPU perl-install \
+ && make clean \
+ && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \
+ && ./configure perl \
+ && make -j $NCPU perl-install \
+ && cd \
+ && rm -rf /usr/src/unit \
+ && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
+ ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
+ done \
+ && apt-mark showmanual | xargs apt-mark auto > /dev/null \
+ && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
+ && /bin/true \
+ && mkdir -p /var/lib/unit/ \
+ && mkdir -p /docker-entrypoint.d/ \
+ && groupadd --gid 999 unit \
+ && useradd \
+ --uid 999 \
+ --gid unit \
+ --no-create-home \
+ --home /nonexistent \
+ --comment "unit user" \
+ --shell /bin/false \
+ unit \
+ && apt-get update \
+ && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
+ && apt-get purge -y --auto-remove build-essential \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /requirements.apt \
+ && ln -sf /dev/stdout /var/log/unit.log
+
+COPY docker-entrypoint.sh /usr/local/bin/
+COPY welcome.* /usr/share/unit/welcome/
+
+STOPSIGNAL SIGTERM
+
+ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
+EXPOSE 80
+CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"]
diff --git a/pkg/docker/Dockerfile.php8.2 b/pkg/docker/Dockerfile.php8.2
index fcf3f59e..bd27a4dd 100644
--- a/pkg/docker/Dockerfile.php8.2
+++ b/pkg/docker/Dockerfile.php8.2
@@ -1,19 +1,21 @@
FROM php:8.2-cli-bullseye
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (php8.2)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
-LABEL org.opencontainers.image.version="1.30.0"
+LABEL org.opencontainers.image.version="1.31.0"
RUN set -ex \
&& savedAptMark="$(apt-mark showmanual)" \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
- && hg clone -u 1.30.0-1 https://hg.nginx.org/unit \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,6 +45,7 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && /bin/true \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
&& ./configure php \
&& make -j $NCPU php-install \
@@ -49,7 +54,7 @@ RUN set -ex \
&& ./configure php \
&& make -j $NCPU php-install \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
@@ -57,7 +62,7 @@ RUN set -ex \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
&& ldconfig \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/docker/Dockerfile.python3.11 b/pkg/docker/Dockerfile.python3.11
index 89cd315a..cdc96434 100644
--- a/pkg/docker/Dockerfile.python3.11
+++ b/pkg/docker/Dockerfile.python3.11
@@ -1,19 +1,21 @@
FROM python:3.11-bullseye
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (python3.11)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
-LABEL org.opencontainers.image.version="1.30.0"
+LABEL org.opencontainers.image.version="1.31.0"
RUN set -ex \
&& savedAptMark="$(apt-mark showmanual)" \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
- && hg clone -u 1.30.0-1 https://hg.nginx.org/unit \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,6 +45,7 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && /bin/true \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
&& ./configure python --config=/usr/local/bin/python3-config \
&& make -j $NCPU python3-install \
@@ -49,7 +54,7 @@ RUN set -ex \
&& ./configure python --config=/usr/local/bin/python3-config \
&& make -j $NCPU python3-install \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
@@ -57,7 +62,7 @@ RUN set -ex \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
&& /bin/true \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/docker/Dockerfile.ruby3.2 b/pkg/docker/Dockerfile.ruby3.2
index 4a6b60e4..8b870756 100644
--- a/pkg/docker/Dockerfile.ruby3.2
+++ b/pkg/docker/Dockerfile.ruby3.2
@@ -1,19 +1,21 @@
FROM ruby:3.2-bullseye
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (ruby3.2)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
-LABEL org.opencontainers.image.version="1.30.0"
+LABEL org.opencontainers.image.version="1.31.0"
RUN set -ex \
&& savedAptMark="$(apt-mark showmanual)" \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
- && hg clone -u 1.30.0-1 https://hg.nginx.org/unit \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
&& DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,6 +45,7 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && /bin/true \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
&& ./configure ruby \
&& make -j $NCPU ruby-install \
@@ -49,15 +54,15 @@ RUN set -ex \
&& ./configure ruby \
&& make -j $NCPU ruby-install \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
&& apt-mark showmanual | xargs apt-mark auto > /dev/null \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
- && gem install rack \
+ && gem install rack && rm -rf /root/.local \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/docker/Dockerfile.wasm b/pkg/docker/Dockerfile.wasm
new file mode 100644
index 00000000..852bc5d0
--- /dev/null
+++ b/pkg/docker/Dockerfile.wasm
@@ -0,0 +1,109 @@
+FROM debian:bullseye-slim
+
+LABEL org.opencontainers.image.title="Unit (wasm)"
+LABEL org.opencontainers.image.description="Official build of Unit for Docker."
+LABEL org.opencontainers.image.url="https://unit.nginx.org"
+LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
+LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images"
+LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>"
+LABEL org.opencontainers.image.version="1.31.0"
+
+RUN set -ex \
+ && savedAptMark="$(apt-mark showmanual)" \
+ && apt-get update \
+ && apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
+ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
+ && hg clone -u 1.31.0-1 https://hg.nginx.org/unit \
+ && cd unit \
+ && NCPU="$(getconf _NPROCESSORS_ONLN)" \
+ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \
+ && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \
+ && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \
+ && CONFIGURE_ARGS_MODULES="--prefix=/usr \
+ --statedir=/var/lib/unit \
+ --control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
+ --pid=/var/run/unit.pid \
+ --logdir=/var/log \
+ --log=/var/log/unit.log \
+ --tmpdir=/var/tmp \
+ --user=unit \
+ --group=unit \
+ --openssl \
+ --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \
+ && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \
+ --njs" \
+ && make -j $NCPU -C pkg/contrib .njs \
+ && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \
+ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
+ && make -j $NCPU unitd \
+ && install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \
+ && make clean \
+ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \
+ && make -j $NCPU unitd \
+ && install -pm755 build/sbin/unitd /usr/sbin/unitd \
+ && make clean \
+ && export RUST_VERSION=1.71.0 \
+ && export RUSTUP_HOME=/usr/src/unit/rustup \
+ && export CARGO_HOME=/usr/src/unit/cargo \
+ && export PATH=/usr/src/unit/cargo/bin:$PATH \
+ && dpkgArch="$(dpkg --print-architecture)" \
+ && case "${dpkgArch##*-}" in \
+ amd64) rustArch="x86_64-unknown-linux-gnu"; rustupSha256="0b2f6c8f85a3d02fde2efc0ced4657869d73fccfce59defb4e8d29233116e6db" ;; \
+ arm64) rustArch="aarch64-unknown-linux-gnu"; rustupSha256="673e336c81c65e6b16dcdede33f4cc9ed0f08bde1dbe7a935f113605292dc800" ;; \
+ *) echo >&2 "unsupported architecture: ${dpkgArch}"; exit 1 ;; \
+ esac \
+ && url="https://static.rust-lang.org/rustup/archive/1.26.0/${rustArch}/rustup-init" \
+ && curl -L -O "$url" \
+ && echo "${rustupSha256} *rustup-init" | sha256sum -c - \
+ && chmod +x rustup-init \
+ && ./rustup-init -y --no-modify-path --profile minimal --default-toolchain $RUST_VERSION --default-host ${rustArch} \
+ && rm rustup-init \
+ && rustup --version \
+ && cargo --version \
+ && rustc --version \
+ && make -C pkg/contrib .wasmtime \
+ && install -pm 755 pkg/contrib/wasmtime/target/release/libwasmtime.so /usr/lib/$(dpkg-architecture -q DEB_HOST_MULTIARCH)/ \
+ && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
+ && ./configure wasm --include-path=`pwd`/pkg/contrib/wasmtime/crates/c-api/include --lib-path=/usr/lib/$(dpkg-architecture -q DEB_HOST_MULTIARCH)/ \
+ && make -j $NCPU wasm-install \
+ && make clean \
+ && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \
+ && ./configure wasm --include-path=`pwd`/pkg/contrib/wasmtime/crates/c-api/include --lib-path=/usr/lib/$(dpkg-architecture -q DEB_HOST_MULTIARCH)/ \
+ && make -j $NCPU wasm-install \
+ && cd \
+ && rm -rf /usr/src/unit \
+ && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
+ ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
+ done \
+ && apt-mark showmanual | xargs apt-mark auto > /dev/null \
+ && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
+ && /bin/true \
+ && mkdir -p /var/lib/unit/ \
+ && mkdir -p /docker-entrypoint.d/ \
+ && groupadd --gid 999 unit \
+ && useradd \
+ --uid 999 \
+ --gid unit \
+ --no-create-home \
+ --home /nonexistent \
+ --comment "unit user" \
+ --shell /bin/false \
+ unit \
+ && apt-get update \
+ && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
+ && apt-get purge -y --auto-remove build-essential \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /requirements.apt \
+ && ln -sf /dev/stdout /var/log/unit.log
+
+COPY docker-entrypoint.sh /usr/local/bin/
+COPY welcome.* /usr/share/unit/welcome/
+
+STOPSIGNAL SIGTERM
+
+ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
+EXPOSE 80
+CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"]
diff --git a/pkg/docker/Makefile b/pkg/docker/Makefile
index cb801253..237228a9 100644
--- a/pkg/docker/Makefile
+++ b/pkg/docker/Makefile
@@ -8,7 +8,7 @@ DEFAULT_VERSION := $(NXT_VERSION)
VERSION ?= $(DEFAULT_VERSION)
PATCHLEVEL ?= 1
-MODULES ?= go jsc node perl php python ruby
+MODULES ?= go jsc node perl php python ruby wasm
VARIANT ?= bullseye
@@ -17,34 +17,39 @@ CONTAINER_minimal ?= debian:$(VARIANT)-slim
CONFIGURE_minimal ?=
INSTALL_minimal ?= version
RUN_minimal ?= /bin/true
+MODULE_PREBUILD_minimal ?= /bin/true
-VERSIONS_go ?= 1.20
+VERSIONS_go ?= 1.20 1.21
VARIANT_go ?= $(VARIANT)
$(foreach goversion, $(VERSIONS_go), $(eval CONTAINER_go$(goversion) = golang:$(goversion)-$(VARIANT_go)))
CONFIGURE_go ?= go --go-path=$$GOPATH
INSTALL_go ?= go-install-src libunit-install
RUN_go ?= /bin/true
+MODULE_PREBUILD_go ?= /bin/true
VERSIONS_jsc ?= 11
VARIANT_jsc ?= jammy
$(foreach jscversion, $(VERSIONS_jsc), $(eval CONTAINER_jsc$(jscversion) = eclipse-temurin:$(jscversion)-jdk-$(VARIANT_jsc)))
CONFIGURE_jsc ?= java --jars=/usr/share/unit-jsc-common/
INSTALL_jsc ?= java-shared-install java-install
-RUN_jsc ?= /bin/true
+RUN_jsc ?= rm -rf /root/.m2
+MODULE_PREBUILD_jsc ?= /bin/true
-VERSIONS_node ?= 18
+VERSIONS_node ?= 18 20
VARIANT_node ?= $(VARIANT)
$(foreach nodeversion, $(VERSIONS_node), $(eval CONTAINER_node$(nodeversion) = node:$(nodeversion)-$(VARIANT_node)))
-CONFIGURE_node ?= nodejs --node-gyp=/usr/local/lib/node_modules/npm/bin/node-gyp-bin/node-gyp
+CONFIGURE_node ?= nodejs --node-gyp=/usr/local/bin/node-gyp
INSTALL_node ?= node node-install libunit-install
-RUN_node ?= /bin/true
+RUN_node ?= rm -rf /root/.cache/ \&\& rm -rf /root/.npm
+MODULE_PREBUILD_node ?= npm -g install node-gyp
-VERSIONS_perl ?= 5.36
+VERSIONS_perl ?= 5.36 5.38
VARIANT_perl ?= $(VARIANT)
$(foreach perlversion, $(VERSIONS_perl), $(eval CONTAINER_perl$(perlversion) = perl:$(perlversion)-$(VARIANT_perl)))
CONFIGURE_perl ?= perl
INSTALL_perl ?= perl-install
RUN_perl ?= /bin/true
+MODULE_PREBUILD_perl ?= /bin/true
VERSIONS_php ?= 8.2
VARIANT_php ?= cli-$(VARIANT)
@@ -52,6 +57,7 @@ $(foreach phpversion, $(VERSIONS_php), $(eval CONTAINER_php$(phpversion) = php:$
CONFIGURE_php ?= php
INSTALL_php ?= php-install
RUN_php ?= ldconfig
+MODULE_PREBUILD_php ?= /bin/true
VERSIONS_python ?= 3.11
VARIANT_python ?= $(VARIANT)
@@ -59,18 +65,49 @@ $(foreach pythonversion, $(VERSIONS_python), $(eval CONTAINER_python$(pythonvers
CONFIGURE_python ?= python --config=/usr/local/bin/python3-config
INSTALL_python ?= python3-install
RUN_python ?= /bin/true
+MODULE_PREBUILD_python ?= /bin/true
VERSIONS_ruby ?= 3.2
VARIANT_ruby ?= $(VARIANT)
$(foreach rubyversion, $(VERSIONS_ruby), $(eval CONTAINER_ruby$(rubyversion) = ruby:$(rubyversion)-$(VARIANT_ruby)))
CONFIGURE_ruby ?= ruby
INSTALL_ruby ?= ruby-install
-RUN_ruby ?= gem install rack
+RUN_ruby ?= gem install rack \&\& rm -rf /root/.local
+MODULE_PREBUILD_ruby ?= /bin/true
+
+VERSIONS_wasm ?=
+CONTAINER_wasm ?= debian:$(VARIANT)-slim
+CONFIGURE_wasm ?= wasm --include-path=\`pwd\`/pkg/contrib/wasmtime/crates/c-api/include --lib-path=/usr/lib/\$$(dpkg-architecture -q DEB_HOST_MULTIARCH)/
+INSTALL_wasm ?= wasm-install
+RUN_wasm ?= /bin/true
+define MODULE_PREBUILD_wasm
+export RUST_VERSION=1.71.0 \\\n \
+\ \ \ \&\& export RUSTUP_HOME=/usr/src/unit/rustup \\\n \
+\ \ \ \&\& export CARGO_HOME=/usr/src/unit/cargo \\\n \
+\ \ \ \&\& export PATH=/usr/src/unit/cargo/bin:\$$PATH \\\n \
+\ \ \ \&\& dpkgArch="\$$\(dpkg --print-architecture\)" \\\n \
+\ \ \ \&\& case "\$${dpkgArch##*-}" in \\\n \
+\ \ \ \ \ \ amd64\) rustArch="x86_64-unknown-linux-gnu"; rustupSha256="0b2f6c8f85a3d02fde2efc0ced4657869d73fccfce59defb4e8d29233116e6db" ;; \\\n \
+\ \ \ \ \ \ arm64\) rustArch="aarch64-unknown-linux-gnu"; rustupSha256="673e336c81c65e6b16dcdede33f4cc9ed0f08bde1dbe7a935f113605292dc800" ;; \\\n \
+\ \ \ \ \ \ *\) echo \>\&2 "unsupported architecture: \$${dpkgArch}"; exit 1 ;; \\\n \
+\ \ \ \esac \\\n \
+\ \ \ \&\& url="https://static.rust-lang.org/rustup/archive/1.26.0/\$${rustArch}/rustup-init" \\\n \
+\ \ \ \&\& curl -L -O "\$$url" \\\n \
+\ \ \ \&\& echo "\$${rustupSha256} *rustup-init" | sha256sum -c - \\\n \
+\ \ \ \&\& chmod +x rustup-init \\\n \
+\ \ \ \&\& ./rustup-init -y --no-modify-path --profile minimal --default-toolchain \$$RUST_VERSION --default-host \$${rustArch} \\\n \
+\ \ \ \&\& rm rustup-init \\\n \
+\ \ \ \&\& rustup --version \\\n \
+\ \ \ \&\& cargo --version \\\n \
+\ \ \ \&\& rustc --version \\\n \
+\ \ \ \&\& make -C pkg/contrib .wasmtime \\\n \
+\ \ \ \&\& install -pm 755 pkg/contrib/wasmtime/target/release/libwasmtime.so /usr/lib/\$$\(dpkg-architecture -q DEB_HOST_MULTIARCH\)/
+endef
default:
@echo "valid targets: all build dockerfiles library clean"
-MODVERSIONS = $(foreach module, $(MODULES), $(foreach modversion, $(shell for v in $(VERSIONS_$(module)); do echo $$v; done | sort -r), $(module)$(modversion))) minimal
+MODVERSIONS = $(foreach module, $(MODULES), $(foreach modversion, $(shell for v in $(VERSIONS_$(module)); do echo $$v; done | sort -r), $(module)$(modversion))) wasm minimal
modname = $(shell echo $1 | /usr/bin/tr -d '.01234567890-')
@@ -86,6 +123,8 @@ Dockerfile.%: ../../version template.Dockerfile
-e 's,@@CONFIGURE@@,$(CONFIGURE_$(call modname, $*)),g' \
-e 's,@@INSTALL@@,$(INSTALL_$(call modname, $*)),g' \
-e 's,@@RUN@@,$(RUN_$(call modname, $*)),g' \
+ -e 's,@@MODULE_PREBUILD@@,$(MODULE_PREBUILD_$(call modname, $*)),g' \
+ -e 's,@@MODULE@@,$*,g' \
> $@
build-%: Dockerfile.%
@@ -103,10 +142,10 @@ library:
modname="$$( echo $$mod | tr -d '.0123456789-' )"; \
TAGS="$$mod $${mod%%.*} $$modname" ; \
TAGS="$$(echo $$TAGS | tr " " "\n" | sort -u -r | tr "\n" "," | sed "s/,/, /g")"; \
- if [ "$$previous" == "$$modname" ]; then \
+ if [ "$$previous" = "$$modname" ]; then \
echo "Tags: $(VERSION)-$$mod, $$mod"; \
else \
- if [ "$$mod" == "minimal" ]; then \
+ if [ "$$mod" = "minimal" ]; then \
echo "Tags: $(VERSION)-$$mod, $${TAGS%, }, latest"; \
else \
echo "Tags: $(VERSION)-$$mod, $${TAGS%, }"; \
@@ -120,6 +159,11 @@ library:
previous=$$(echo $$mod | tr -d '.0123456789-'); \
done
+diff: $(addprefix diff-, $(MODVERSIONS))
+
+diff-%:
+ @echo container-diff diff --type file daemon://$(CONTAINER_$*) daemon://unit:$(VERSION)-$*
+
all: $(addprefix Dockerfile., $(MODVERSIONS))
clean:
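
For reference, the per-language Dockerfiles in this directory are generated from template.Dockerfile by the Makefile above, so the wasm additions normally flow through make rather than hand edits. A rough sketch of that workflow, assuming the listed targets (all, build-%, and the new diff-% helper) behave as their rules suggest:

    # regenerate every Dockerfile, including Dockerfile.wasm, from the template
    make -C pkg/docker all
    # or regenerate just the wasm one and build its image via the build-% rule
    make -C pkg/docker build-wasm
    # the new diff-% target only echoes a container-diff command for inspection
    make -C pkg/docker diff-wasm
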
diff --git a/pkg/docker/template.Dockerfile b/pkg/docker/template.Dockerfile
index c6a72aa8..4d5cc101 100644
--- a/pkg/docker/template.Dockerfile
+++ b/pkg/docker/template.Dockerfile
@@ -1,6 +1,6 @@
FROM @@CONTAINER@@
-LABEL org.opencontainers.image.title="Unit"
+LABEL org.opencontainers.image.title="Unit (@@MODULE@@)"
LABEL org.opencontainers.image.description="Official build of Unit for Docker."
LABEL org.opencontainers.image.url="https://unit.nginx.org"
LABEL org.opencontainers.image.source="https://github.com/nginx/unit"
@@ -13,6 +13,8 @@ RUN set -ex \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates mercurial build-essential libssl-dev libpcre2-dev curl pkg-config \
&& mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \
+ && mkdir -p /usr/src/unit \
+ && cd /usr/src/unit \
&& hg clone -u @@VERSION@@-@@PATCHLEVEL@@ https://hg.nginx.org/unit \
&& cd unit \
&& NCPU="$(getconf _NPROCESSORS_ONLN)" \
@@ -22,7 +24,9 @@ RUN set -ex \
&& CONFIGURE_ARGS_MODULES="--prefix=/usr \
--statedir=/var/lib/unit \
--control=unix:/var/run/control.unit.sock \
+ --runstatedir=/var/run \
--pid=/var/run/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -41,6 +45,7 @@ RUN set -ex \
&& make -j $NCPU unitd \
&& install -pm755 build/sbin/unitd /usr/sbin/unitd \
&& make clean \
+ && @@MODULE_PREBUILD@@ \
&& ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \
&& ./configure @@CONFIGURE@@ \
&& make -j $NCPU @@INSTALL@@ \
@@ -49,7 +54,7 @@ RUN set -ex \
&& ./configure @@CONFIGURE@@ \
&& make -j $NCPU @@INSTALL@@ \
&& cd \
- && rm -rf unit \
+ && rm -rf /usr/src/unit \
&& for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \
ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \
done \
@@ -57,7 +62,7 @@ RUN set -ex \
&& { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \
&& @@RUN@@ \
&& mkdir -p /var/lib/unit/ \
- && mkdir /docker-entrypoint.d/ \
+ && mkdir -p /docker-entrypoint.d/ \
&& groupadd --gid 999 unit \
&& useradd \
--uid 999 \
@@ -69,7 +74,7 @@ RUN set -ex \
unit \
&& apt-get update \
&& apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \
- && apt-get purge -y --auto-remove \
+ && apt-get purge -y --auto-remove build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /requirements.apt \
&& ln -sf /dev/stdout /var/log/unit.log
diff --git a/pkg/rpm/Makefile b/pkg/rpm/Makefile
index d3cc34bd..355f8a59 100644
--- a/pkg/rpm/Makefile
+++ b/pkg/rpm/Makefile
@@ -64,6 +64,7 @@ include Makefile.perl
include Makefile.jsc-common
include Makefile.jsc8
include Makefile.jsc11
+include Makefile.wasm
endif
ifeq ($(OSVER), centos9)
@@ -74,6 +75,7 @@ include Makefile.perl
include Makefile.jsc-common
include Makefile.jsc8
include Makefile.jsc11
+include Makefile.wasm
endif
ifeq ($(OSVER), amazonlinux2)
@@ -84,6 +86,7 @@ include Makefile.go
include Makefile.perl
include Makefile.jsc-common
include Makefile.jsc8
+include Makefile.wasm
endif
ifeq ($(OSVER), amazonlinux2023)
@@ -94,6 +97,7 @@ include Makefile.go
include Makefile.perl
include Makefile.jsc-common
include Makefile.jsc17
+include Makefile.wasm
endif
ifeq ($(OSVER), fedora)
@@ -105,6 +109,7 @@ include Makefile.ruby
include Makefile.jsc-common
include Makefile.jsc8
include Makefile.jsc11
+include Makefile.wasm
endif
ifeq ($(OSVER), fedora37)
@@ -116,13 +121,16 @@ include Makefile.ruby
include Makefile.jsc-common
include Makefile.jsc8
include Makefile.jsc11
+include Makefile.wasm
endif
CONFIGURE_ARGS_COMMON=\
--prefix=/usr \
--statedir=%{_sharedstatedir}/unit \
--control="unix:/var/run/unit/control.sock" \
+ --runstatedir=/var/run \
--pid=/var/run/unit/unit.pid \
+ --logdir=/var/log \
--log=/var/log/unit/unit.log \
--tmpdir=/var/tmp \
--user=unit \
@@ -216,6 +224,7 @@ rpmbuild/SPECS/unit-%.spec: unit.module.spec.in ../../docs/changes.xml | rpmbuil
done ; \
pkgname=$(shell echo $@ | cut -d '/' -f 3 | tr '_' '-' | cut -d '.' -f 1) ; \
definitions=`echo "$$MODULE_DEFINITIONS_$*" | sed -e ':a' -e 'N' -e '$$!ba' -e "s/\n/\$$CR/g"` ; \
+ prebuild=`echo "$$MODULE_PREBUILD_$*" | sed -e ':a' -e 'N' -e '$$!ba' -e "s/\n/\$$CR/g"` ; \
preinstall=`echo "$$MODULE_PREINSTALL_$*" | sed -e ':a' -e 'N' -e '$$!ba' -e "s/\n/\$$CR/g"` ; \
postinstall=`echo "$$MODULE_POSTINSTALL_$*" | sed -e ':a' -e 'N' -e '$$!ba' -e "s/\n/\$$CR/g"` ; \
files=`echo "$$MODULE_FILES_$*" | sed -e ':a' -e 'N' -e '$$!ba' -e "s/\n/\$$CR/g"` ; \
@@ -234,6 +243,7 @@ rpmbuild/SPECS/unit-%.spec: unit.module.spec.in ../../docs/changes.xml | rpmbuil
-e "s#%%MODULE_MAKEARGS%%#$(MODULE_MAKEARGS_$*)#g" \
-e "s#%%MODULE_INSTARGS%%#$(MODULE_INSTARGS_$*)#g" \
-e "s#%%MODULE_DEFINITIONS%%#$${definitions}#g" \
+ -e "s#%%MODULE_PREBUILD%%#$${prebuild}#g" \
-e "s#%%MODULE_PREINSTALL%%#$${preinstall}#g" \
-e "s#%%MODULE_POSTINSTALL%%#$${postinstall}#g" \
-e "s#%%MODULE_FILES%%#$${files}#g" \
diff --git a/pkg/rpm/Makefile.wasm b/pkg/rpm/Makefile.wasm
new file mode 100644
index 00000000..c638071b
--- /dev/null
+++ b/pkg/rpm/Makefile.wasm
@@ -0,0 +1,51 @@
+MODULES+= wasm
+MODULE_SUFFIX_wasm= wasm
+
+MODULE_SUMMARY_wasm= WASM module for NGINX Unit
+
+MODULE_VERSION_wasm= $(VERSION)
+MODULE_RELEASE_wasm= 1
+
+MODULE_CONFARGS_wasm= wasm --include-path=\`pwd\`/pkg/contrib/wasmtime/crates/c-api/include --lib-path=\`pwd\`/pkg/contrib/wasmtime/target/release
+MODULE_MAKEARGS_wasm= wasm
+MODULE_INSTARGS_wasm= wasm-install
+
+MODULE_SOURCES_wasm=
+
+BUILD_DEPENDS_wasm=
+
+BUILD_DEPENDS+= $(BUILD_DEPENDS_wasm)
+
+define MODULE_PREBUILD_wasm
+%{__make} -C pkg/contrib .wasmtime
+endef
+export MODULE_PREBUILD_wasm
+
+define MODULE_PREINSTALL_wasm
+endef
+export MODULE_PREINSTALL_wasm
+
+define MODULE_POSTINSTALL_wasm
+%{__install} -m 755 -p pkg/contrib/wasmtime/target/release/libwasmtime.so %{buildroot}%{_libdir}/
+endef
+export MODULE_POSTINSTALL_wasm
+
+define MODULE_FILES_wasm
+%{_libdir}/libwasmtime.so
+%{_libdir}/unit/modules/*
+%{_libdir}/unit/debug-modules/*
+endef
+export MODULE_FILES_wasm
+
+define MODULE_POST_wasm
+cat <<BANNER
+----------------------------------------------------------------------
+
+The $(MODULE_SUMMARY_wasm) has been installed.
+
+Online documentation is available at https://unit.nginx.org
+
+----------------------------------------------------------------------
+BANNER
+endef
+export MODULE_POST_wasm
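
To sketch how these hooks come together at RPM build time (see also the %%MODULE_PREBUILD%% substitution wired into pkg/rpm/Makefile above and unit.module.spec.in below): the generated unit-wasm spec first builds the bundled wasmtime, then configures the module against its C API headers and the freshly built shared library, which MODULE_POSTINSTALL_wasm later copies into %{_libdir}. Roughly, and leaving out the common CONFIGURE_ARGS, the %build step amounts to:

    # MODULE_PREBUILD_wasm: build wasmtime from pkg/contrib first
    make -C pkg/contrib .wasmtime
    # MODULE_CONFARGS_wasm: point the wasm module at wasmtime's C API and library
    ./configure wasm \
        --include-path=`pwd`/pkg/contrib/wasmtime/crates/c-api/include \
        --lib-path=`pwd`/pkg/contrib/wasmtime/target/release
    # MODULE_MAKEARGS_wasm / MODULE_INSTARGS_wasm: build and install the module
    make wasm && make wasm-install
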
diff --git a/pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc11 b/pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc11
index b08fcc34..4505b5b5 100644
--- a/pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc11
+++ b/pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc11
@@ -1,12 +1,15 @@
NGINX Unit.
- Copyright 2017-2022 NGINX, Inc.
+ Copyright 2017-2023 NGINX, Inc.
+ Copyright 2017-2023 Andrei Zeliankou
+ Copyright 2018-2023 Konstantin Pavlov
+ Copyright 2021-2023 Zhidao Hong
+ Copyright 2021-2023 Alejandro Colomar
+ Copyright 2022-2023 Andrew Clayton
+ Copyright 2022-2023 Liam Crilly
Copyright 2017-2022 Valentin V. Bartenev
Copyright 2017-2022 Max Romanov
- Copyright 2017-2022 Andrei Zeliankou
- Copyright 2018-2022 Konstantin Pavlov
- Copyright 2021-2022 Zhidao Hong
Copyright 2021-2022 Oisín Canty
Copyright 2017-2021 Igor Sysoev
Copyright 2017-2021 Andrei Belov
diff --git a/pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc8 b/pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc8
index 5e31863d..59891951 100644
--- a/pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc8
+++ b/pkg/rpm/rpmbuild/SOURCES/COPYRIGHT.unit-jsc8
@@ -1,12 +1,15 @@
NGINX Unit.
- Copyright 2017-2022 NGINX, Inc.
+ Copyright 2017-2023 NGINX, Inc.
+ Copyright 2017-2023 Andrei Zeliankou
+ Copyright 2018-2023 Konstantin Pavlov
+ Copyright 2021-2023 Zhidao Hong
+ Copyright 2021-2023 Alejandro Colomar
+ Copyright 2022-2023 Andrew Clayton
+ Copyright 2022-2023 Liam Crilly
Copyright 2017-2022 Valentin V. Bartenev
Copyright 2017-2022 Max Romanov
- Copyright 2017-2022 Andrei Zeliankou
- Copyright 2018-2022 Konstantin Pavlov
- Copyright 2021-2022 Zhidao Hong
Copyright 2021-2022 Oisín Canty
Copyright 2017-2021 Igor Sysoev
Copyright 2017-2021 Andrei Belov
diff --git a/pkg/rpm/unit.module.spec.in b/pkg/rpm/unit.module.spec.in
index 04323afc..b3d5d94b 100644
--- a/pkg/rpm/unit.module.spec.in
+++ b/pkg/rpm/unit.module.spec.in
@@ -59,6 +59,7 @@ This package contains %%SUMMARY%%.
tar --strip-components=1 -zxf %{SOURCE0}
%build
+%%MODULE_PREBUILD%%
./configure \
%{CONFIGURE_ARGS} \
--modulesdir=%{_libdir}/unit/debug-modules \
diff --git a/pkg/rpm/unit.spec.in b/pkg/rpm/unit.spec.in
index 14a2ea00..01323650 100644
--- a/pkg/rpm/unit.spec.in
+++ b/pkg/rpm/unit.spec.in
@@ -47,6 +47,8 @@ Requires(postun): systemd
BuildRequires: pcre2-devel
BuildRequires: pkgconfig
+BuildRequires: clang
+BuildRequires: llvm
Provides: unit-r%{version}
@@ -99,6 +101,10 @@ PKG_CONFIG_PATH=%{bdir}/pkg/contrib/njs/build \
%{__make} %{?_smp_mflags}
%{__mv} build build-nodebug
+%if (0%{?fedora}) || (0%{?rhel} >= 8) || (0%{?amzn2})
+%{__make} %{?_smp_mflags} -C pkg/contrib .libunit-wasm
+%endif
+
%install
%{__rm} -rf %{buildroot}
%{__ln_s} build-nodebug build
@@ -136,6 +142,12 @@ DESTDIR=%{buildroot} make unitd-install libunit-install manpage-install
%{__install} -p -D -m 0644 %{SOURCE1} %{buildroot}%{_unitdir}/unit.service
%{__install} -p -D -m 0644 %{SOURCE2} %{buildroot}%{_unitdir}/unit-debug.service
+%if (0%{?fedora}) || (0%{?rhel} >= 8) || (0%{?amzn2})
+%{__mkdir} -p %{buildroot}%{_includedir}/unit/
+%{__install} -m 644 pkg/contrib/libunit-wasm/src/c/libunit-wasm.a %{buildroot}%{_libdir}/
+%{__install} -m 644 pkg/contrib/libunit-wasm/src/c/include/unit/unit-wasm.h %{buildroot}%{_includedir}/unit/
+%endif
+
QA_SKIP_BUILD_ROOT=1
export QA_SKIP_BUILD_ROOT
@@ -221,6 +233,12 @@ BANNER
%files devel
%{_libdir}/libunit.a
%{_libdir}/libunit-debug.a
+%if (0%{?fedora}) || (0%{?rhel} >= 8) || (0%{?amzn2})
+%{_libdir}/libunit-wasm.a
+%dir %{_includedir}/unit
+%{_includedir}/unit/*.h
+%endif
%{_includedir}/nxt_*.h
+%{_datadir}/pkgconfig/unit.pc
%changelog
diff --git a/src/nxt_application.c b/src/nxt_application.c
index ffa8eb53..872e387a 100644
--- a/src/nxt_application.c
+++ b/src/nxt_application.c
@@ -1099,6 +1099,9 @@ nxt_app_parse_type(u_char *p, size_t length)
} else if (nxt_str_eq(&str, "java", 4)) {
return NXT_APP_JAVA;
+
+ } else if (nxt_str_eq(&str, "wasm", 4)) {
+ return NXT_APP_WASM;
}
return NXT_APP_UNKNOWN;
diff --git a/src/nxt_application.h b/src/nxt_application.h
index 2675e6a0..64866db6 100644
--- a/src/nxt_application.h
+++ b/src/nxt_application.h
@@ -21,6 +21,7 @@ typedef enum {
NXT_APP_PERL,
NXT_APP_RUBY,
NXT_APP_JAVA,
+ NXT_APP_WASM,
NXT_APP_UNKNOWN,
} nxt_app_type_t;
@@ -86,6 +87,23 @@ typedef struct {
} nxt_java_app_conf_t;
+typedef struct {
+ const char *module;
+
+ const char *request_handler;
+ const char *malloc_handler;
+ const char *free_handler;
+
+ const char *module_init_handler;
+ const char *module_end_handler;
+ const char *request_init_handler;
+ const char *request_end_handler;
+ const char *response_end_handler;
+
+ nxt_conf_value_t *access;
+} nxt_wasm_app_conf_t;
+
+
struct nxt_common_app_conf_s {
nxt_str_t name;
nxt_str_t type;
@@ -114,6 +132,7 @@ struct nxt_common_app_conf_s {
nxt_perl_app_conf_t perl;
nxt_ruby_app_conf_t ruby;
nxt_java_app_conf_t java;
+ nxt_wasm_app_conf_t wasm;
} u;
nxt_conf_value_t *self;
diff --git a/src/nxt_conf_validation.c b/src/nxt_conf_validation.c
index 8c75a9fe..f00b28b8 100644
--- a/src/nxt_conf_validation.c
+++ b/src/nxt_conf_validation.c
@@ -167,6 +167,8 @@ static nxt_int_t nxt_conf_vldt_match_addrs(nxt_conf_validation_t *vldt,
nxt_conf_value_t *value, void *data);
static nxt_int_t nxt_conf_vldt_match_addr(nxt_conf_validation_t *vldt,
nxt_conf_value_t *value);
+static nxt_int_t nxt_conf_vldt_response_header(nxt_conf_validation_t *vldt,
+ nxt_str_t *name, nxt_conf_value_t *value);
static nxt_int_t nxt_conf_vldt_app_name(nxt_conf_validation_t *vldt,
nxt_conf_value_t *value, void *data);
static nxt_int_t nxt_conf_vldt_forwarded(nxt_conf_validation_t *vldt,
@@ -250,6 +252,7 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_python_target_members[];
static nxt_conf_vldt_object_t nxt_conf_vldt_php_common_members[];
static nxt_conf_vldt_object_t nxt_conf_vldt_php_options_members[];
static nxt_conf_vldt_object_t nxt_conf_vldt_php_target_members[];
+static nxt_conf_vldt_object_t nxt_conf_vldt_wasm_access_members[];
static nxt_conf_vldt_object_t nxt_conf_vldt_common_members[];
static nxt_conf_vldt_object_t nxt_conf_vldt_app_limits_members[];
static nxt_conf_vldt_object_t nxt_conf_vldt_app_processes_members[];
@@ -688,6 +691,12 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_action_common_members[] = {
.name = nxt_string("rewrite"),
.type = NXT_CONF_VLDT_STRING,
},
+ {
+ .name = nxt_string("response_headers"),
+ .type = NXT_CONF_VLDT_OBJECT,
+ .validator = nxt_conf_vldt_object_iterator,
+ .u.object = nxt_conf_vldt_response_header,
+ },
NXT_CONF_VLDT_END
};
@@ -1041,6 +1050,59 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_java_members[] = {
};
+static nxt_conf_vldt_object_t nxt_conf_vldt_wasm_members[] = {
+ {
+ .name = nxt_string("module"),
+ .type = NXT_CONF_VLDT_STRING,
+ .flags = NXT_CONF_VLDT_REQUIRED,
+ }, {
+ .name = nxt_string("request_handler"),
+ .type = NXT_CONF_VLDT_STRING,
+ .flags = NXT_CONF_VLDT_REQUIRED,
+ },{
+ .name = nxt_string("malloc_handler"),
+ .type = NXT_CONF_VLDT_STRING,
+ .flags = NXT_CONF_VLDT_REQUIRED,
+ }, {
+ .name = nxt_string("free_handler"),
+ .type = NXT_CONF_VLDT_STRING,
+ .flags = NXT_CONF_VLDT_REQUIRED,
+ }, {
+ .name = nxt_string("module_init_handler"),
+ .type = NXT_CONF_VLDT_STRING,
+ }, {
+ .name = nxt_string("module_end_handler"),
+ .type = NXT_CONF_VLDT_STRING,
+ }, {
+ .name = nxt_string("request_init_handler"),
+ .type = NXT_CONF_VLDT_STRING,
+ }, {
+ .name = nxt_string("request_end_handler"),
+ .type = NXT_CONF_VLDT_STRING,
+ }, {
+ .name = nxt_string("response_end_handler"),
+ .type = NXT_CONF_VLDT_STRING,
+ }, {
+ .name = nxt_string("access"),
+ .type = NXT_CONF_VLDT_OBJECT,
+ .validator = nxt_conf_vldt_object,
+ .u.members = nxt_conf_vldt_wasm_access_members,
+ },
+
+ NXT_CONF_VLDT_NEXT(nxt_conf_vldt_common_members)
+};
+
+
+static nxt_conf_vldt_object_t nxt_conf_vldt_wasm_access_members[] = {
+ {
+ .name = nxt_string("filesystem"),
+ .type = NXT_CONF_VLDT_ARRAY,
+ },
+
+ NXT_CONF_VLDT_END
+};
+
+
static nxt_conf_vldt_object_t nxt_conf_vldt_common_members[] = {
{
.name = nxt_string("type"),
@@ -2448,6 +2510,35 @@ nxt_conf_vldt_object_conf_commands(nxt_conf_validation_t *vldt,
static nxt_int_t
+nxt_conf_vldt_response_header(nxt_conf_validation_t *vldt, nxt_str_t *name,
+ nxt_conf_value_t *value)
+{
+ nxt_uint_t type;
+
+ static nxt_str_t content_length = nxt_string("Content-Length");
+
+ if (name->length == 0) {
+ return nxt_conf_vldt_error(vldt, "The response header name "
+ "must not be empty.");
+ }
+
+ if (nxt_strstr_eq(name, &content_length)) {
+ return nxt_conf_vldt_error(vldt, "The \"Content-Length\" response "
+ "header value is not supported");
+ }
+
+ type = nxt_conf_type(value);
+
+ if (type == NXT_CONF_STRING || type == NXT_CONF_NULL) {
+ return NXT_OK;
+ }
+
+ return nxt_conf_vldt_error(vldt, "The \"%V\" response header value "
+ "must either be a string or a null", name);
+}
+
+
+static nxt_int_t
nxt_conf_vldt_app_name(nxt_conf_validation_t *vldt, nxt_conf_value_t *value,
void *data)
{
@@ -2525,6 +2616,7 @@ nxt_conf_vldt_app(nxt_conf_validation_t *vldt, nxt_str_t *name,
{ nxt_conf_vldt_object, nxt_conf_vldt_perl_members },
{ nxt_conf_vldt_object, nxt_conf_vldt_ruby_members },
{ nxt_conf_vldt_object, nxt_conf_vldt_java_members },
+ { nxt_conf_vldt_object, nxt_conf_vldt_wasm_members },
};
ret = nxt_conf_vldt_type(vldt, name, value, NXT_CONF_VLDT_OBJECT);
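
The new nxt_conf_vldt_wasm_members table above defines the configuration surface for "type": "wasm" applications: module, request_handler, malloc_handler, and free_handler are required, the various *_init/*_end handlers are optional strings, and access currently only accepts a filesystem array. A hedged sketch of configuring such an application through the control socket used in the Dockerfiles; the application name, module path, and exported function names here are purely illustrative:

    curl -X PUT --data-binary '{
        "type": "wasm",
        "module": "/www/wasm/app.wasm",
        "request_handler": "my_request_handler",
        "malloc_handler": "my_malloc",
        "free_handler": "my_free",
        "access": { "filesystem": [ "/tmp" ] }
    }' --unix-socket /var/run/control.unit.sock \
        http://localhost/config/applications/hello-wasm
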
diff --git a/src/nxt_h1proto.c b/src/nxt_h1proto.c
index df1f82f9..1dfe4b6e 100644
--- a/src/nxt_h1proto.c
+++ b/src/nxt_h1proto.c
@@ -1284,7 +1284,7 @@ nxt_h1p_request_header_send(nxt_task_t *task, nxt_http_request_t *r,
size += NXT_WEBSOCKET_ACCEPT_SIZE + 2;
} else {
- http11 = (h1p->parser.version.s.minor != '0');
+ http11 = nxt_h1p_is_http11(h1p);
if (r->resp.content_length == NULL || r->resp.content_length->skip) {
diff --git a/src/nxt_h1proto.h b/src/nxt_h1proto.h
index f8500963..b324db8d 100644
--- a/src/nxt_h1proto.h
+++ b/src/nxt_h1proto.h
@@ -51,4 +51,7 @@ struct nxt_h1proto_s {
nxt_conn_t *conn;
};
+#define nxt_h1p_is_http11(h1p) \
+ ((h1p)->parser.version.s.minor != '0')
+
#endif /* _NXT_H1PROTO_H_INCLUDED_ */
diff --git a/src/nxt_http.h b/src/nxt_http.h
index 08e1fcbe..e812bd0d 100644
--- a/src/nxt_http.h
+++ b/src/nxt_http.h
@@ -173,6 +173,7 @@ struct nxt_http_request_s {
nxt_tstr_query_t *tstr_query;
nxt_tstr_cache_t tstr_cache;
+ nxt_http_action_t *action;
void *req_rpc_data;
#if (NXT_HAVE_REGEX)
@@ -227,6 +228,7 @@ typedef struct nxt_http_route_addr_rule_s nxt_http_route_addr_rule_t;
typedef struct {
nxt_conf_value_t *rewrite;
+ nxt_conf_value_t *set_headers;
nxt_conf_value_t *pass;
nxt_conf_value_t *ret;
nxt_conf_value_t *location;
@@ -255,6 +257,7 @@ struct nxt_http_action_s {
} u;
nxt_tstr_t *rewrite;
+ nxt_array_t *set_headers; /* of nxt_http_field_t */
nxt_http_action_t *fallback;
};
@@ -382,8 +385,11 @@ nxt_int_t nxt_upstreams_joint_create(nxt_router_temp_conf_t *tmcf,
nxt_int_t nxt_http_rewrite_init(nxt_router_conf_t *rtcf,
nxt_http_action_t *action, nxt_http_action_conf_t *acf);
-nxt_int_t nxt_http_rewrite(nxt_task_t *task, nxt_http_request_t *r,
- nxt_http_action_t *action);
+nxt_int_t nxt_http_rewrite(nxt_task_t *task, nxt_http_request_t *r);
+
+nxt_int_t nxt_http_set_headers_init(nxt_router_conf_t *rtcf,
+ nxt_http_action_t *action, nxt_http_action_conf_t *acf);
+nxt_int_t nxt_http_set_headers(nxt_http_request_t *r);
nxt_int_t nxt_http_return_init(nxt_router_conf_t *rtcf,
nxt_http_action_t *action, nxt_http_action_conf_t *acf);
diff --git a/src/nxt_http_request.c b/src/nxt_http_request.c
index 48f7dbe3..e532baff 100644
--- a/src/nxt_http_request.c
+++ b/src/nxt_http_request.c
@@ -560,11 +560,9 @@ nxt_http_request_action(nxt_task_t *task, nxt_http_request_t *r,
if (nxt_fast_path(action != NULL)) {
do {
- if (action->rewrite != NULL) {
- ret = nxt_http_rewrite(task, r, action);
- if (nxt_slow_path(ret != NXT_OK)) {
- break;
- }
+ ret = nxt_http_rewrite(task, r);
+ if (nxt_slow_path(ret != NXT_OK)) {
+ break;
}
action = action->handler(task, r, action);
@@ -632,9 +630,15 @@ nxt_http_request_header_send(nxt_task_t *task, nxt_http_request_t *r,
nxt_work_handler_t body_handler, void *data)
{
u_char *p, *end, *server_string;
+ nxt_int_t ret;
nxt_http_field_t *server, *date, *content_length;
nxt_socket_conf_t *skcf;
+ ret = nxt_http_set_headers(r);
+ if (nxt_slow_path(ret != NXT_OK)) {
+ goto fail;
+ }
+
/*
* TODO: "Server", "Date", and "Content-Length" processing should be moved
* to the last header filter.
diff --git a/src/nxt_http_rewrite.c b/src/nxt_http_rewrite.c
index b800a919..ae5c865a 100644
--- a/src/nxt_http_rewrite.c
+++ b/src/nxt_http_rewrite.c
@@ -10,8 +10,8 @@
nxt_int_t
nxt_http_rewrite_init(nxt_router_conf_t *rtcf, nxt_http_action_t *action,
- nxt_http_action_conf_t *acf)
- {
+ nxt_http_action_conf_t *acf)
+{
nxt_str_t str;
nxt_conf_get_string(acf->rewrite, &str);
@@ -26,15 +26,21 @@ nxt_http_rewrite_init(nxt_router_conf_t *rtcf, nxt_http_action_t *action,
nxt_int_t
-nxt_http_rewrite(nxt_task_t *task, nxt_http_request_t *r,
- nxt_http_action_t *action)
+nxt_http_rewrite(nxt_task_t *task, nxt_http_request_t *r)
{
u_char *p;
nxt_int_t ret;
nxt_str_t str, encoded_path, target;
nxt_router_conf_t *rtcf;
+ nxt_http_action_t *action;
nxt_http_request_parse_t rp;
+ action = r->action;
+
+ if (action == NULL || action->rewrite == NULL) {
+ return NXT_OK;
+ }
+
if (nxt_tstr_is_const(action->rewrite)) {
nxt_tstr_str(action->rewrite, &str);
diff --git a/src/nxt_http_route.c b/src/nxt_http_route.c
index 0935dd4a..4a64d5c1 100644
--- a/src/nxt_http_route.c
+++ b/src/nxt_http_route.c
@@ -584,6 +584,11 @@ static nxt_conf_map_t nxt_http_route_action_conf[] = {
offsetof(nxt_http_action_conf_t, rewrite)
},
{
+ nxt_string("response_headers"),
+ NXT_CONF_MAP_PTR,
+ offsetof(nxt_http_action_conf_t, set_headers)
+ },
+ {
nxt_string("pass"),
NXT_CONF_MAP_PTR,
offsetof(nxt_http_action_conf_t, pass)
@@ -671,6 +676,13 @@ nxt_http_action_init(nxt_task_t *task, nxt_router_temp_conf_t *tmcf,
}
}
+ if (acf.set_headers != NULL) {
+ ret = nxt_http_set_headers_init(rtcf, action, &acf);
+ if (nxt_slow_path(ret != NXT_OK)) {
+ return ret;
+ }
+ }
+
if (acf.ret != NULL) {
return nxt_http_return_init(rtcf, action, &acf);
}
@@ -1573,6 +1585,11 @@ nxt_http_route_handler(nxt_task_t *task, nxt_http_request_t *r,
}
if (action != NULL) {
+
+ if (action != NXT_HTTP_ACTION_ERROR) {
+ r->action = action;
+ }
+
return action;
}
}
diff --git a/src/nxt_http_set_headers.c b/src/nxt_http_set_headers.c
new file mode 100644
index 00000000..25dd7478
--- /dev/null
+++ b/src/nxt_http_set_headers.c
@@ -0,0 +1,176 @@
+
+/*
+ * Copyright (C) Zhidao HONG
+ * Copyright (C) NGINX, Inc.
+ */
+
+#include <nxt_router.h>
+#include <nxt_http.h>
+
+
+typedef struct {
+ nxt_str_t name;
+ nxt_tstr_t *value;
+} nxt_http_header_val_t;
+
+
+nxt_int_t
+nxt_http_set_headers_init(nxt_router_conf_t *rtcf, nxt_http_action_t *action,
+ nxt_http_action_conf_t *acf)
+ {
+ uint32_t next;
+ nxt_str_t str, name;
+ nxt_array_t *headers;
+ nxt_conf_value_t *value;
+ nxt_http_header_val_t *hv;
+
+ headers = nxt_array_create(rtcf->mem_pool, 4,
+ sizeof(nxt_http_header_val_t));
+ if (nxt_slow_path(headers == NULL)) {
+ return NXT_ERROR;
+ }
+
+ action->set_headers = headers;
+
+ next = 0;
+
+ for ( ;; ) {
+ value = nxt_conf_next_object_member(acf->set_headers, &name, &next);
+ if (value == NULL) {
+ break;
+ }
+
+ hv = nxt_array_zero_add(headers);
+ if (nxt_slow_path(hv == NULL)) {
+ return NXT_ERROR;
+ }
+
+ hv->name.length = name.length;
+
+ hv->name.start = nxt_mp_nget(rtcf->mem_pool, name.length);
+ if (nxt_slow_path(hv->name.start == NULL)) {
+ return NXT_ERROR;
+ }
+
+ nxt_memcpy(hv->name.start, name.start, name.length);
+
+ if (nxt_conf_type(value) == NXT_CONF_STRING) {
+ nxt_conf_get_string(value, &str);
+
+ hv->value = nxt_tstr_compile(rtcf->tstr_state, &str, 0);
+ if (nxt_slow_path(hv->value == NULL)) {
+ return NXT_ERROR;
+ }
+ }
+ }
+
+ return NXT_OK;
+}
+
+
+static nxt_http_field_t *
+nxt_http_resp_header_find(nxt_http_request_t *r, u_char *name, size_t length)
+{
+ nxt_http_field_t *f;
+
+ nxt_list_each(f, r->resp.fields) {
+
+ if (f->skip) {
+ continue;
+ }
+
+ if (length == f->name_length
+ && nxt_memcasecmp(name, f->name, f->name_length) == 0)
+ {
+ return f;
+ }
+
+ } nxt_list_loop;
+
+ return NULL;
+}
+
+
+nxt_int_t
+nxt_http_set_headers(nxt_http_request_t *r)
+{
+ nxt_int_t ret;
+ nxt_uint_t i, n;
+ nxt_str_t *value;
+ nxt_http_field_t *f;
+ nxt_router_conf_t *rtcf;
+ nxt_http_action_t *action;
+ nxt_http_header_val_t *hv, *header;
+
+ action = r->action;
+
+ if (action == NULL || action->set_headers == NULL) {
+ return NXT_OK;
+ }
+
+ if ((r->status < NXT_HTTP_OK || r->status >= NXT_HTTP_BAD_REQUEST)) {
+ return NXT_OK;
+ }
+
+ rtcf = r->conf->socket_conf->router_conf;
+
+ header = action->set_headers->elts;
+ n = action->set_headers->nelts;
+
+ value = nxt_mp_zalloc(r->mem_pool, sizeof(nxt_str_t) * n);
+ if (nxt_slow_path(value == NULL)) {
+ return NXT_ERROR;
+ }
+
+ for (i = 0; i < n; i++) {
+ hv = &header[i];
+
+ if (hv->value == NULL) {
+ continue;
+ }
+
+ if (nxt_tstr_is_const(hv->value)) {
+ nxt_tstr_str(hv->value, &value[i]);
+
+ } else {
+ ret = nxt_tstr_query_init(&r->tstr_query, rtcf->tstr_state,
+ &r->tstr_cache, r, r->mem_pool);
+ if (nxt_slow_path(ret != NXT_OK)) {
+ return NXT_ERROR;
+ }
+
+ nxt_tstr_query(&r->task, r->tstr_query, hv->value, &value[i]);
+
+ if (nxt_slow_path(nxt_tstr_query_failed(r->tstr_query))) {
+ return NXT_ERROR;
+ }
+ }
+ }
+
+ for (i = 0; i < n; i++) {
+ hv = &header[i];
+
+ f = nxt_http_resp_header_find(r, hv->name.start, hv->name.length);
+
+ if (value[i].start != NULL) {
+
+ if (f == NULL) {
+ f = nxt_list_zero_add(r->resp.fields);
+ if (nxt_slow_path(f == NULL)) {
+ return NXT_ERROR;
+ }
+
+ f->name = hv->name.start;
+ f->name_length = hv->name.length;
+ }
+
+ f->value = value[i].start;
+ f->value_length = value[i].length;
+
+ } else if (f != NULL) {
+ f->skip = 1;
+ }
+ }
+
+ return NXT_OK;
+}
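
nxt_http_set_headers() applies the new "response_headers" action member just before the response header is sent: a string value is compiled as a template string and overwrites (or adds) the named field, a null value marks an existing field as skipped, and the whole step is bypassed unless the status is in the 2xx/3xx range. A small illustrative route, keeping in mind that the validator above rejects Content-Length and any value that is neither a string nor null; the share path is hypothetical:

    curl -X PUT --data-binary '[{
        "action": {
            "share": "/www/static$uri",
            "response_headers": {
                "Cache-Control": "max-age=300",
                "X-Powered-By": null
            }
        }
    }]' --unix-socket /var/run/control.unit.sock \
        http://localhost/config/routes
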
diff --git a/src/nxt_http_static.c b/src/nxt_http_static.c
index 5e44aab4..e51ba6b0 100644
--- a/src/nxt_http_static.c
+++ b/src/nxt_http_static.c
@@ -696,6 +696,8 @@ nxt_http_static_next(nxt_task_t *task, nxt_http_request_t *r,
if (nxt_slow_path(r->log_route)) {
nxt_log(task, NXT_LOG_NOTICE, "\"fallback\" taken");
}
+
+ r->action = action->fallback;
nxt_http_request_action(task, r, action->fallback);
return;
}
diff --git a/src/nxt_http_variables.c b/src/nxt_http_variables.c
index b73d9151..46594a6b 100644
--- a/src/nxt_http_variables.c
+++ b/src/nxt_http_variables.c
@@ -5,96 +5,118 @@
#include <nxt_router.h>
#include <nxt_http.h>
+#include <nxt_h1proto.h>
static nxt_int_t nxt_http_var_dollar(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_request_time(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_method(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_request_uri(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_uri(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field);
+ void *data);
static nxt_int_t nxt_http_var_host(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field);
+ void *data);
static nxt_int_t nxt_http_var_remote_addr(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_time_local(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static u_char *nxt_http_log_date(u_char *buf, nxt_realtime_t *now,
struct tm *tm, size_t size, const char *format);
static nxt_int_t nxt_http_var_request_line(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_status(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_body_bytes_sent(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_referer(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_user_agent(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
+static nxt_int_t nxt_http_var_response_connection(nxt_task_t *task,
+ nxt_str_t *str, void *ctx, void *data);
+static nxt_int_t nxt_http_var_response_content_length(nxt_task_t *task,
+ nxt_str_t *str, void *ctx, void *data);
+static nxt_int_t nxt_http_var_response_transfer_encoding(nxt_task_t *task,
+ nxt_str_t *str, void *ctx, void *data);
static nxt_int_t nxt_http_var_arg(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field);
+ void *data);
static nxt_int_t nxt_http_var_header(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
static nxt_int_t nxt_http_var_cookie(nxt_task_t *task, nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
+static nxt_int_t nxt_http_var_response_header(nxt_task_t *task, nxt_str_t *str,
+ void *ctx, void *data);
static nxt_var_decl_t nxt_http_vars[] = {
{
.name = nxt_string("dollar"),
.handler = nxt_http_var_dollar,
+ .cacheable = 1,
}, {
.name = nxt_string("request_time"),
.handler = nxt_http_var_request_time,
+ .cacheable = 1,
}, {
.name = nxt_string("method"),
.handler = nxt_http_var_method,
+ .cacheable = 1,
}, {
.name = nxt_string("request_uri"),
.handler = nxt_http_var_request_uri,
+ .cacheable = 1,
}, {
.name = nxt_string("uri"),
.handler = nxt_http_var_uri,
+ .cacheable = 0,
}, {
.name = nxt_string("host"),
.handler = nxt_http_var_host,
+ .cacheable = 1,
}, {
.name = nxt_string("remote_addr"),
.handler = nxt_http_var_remote_addr,
+ .cacheable = 1,
}, {
.name = nxt_string("time_local"),
.handler = nxt_http_var_time_local,
+ .cacheable = 1,
}, {
.name = nxt_string("request_line"),
.handler = nxt_http_var_request_line,
+ .cacheable = 1,
}, {
.name = nxt_string("status"),
.handler = nxt_http_var_status,
+ .cacheable = 1,
}, {
.name = nxt_string("body_bytes_sent"),
.handler = nxt_http_var_body_bytes_sent,
+ .cacheable = 1,
}, {
.name = nxt_string("header_referer"),
.handler = nxt_http_var_referer,
+ .cacheable = 1,
}, {
- .name = nxt_string("header_user_agent"),
- .handler = nxt_http_var_user_agent,
+ .name = nxt_string("response_header_connection"),
+ .handler = nxt_http_var_response_connection,
+ .cacheable = 1,
}, {
- .name = nxt_string("arg"),
- .handler = nxt_http_var_arg,
- .field_hash = nxt_http_argument_hash,
+ .name = nxt_string("response_header_content_length"),
+ .handler = nxt_http_var_response_content_length,
+ .cacheable = 1,
}, {
- .name = nxt_string("header"),
- .handler = nxt_http_var_header,
- .field_hash = nxt_http_header_hash,
+ .name = nxt_string("response_header_transfer_encoding"),
+ .handler = nxt_http_var_response_transfer_encoding,
+ .cacheable = 1,
}, {
- .name = nxt_string("cookie"),
- .handler = nxt_http_var_cookie,
- .field_hash = nxt_http_cookie_hash,
+ .name = nxt_string("header_user_agent"),
+ .handler = nxt_http_var_user_agent,
+ .cacheable = 1,
},
};
@@ -106,8 +128,99 @@ nxt_http_register_variables(void)
}
+nxt_int_t
+nxt_http_unknown_var_ref(nxt_tstr_state_t *state, nxt_var_ref_t *ref,
+ nxt_str_t *name)
+{
+ int64_t hash;
+ nxt_str_t str, *lower;
+
+ if (nxt_str_start(name, "response_header_", 16)) {
+ ref->handler = nxt_http_var_response_header;
+ ref->cacheable = 0;
+
+ str.start = name->start + 16;
+ str.length = name->length - 16;
+
+ if (str.length == 0) {
+ return NXT_ERROR;
+ }
+
+ lower = nxt_str_alloc(state->pool, str.length);
+ if (nxt_slow_path(lower == NULL)) {
+ return NXT_ERROR;
+ }
+
+ nxt_memcpy_lowcase(lower->start, str.start, str.length);
+
+ ref->data = lower;
+
+ return NXT_OK;
+ }
+
+ if (nxt_str_start(name, "header_", 7)) {
+ ref->handler = nxt_http_var_header;
+ ref->cacheable = 1;
+
+ str.start = name->start + 7;
+ str.length = name->length - 7;
+
+ if (str.length == 0) {
+ return NXT_ERROR;
+ }
+
+ hash = nxt_http_header_hash(state->pool, &str);
+ if (nxt_slow_path(hash == -1)) {
+ return NXT_ERROR;
+ }
+
+ } else if (nxt_str_start(name, "arg_", 4)) {
+ ref->handler = nxt_http_var_arg;
+ ref->cacheable = 1;
+
+ str.start = name->start + 4;
+ str.length = name->length - 4;
+
+ if (str.length == 0) {
+ return NXT_ERROR;
+ }
+
+ hash = nxt_http_argument_hash(state->pool, &str);
+ if (nxt_slow_path(hash == -1)) {
+ return NXT_ERROR;
+ }
+
+ } else if (nxt_str_start(name, "cookie_", 7)) {
+ ref->handler = nxt_http_var_cookie;
+ ref->cacheable = 1;
+
+ str.start = name->start + 7;
+ str.length = name->length - 7;
+
+ if (str.length == 0) {
+ return NXT_ERROR;
+ }
+
+ hash = nxt_http_cookie_hash(state->pool, &str);
+ if (nxt_slow_path(hash == -1)) {
+ return NXT_ERROR;
+ }
+
+ } else {
+ return NXT_ERROR;
+ }
+
+ ref->data = nxt_var_field_new(state->pool, &str, (uint32_t) hash);
+ if (nxt_slow_path(ref->data == NULL)) {
+ return NXT_ERROR;
+ }
+
+ return NXT_OK;
+}
+
+
static nxt_int_t
-nxt_http_var_dollar(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
+nxt_http_var_dollar(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_str_set(str, "$");
@@ -117,7 +230,7 @@ nxt_http_var_dollar(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
static nxt_int_t
nxt_http_var_request_time(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field)
+ void *data)
{
u_char *p;
nxt_msec_t ms;
@@ -144,7 +257,7 @@ nxt_http_var_request_time(nxt_task_t *task, nxt_str_t *str, void *ctx,
static nxt_int_t
-nxt_http_var_method(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
+nxt_http_var_method(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_http_request_t *r;
@@ -158,7 +271,7 @@ nxt_http_var_method(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
static nxt_int_t
nxt_http_var_request_uri(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field)
+ void *data)
{
nxt_http_request_t *r;
@@ -171,7 +284,7 @@ nxt_http_var_request_uri(nxt_task_t *task, nxt_str_t *str, void *ctx,
static nxt_int_t
-nxt_http_var_uri(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
+nxt_http_var_uri(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_http_request_t *r;
@@ -184,7 +297,7 @@ nxt_http_var_uri(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
static nxt_int_t
-nxt_http_var_host(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
+nxt_http_var_host(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_http_request_t *r;
@@ -198,7 +311,7 @@ nxt_http_var_host(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
static nxt_int_t
nxt_http_var_remote_addr(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field)
+ void *data)
{
nxt_http_request_t *r;
@@ -212,8 +325,7 @@ nxt_http_var_remote_addr(nxt_task_t *task, nxt_str_t *str, void *ctx,
static nxt_int_t
-nxt_http_var_time_local(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field)
+nxt_http_var_time_local(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_http_request_t *r;
@@ -271,7 +383,7 @@ nxt_http_log_date(u_char *buf, nxt_realtime_t *now, struct tm *tm,
static nxt_int_t
nxt_http_var_request_line(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field)
+ void *data)
{
nxt_http_request_t *r;
@@ -285,8 +397,9 @@ nxt_http_var_request_line(nxt_task_t *task, nxt_str_t *str, void *ctx,
static nxt_int_t
nxt_http_var_body_bytes_sent(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field)
+ void *data)
{
+ u_char *p;
nxt_off_t bytes;
nxt_http_request_t *r;
@@ -299,16 +412,18 @@ nxt_http_var_body_bytes_sent(nxt_task_t *task, nxt_str_t *str, void *ctx,
bytes = nxt_http_proto[r->protocol].body_bytes_sent(task, r->proto);
- str->length = nxt_sprintf(str->start, str->start + NXT_OFF_T_LEN, "%O",
- bytes) - str->start;
+ p = nxt_sprintf(str->start, str->start + NXT_OFF_T_LEN, "%O", bytes);
+
+ str->length = p - str->start;
return NXT_OK;
}
static nxt_int_t
-nxt_http_var_status(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
+nxt_http_var_status(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
+ u_char *p;
nxt_http_request_t *r;
r = ctx;
@@ -318,16 +433,16 @@ nxt_http_var_status(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
return NXT_ERROR;
}
- str->length = nxt_sprintf(str->start, str->start + 3, "%03d", r->status)
- - str->start;
+ p = nxt_sprintf(str->start, str->start + 3, "%03d", r->status);
+
+ str->length = p - str->start;
return NXT_OK;
}
static nxt_int_t
-nxt_http_var_referer(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field)
+nxt_http_var_referer(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_http_request_t *r;
@@ -346,8 +461,7 @@ nxt_http_var_referer(nxt_task_t *task, nxt_str_t *str, void *ctx,
static nxt_int_t
-nxt_http_var_user_agent(nxt_task_t *task, nxt_str_t *str, void *ctx,
- uint16_t field)
+nxt_http_var_user_agent(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_http_request_t *r;
@@ -366,19 +480,112 @@ nxt_http_var_user_agent(nxt_task_t *task, nxt_str_t *str, void *ctx,
static nxt_int_t
-nxt_http_var_arg(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
+nxt_http_var_response_connection(nxt_task_t *task, nxt_str_t *str, void *ctx,
+ void *data)
+{
+ nxt_int_t conn;
+ nxt_bool_t http11;
+ nxt_h1proto_t *h1p;
+ nxt_http_request_t *r;
+
+ static const nxt_str_t connection[3] = {
+ nxt_string("close"),
+ nxt_string("keep-alive"),
+ nxt_string("Upgrade"),
+ };
+
+ r = ctx;
+ h1p = r->proto.h1;
+
+ conn = -1;
+
+ if (r->websocket_handshake && r->status == NXT_HTTP_SWITCHING_PROTOCOLS) {
+ conn = 2;
+
+ } else {
+ http11 = nxt_h1p_is_http11(h1p);
+
+ if (http11 ^ h1p->keepalive) {
+ conn = h1p->keepalive;
+ }
+ }
+
+ if (conn >= 0) {
+ *str = connection[conn];
+
+ } else {
+ nxt_str_null(str);
+ }
+
+ return NXT_OK;
+}
+
+
+static nxt_int_t
+nxt_http_var_response_content_length(nxt_task_t *task, nxt_str_t *str,
+ void *ctx, void *data)
+{
+ u_char *p;
+ nxt_http_request_t *r;
+
+ r = ctx;
+
+ if (r->resp.content_length != NULL) {
+ str->length = r->resp.content_length->value_length;
+ str->start = r->resp.content_length->value;
+
+ return NXT_OK;
+ }
+
+ if (r->resp.content_length_n >= 0) {
+ str->start = nxt_mp_nget(r->mem_pool, NXT_OFF_T_LEN);
+ if (str->start == NULL) {
+ return NXT_ERROR;
+ }
+
+ p = nxt_sprintf(str->start, str->start + NXT_OFF_T_LEN,
+ "%O", r->resp.content_length_n);
+
+ str->length = p - str->start;
+
+ return NXT_OK;
+ }
+
+ nxt_str_null(str);
+
+ return NXT_OK;
+}
+
+
+static nxt_int_t
+nxt_http_var_response_transfer_encoding(nxt_task_t *task, nxt_str_t *str,
+ void *ctx, void *data)
+{
+ nxt_http_request_t *r;
+
+ r = ctx;
+
+ if (r->proto.h1->chunked) {
+ nxt_str_set(str, "chunked");
+
+ } else {
+ nxt_str_null(str);
+ }
+
+ return NXT_OK;
+}
+
+
+static nxt_int_t
+nxt_http_var_arg(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_array_t *args;
nxt_var_field_t *vf;
- nxt_router_conf_t *rtcf;
nxt_http_request_t *r;
nxt_http_name_value_t *nv, *start;
r = ctx;
-
- rtcf = r->conf->socket_conf->router_conf;
-
- vf = nxt_var_field_get(rtcf->tstr_state->var_fields, field);
+ vf = data;
args = nxt_http_arguments_parse(r);
if (nxt_slow_path(args == NULL)) {
@@ -410,18 +617,14 @@ nxt_http_var_arg(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
static nxt_int_t
-nxt_http_var_header(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
+nxt_http_var_header(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_var_field_t *vf;
nxt_http_field_t *f;
- nxt_router_conf_t *rtcf;
nxt_http_request_t *r;
r = ctx;
-
- rtcf = r->conf->socket_conf->router_conf;
-
- vf = nxt_var_field_get(rtcf->tstr_state->var_fields, field);
+ vf = data;
nxt_list_each(f, r->fields) {
@@ -444,19 +647,15 @@ nxt_http_var_header(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
static nxt_int_t
-nxt_http_var_cookie(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
+nxt_http_var_cookie(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
{
nxt_array_t *cookies;
nxt_var_field_t *vf;
- nxt_router_conf_t *rtcf;
nxt_http_request_t *r;
nxt_http_name_value_t *nv, *end;
r = ctx;
-
- rtcf = r->conf->socket_conf->router_conf;
-
- vf = nxt_var_field_get(rtcf->tstr_state->var_fields, field);
+ vf = data;
cookies = nxt_http_cookies_parse(r);
if (nxt_slow_path(cookies == NULL)) {
@@ -485,3 +684,65 @@ nxt_http_var_cookie(nxt_task_t *task, nxt_str_t *str, void *ctx, uint16_t field)
return NXT_OK;
}
+
+
+static int
+nxt_http_field_name_cmp(nxt_str_t *name, nxt_http_field_t *field)
+{
+ size_t i;
+ u_char c1, c2;
+
+ if (name->length != field->name_length) {
+ return 1;
+ }
+
+ for (i = 0; i < name->length; i++) {
+ c1 = name->start[i];
+ c2 = field->name[i];
+
+ if (c2 >= 'A' && c2 <= 'Z') {
+ c2 |= 0x20;
+
+ } else if (c2 == '-') {
+ c2 = '_';
+ }
+
+ if (c1 != c2) {
+ return 1;
+ }
+ }
+
+ return 0;
+}
+
+
+static nxt_int_t
+nxt_http_var_response_header(nxt_task_t *task, nxt_str_t *str, void *ctx,
+ void *data)
+{
+ nxt_str_t *name;
+ nxt_http_field_t *f;
+ nxt_http_request_t *r;
+
+ r = ctx;
+ name = data;
+
+ nxt_list_each(f, r->resp.fields) {
+
+ if (f->skip) {
+ continue;
+ }
+
+ if (nxt_http_field_name_cmp(name, f) == 0) {
+ str->start = f->value;
+ str->length = f->value_length;
+
+ return NXT_OK;
+ }
+
+ } nxt_list_loop;
+
+ nxt_str_null(str);
+
+ return NXT_OK;
+}
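
With the switch from fixed field_hash entries to nxt_http_unknown_var_ref(), variable names are now resolved by prefix: arg_*, header_*, and cookie_* keep their request-side meaning, while the new response_header_* family reads response fields (the comparison lowercases and maps '-' to '_', and the three late-finalized headers Connection, Content-Length, and Transfer-Encoding get dedicated variables). A speculative example combining them in a route, assuming variable-capable fields such as "pass" and "response_headers" values; the application and header names are illustrative:

    curl -X PUT --data-binary '[{
        "action": {
            "pass": "applications/$arg_app",
            "response_headers": {
                "X-Request-Uri": "$request_uri",
                "X-Upstream-Type": "$response_header_content_type"
            }
        }
    }]' --unix-socket /var/run/control.unit.sock \
        http://localhost/config/routes
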
diff --git a/src/nxt_js.c b/src/nxt_js.c
index df945db6..74663660 100644
--- a/src/nxt_js.c
+++ b/src/nxt_js.c
@@ -386,11 +386,11 @@ nxt_js_call(nxt_task_t *task, nxt_js_conf_t *jcf, nxt_js_cache_t *cache,
nxt_js_t *js, nxt_str_t *str, void *ctx)
{
njs_vm_t *vm;
- njs_int_t rc, ret;
+ njs_int_t ret;
njs_str_t res;
- njs_value_t *array, *value;
+ njs_value_t *value;
njs_function_t *func;
- njs_opaque_value_t opaque_value, arguments[6];
+ njs_opaque_value_t retval, opaque_value, arguments[6];
static const njs_str_t uri_str = njs_str("uri");
static const njs_str_t host_str = njs_str("host");
@@ -407,15 +407,12 @@ nxt_js_call(nxt_task_t *task, nxt_js_conf_t *jcf, nxt_js_cache_t *cache,
return NXT_ERROR;
}
- ret = njs_vm_start(vm);
+ cache->vm = vm;
+
+ ret = njs_vm_start(vm, &cache->array);
if (ret != NJS_OK) {
return NXT_ERROR;
}
-
- array = njs_vm_retval(vm);
-
- cache->vm = vm;
- cache->array = *array;
}
value = njs_vm_array_prop(vm, &cache->array, js->index, &opaque_value);
@@ -463,18 +460,20 @@ nxt_js_call(nxt_task_t *task, nxt_js_conf_t *jcf, nxt_js_cache_t *cache,
return NXT_ERROR;
}
- ret = njs_vm_call(vm, func, njs_value_arg(&arguments), 6);
-
- rc = njs_vm_retval_string(vm, &res);
- if (rc != NJS_OK) {
- return NXT_ERROR;
- }
+ ret = njs_vm_invoke(vm, func, njs_value_arg(&arguments), 6,
+ njs_value_arg(&retval));
if (ret != NJS_OK) {
- nxt_alert(task, "js exception: %V", &res);
+ ret = njs_vm_exception_string(vm, &res);
+ if (ret == NJS_OK) {
+ nxt_alert(task, "js exception: %V", &res);
+ }
+
return NXT_ERROR;
}
+ ret = njs_vm_value_string(vm, &res, njs_value_arg(&retval));
+
str->length = res.length;
str->start = res.start;
@@ -498,7 +497,7 @@ nxt_js_error(njs_vm_t *vm, u_char *error)
njs_str_t res;
nxt_str_t err;
- ret = njs_vm_retval_string(vm, &res);
+ ret = njs_vm_exception_string(vm, &res);
if (nxt_slow_path(ret != NJS_OK)) {
return NXT_ERROR;
}
diff --git a/src/nxt_main.h b/src/nxt_main.h
index a7e0c283..aa96256e 100644
--- a/src/nxt_main.h
+++ b/src/nxt_main.h
@@ -68,6 +68,8 @@ typedef uint16_t nxt_port_id_t;
#include <nxt_sprintf.h>
#include <nxt_parse.h>
+
+typedef struct nxt_tstr_state_s nxt_tstr_state_t;
#include <nxt_var.h>
#include <nxt_tstr.h>
diff --git a/src/nxt_main_process.c b/src/nxt_main_process.c
index 7cba08d4..6622f67e 100644
--- a/src/nxt_main_process.c
+++ b/src/nxt_main_process.c
@@ -323,6 +323,60 @@ static nxt_conf_map_t nxt_java_app_conf[] = {
};
+static nxt_conf_map_t nxt_wasm_app_conf[] = {
+ {
+ nxt_string("module"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.module),
+ },
+ {
+ nxt_string("request_handler"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.request_handler),
+ },
+ {
+ nxt_string("malloc_handler"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.malloc_handler),
+ },
+ {
+ nxt_string("free_handler"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.free_handler),
+ },
+ {
+ nxt_string("module_init_handler"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.module_init_handler),
+ },
+ {
+ nxt_string("module_end_handler"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.module_end_handler),
+ },
+ {
+ nxt_string("request_init_handler"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.request_init_handler),
+ },
+ {
+ nxt_string("request_end_handler"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.request_end_handler),
+ },
+ {
+ nxt_string("response_end_handler"),
+ NXT_CONF_MAP_CSTRZ,
+ offsetof(nxt_common_app_conf_t, u.wasm.response_end_handler),
+ },
+ {
+ nxt_string("access"),
+ NXT_CONF_MAP_PTR,
+ offsetof(nxt_common_app_conf_t, u.wasm.access),
+ },
+};
+
+
static nxt_conf_app_map_t nxt_app_maps[] = {
{ nxt_nitems(nxt_external_app_conf), nxt_external_app_conf },
{ nxt_nitems(nxt_python_app_conf), nxt_python_app_conf },
@@ -330,6 +384,7 @@ static nxt_conf_app_map_t nxt_app_maps[] = {
{ nxt_nitems(nxt_perl_app_conf), nxt_perl_app_conf },
{ nxt_nitems(nxt_ruby_app_conf), nxt_ruby_app_conf },
{ nxt_nitems(nxt_java_app_conf), nxt_java_app_conf },
+ { nxt_nitems(nxt_wasm_app_conf), nxt_wasm_app_conf },
};
diff --git a/src/nxt_router.c b/src/nxt_router.c
index d089cfb8..4e3cb303 100644
--- a/src/nxt_router.c
+++ b/src/nxt_router.c
@@ -274,12 +274,13 @@ static const nxt_str_t http_prefix = nxt_string("HTTP_");
static const nxt_str_t empty_prefix = nxt_string("");
static const nxt_str_t *nxt_app_msg_prefix[] = {
- &empty_prefix,
- &empty_prefix,
- &http_prefix,
- &http_prefix,
- &http_prefix,
- &empty_prefix,
+ [NXT_APP_EXTERNAL] = &empty_prefix,
+ [NXT_APP_PYTHON] = &empty_prefix,
+ [NXT_APP_PHP] = &http_prefix,
+ [NXT_APP_PERL] = &http_prefix,
+ [NXT_APP_RUBY] = &http_prefix,
+ [NXT_APP_JAVA] = &empty_prefix,
+ [NXT_APP_WASM] = &empty_prefix,
};
diff --git a/src/nxt_runtime.c b/src/nxt_runtime.c
index 96f801fb..9bfabc75 100644
--- a/src/nxt_runtime.c
+++ b/src/nxt_runtime.c
@@ -966,6 +966,13 @@ nxt_runtime_conf_read_cmd(nxt_task_t *task, nxt_runtime_t *rt)
"option \"--statedir\" requires directory\n";
static const char no_tmp[] = "option \"--tmpdir\" requires directory\n";
+ static const char modules_deprecated[] =
+ "option \"--modules\" is deprecated; use \"--modulesdir\" instead\n";
+ static const char state_deprecated[] =
+ "option \"--state\" is deprecated; use \"--statedir\" instead\n";
+ static const char tmp_deprecated[] =
+ "option \"--tmp\" is deprecated; use \"--tmpdir\" instead\n";
+
static const char help[] =
"\n"
"unit options:\n"
@@ -992,6 +999,10 @@ nxt_runtime_conf_read_cmd(nxt_task_t *task, nxt_runtime_t *rt)
" --tmpdir DIR set tmp directory name\n"
" default: \"" NXT_TMPDIR "\"\n"
"\n"
+ " --modules DIR [deprecated] synonym for --modulesdir\n"
+ " --state DIR [deprecated] synonym for --statedir\n"
+ " --tmp DIR [deprecated] synonym for --tmpdir\n"
+ "\n"
" --user USER set non-privileged processes to run"
" as specified user\n"
" default: \"" NXT_USER "\"\n"
@@ -1073,7 +1084,14 @@ nxt_runtime_conf_read_cmd(nxt_task_t *task, nxt_runtime_t *rt)
continue;
}
+ if (nxt_strcmp(p, "--modules") == 0) {
+ write(STDERR_FILENO, modules_deprecated,
+ nxt_length(modules_deprecated));
+ goto modulesdir;
+ }
+
if (nxt_strcmp(p, "--modulesdir") == 0) {
+modulesdir:
if (*argv == NULL) {
write(STDERR_FILENO, no_modules, nxt_length(no_modules));
return NXT_ERROR;
@@ -1086,7 +1104,14 @@ nxt_runtime_conf_read_cmd(nxt_task_t *task, nxt_runtime_t *rt)
continue;
}
+ if (nxt_strcmp(p, "--state") == 0) {
+ write(STDERR_FILENO, state_deprecated,
+ nxt_length(state_deprecated));
+ goto statedir;
+ }
+
if (nxt_strcmp(p, "--statedir") == 0) {
+statedir:
if (*argv == NULL) {
write(STDERR_FILENO, no_state, nxt_length(no_state));
return NXT_ERROR;
@@ -1099,7 +1124,13 @@ nxt_runtime_conf_read_cmd(nxt_task_t *task, nxt_runtime_t *rt)
continue;
}
+ if (nxt_strcmp(p, "--tmp") == 0) {
+ write(STDERR_FILENO, tmp_deprecated, nxt_length(tmp_deprecated));
+ goto tmpdir;
+ }
+
if (nxt_strcmp(p, "--tmpdir") == 0) {
+tmpdir:
if (*argv == NULL) {
write(STDERR_FILENO, no_tmp, nxt_length(no_tmp));
return NXT_ERROR;
diff --git a/src/nxt_tstr.c b/src/nxt_tstr.c
index 516415d9..edf6860a 100644
--- a/src/nxt_tstr.c
+++ b/src/nxt_tstr.c
@@ -42,8 +42,8 @@ struct nxt_tstr_query_s {
void *ctx;
void *data;
- nxt_work_handler_t ready;
- nxt_work_handler_t error;
+ nxt_work_handler_t ready;
+ nxt_work_handler_t error;
};
@@ -64,8 +64,8 @@ nxt_tstr_state_new(nxt_mp_t *mp, nxt_bool_t test)
state->pool = mp;
state->test = test;
- state->var_fields = nxt_array_create(mp, 4, sizeof(nxt_var_field_t));
- if (nxt_slow_path(state->var_fields == NULL)) {
+ state->var_refs = nxt_array_create(mp, 4, sizeof(nxt_var_ref_t));
+ if (nxt_slow_path(state->var_refs == NULL)) {
return NULL;
}
@@ -133,8 +133,7 @@ nxt_tstr_compile(nxt_tstr_state_t *state, nxt_str_t *str,
if (p != NULL) {
tstr->type = NXT_TSTR_VAR;
- tstr->u.var = nxt_var_compile(&tstr->str, state->pool,
- state->var_fields);
+ tstr->u.var = nxt_var_compile(state, &tstr->str);
if (nxt_slow_path(tstr->u.var == NULL)) {
return NULL;
}
@@ -168,7 +167,7 @@ nxt_tstr_test(nxt_tstr_state_t *state, nxt_str_t *str, u_char *error)
p = memchr(str->start, '$', str->length);
if (p != NULL) {
- return nxt_var_test(str, state->var_fields, error);
+ return nxt_var_test(state, str, error);
}
}
@@ -263,8 +262,9 @@ nxt_tstr_query(nxt_task_t *task, nxt_tstr_query_t *query, nxt_tstr_t *tstr,
}
if (tstr->type == NXT_TSTR_VAR) {
- ret = nxt_var_interpreter(task, &query->cache->var, tstr->u.var, val,
- query->ctx, tstr->flags & NXT_TSTR_LOGGING);
+ ret = nxt_var_interpreter(task, query->state, &query->cache->var,
+ tstr->u.var, val, query->ctx,
+ tstr->flags & NXT_TSTR_LOGGING);
if (nxt_slow_path(ret != NXT_OK)) {
query->failed = 1;
diff --git a/src/nxt_tstr.h b/src/nxt_tstr.h
index afa7f56d..3e842f81 100644
--- a/src/nxt_tstr.h
+++ b/src/nxt_tstr.h
@@ -13,14 +13,14 @@ typedef struct nxt_tstr_s nxt_tstr_t;
typedef struct nxt_tstr_query_s nxt_tstr_query_t;
-typedef struct {
+struct nxt_tstr_state_s {
nxt_mp_t *pool;
- nxt_array_t *var_fields;
+ nxt_array_t *var_refs;
#if (NXT_HAVE_NJS)
nxt_js_conf_t *jcf;
#endif
uint8_t test; /* 1 bit */
-} nxt_tstr_state_t;
+};
typedef struct {
diff --git a/src/nxt_var.c b/src/nxt_var.c
index e113969f..729de788 100644
--- a/src/nxt_var.c
+++ b/src/nxt_var.c
@@ -50,14 +50,11 @@ struct nxt_var_query_s {
static nxt_int_t nxt_var_hash_test(nxt_lvlhsh_query_t *lhq, void *data);
static nxt_var_decl_t *nxt_var_hash_find(nxt_str_t *name);
-static nxt_var_decl_t *nxt_var_decl_get(nxt_str_t *name, nxt_array_t *fields,
- uint32_t *index);
-static nxt_var_field_t *nxt_var_field_add(nxt_array_t *fields, nxt_str_t *name,
- uint32_t hash);
+static nxt_var_ref_t *nxt_var_ref_get(nxt_tstr_state_t *state, nxt_str_t *name);
static nxt_int_t nxt_var_cache_test(nxt_lvlhsh_query_t *lhq, void *data);
-static nxt_str_t *nxt_var_cache_value(nxt_task_t *task, nxt_var_cache_t *cache,
- uint32_t index, void *ctx);
+static nxt_str_t *nxt_var_cache_value(nxt_task_t *task, nxt_tstr_state_t *state,
+ nxt_var_cache_t *cache, uint32_t index, void *ctx);
static u_char *nxt_var_next_part(u_char *start, u_char *end, nxt_str_t *part);
@@ -80,7 +77,7 @@ static const nxt_lvlhsh_proto_t nxt_var_cache_proto nxt_aligned(64) = {
static nxt_lvlhsh_t nxt_var_hash;
static uint32_t nxt_var_count;
-static nxt_var_handler_t *nxt_var_index;
+static nxt_var_decl_t **nxt_vars;
static nxt_int_t
@@ -111,95 +108,70 @@ nxt_var_hash_find(nxt_str_t *name)
}
-static nxt_var_decl_t *
-nxt_var_decl_get(nxt_str_t *name, nxt_array_t *fields, uint32_t *index)
+static nxt_var_ref_t *
+nxt_var_ref_get(nxt_tstr_state_t *state, nxt_str_t *name)
{
- u_char *p, *end;
- int64_t hash;
- uint16_t field;
- nxt_str_t str;
- nxt_var_decl_t *decl;
- nxt_var_field_t *f;
-
- f = NULL;
- field = 0;
- decl = nxt_var_hash_find(name);
+ nxt_int_t ret;
+ nxt_uint_t i;
+ nxt_var_ref_t *ref;
+ nxt_var_decl_t *decl;
- if (decl == NULL) {
- p = name->start;
- end = p + name->length;
+ ref = state->var_refs->elts;
- while (p < end) {
- if (*p++ == '_') {
- break;
- }
- }
+ for (i = 0; i < state->var_refs->nelts; i++) {
- if (p == end) {
- return NULL;
+ if (nxt_strstr_eq(ref[i].name, name)) {
+ return &ref[i];
}
+ }
- str.start = name->start;
- str.length = p - 1 - name->start;
+ ref = nxt_array_add(state->var_refs);
+ if (nxt_slow_path(ref == NULL)) {
+ return NULL;
+ }
- decl = nxt_var_hash_find(&str);
+ ref->index = state->var_refs->nelts - 1;
- if (decl != NULL) {
- str.start = p;
- str.length = end - p;
+ ref->name = nxt_str_dup(state->pool, NULL, name);
+ if (nxt_slow_path(ref->name == NULL)) {
+ return NULL;
+ }
- hash = decl->field_hash(fields->mem_pool, &str);
- if (nxt_slow_path(hash == -1)) {
- return NULL;
- }
+ decl = nxt_var_hash_find(name);
- f = nxt_var_field_add(fields, &str, (uint32_t) hash);
- if (nxt_slow_path(f == NULL)) {
- return NULL;
- }
+ if (decl != NULL) {
+ ref->handler = decl->handler;
+ ref->cacheable = decl->cacheable;
- field = f->index;
- }
+ return ref;
}
- if (decl != NULL) {
- if (decl->field_hash != NULL && f == NULL) {
- return NULL;
- }
-
- if (index != NULL) {
- *index = (decl->index << 16) | field;
- }
+ ret = nxt_http_unknown_var_ref(state, ref, name);
+ if (nxt_slow_path(ret != NXT_OK)) {
+ return NULL;
}
- return decl;
+ return ref;
}
-static nxt_var_field_t *
-nxt_var_field_add(nxt_array_t *fields, nxt_str_t *name, uint32_t hash)
+nxt_var_field_t *
+nxt_var_field_new(nxt_mp_t *mp, nxt_str_t *name, uint32_t hash)
{
- nxt_uint_t i;
+ nxt_str_t *str;
nxt_var_field_t *field;
- field = fields->elts;
-
- for (i = 0; i < fields->nelts; i++) {
- if (field[i].hash == hash
- && nxt_strstr_eq(&field[i].name, name))
- {
- return field;
- }
+ field = nxt_mp_alloc(mp, sizeof(nxt_var_field_t));
+ if (nxt_slow_path(field == NULL)) {
+ return NULL;
}
- field = nxt_array_add(fields);
- if (nxt_slow_path(field == NULL)) {
+ str = nxt_str_dup(mp, &field->name, name);
+ if (nxt_slow_path(str == NULL)) {
return NULL;
}
- field->name = *name;
field->hash = hash;
- field->index = fields->nelts - 1;
return field;
}
@@ -230,13 +202,17 @@ nxt_var_cache_test(nxt_lvlhsh_query_t *lhq, void *data)
static nxt_str_t *
-nxt_var_cache_value(nxt_task_t *task, nxt_var_cache_t *cache, uint32_t index,
- void *ctx)
+nxt_var_cache_value(nxt_task_t *task, nxt_tstr_state_t *state,
+ nxt_var_cache_t *cache, uint32_t index, void *ctx)
{
nxt_int_t ret;
nxt_str_t *value;
+ nxt_var_ref_t *ref;
nxt_lvlhsh_query_t lhq;
+ ref = state->var_refs->elts;
+ ref = &ref[index];
+
value = cache->spare;
if (value == NULL) {
@@ -248,6 +224,10 @@ nxt_var_cache_value(nxt_task_t *task, nxt_var_cache_t *cache, uint32_t index,
cache->spare = value;
}
+ if (!ref->cacheable) {
+ goto not_cached;
+ }
+
lhq.key_hash = nxt_murmur_hash2_uint32(&index);
lhq.replace = 0;
lhq.key.length = sizeof(uint32_t);
@@ -261,16 +241,20 @@ nxt_var_cache_value(nxt_task_t *task, nxt_var_cache_t *cache, uint32_t index,
return NULL;
}
- if (ret == NXT_OK) {
- ret = nxt_var_index[index >> 16](task, value, ctx, index & 0xffff);
- if (nxt_slow_path(ret != NXT_OK)) {
- return NULL;
- }
+ if (ret == NXT_DECLINED) {
+ return lhq.value;
+ }
+
+not_cached:
- cache->spare = NULL;
+ ret = ref->handler(task, value, ctx, ref->data);
+ if (nxt_slow_path(ret != NXT_OK)) {
+ return NULL;
}
- return lhq.value;
+ cache->spare = NULL;
+
+ return value;
}
@@ -303,12 +287,11 @@ nxt_int_t
nxt_var_index_init(void)
{
nxt_uint_t i;
- nxt_var_decl_t *decl;
- nxt_var_handler_t *index;
+ nxt_var_decl_t *decl, **vars;
nxt_lvlhsh_each_t lhe;
- index = nxt_memalign(64, nxt_var_count * sizeof(nxt_var_handler_t));
- if (index == NULL) {
+ vars = nxt_memalign(64, nxt_var_count * sizeof(nxt_var_decl_t *));
+ if (vars == NULL) {
return NXT_ERROR;
}
@@ -316,27 +299,25 @@ nxt_var_index_init(void)
for (i = 0; i < nxt_var_count; i++) {
decl = nxt_lvlhsh_each(&nxt_var_hash, &lhe);
- decl->index = i;
- index[i] = decl->handler;
+ vars[i] = decl;
}
- nxt_var_index = index;
+ nxt_vars = vars;
return NXT_OK;
}
nxt_var_t *
-nxt_var_compile(nxt_str_t *str, nxt_mp_t *mp, nxt_array_t *fields)
+nxt_var_compile(nxt_tstr_state_t *state, nxt_str_t *str)
{
- u_char *p, *end, *next, *src;
- size_t size;
- uint32_t index;
- nxt_var_t *var;
- nxt_str_t part;
- nxt_uint_t n;
- nxt_var_sub_t *subs;
- nxt_var_decl_t *decl;
+ u_char *p, *end, *next, *src;
+ size_t size;
+ nxt_var_t *var;
+ nxt_str_t part;
+ nxt_uint_t n;
+ nxt_var_sub_t *subs;
+ nxt_var_ref_t *ref;
n = 0;
@@ -356,7 +337,7 @@ nxt_var_compile(nxt_str_t *str, nxt_mp_t *mp, nxt_array_t *fields)
size = sizeof(nxt_var_t) + n * sizeof(nxt_var_sub_t) + str->length;
- var = nxt_mp_get(mp, size);
+ var = nxt_mp_get(state->pool, size);
if (nxt_slow_path(var == NULL)) {
return NULL;
}
@@ -376,12 +357,12 @@ nxt_var_compile(nxt_str_t *str, nxt_mp_t *mp, nxt_array_t *fields)
next = nxt_var_next_part(p, end, &part);
if (part.start != NULL) {
- decl = nxt_var_decl_get(&part, fields, &index);
- if (nxt_slow_path(decl == NULL)) {
+ ref = nxt_var_ref_get(state, &part);
+ if (nxt_slow_path(ref == NULL)) {
return NULL;
}
- subs[n].index = index;
+ subs[n].index = ref->index;
subs[n].length = next - p;
subs[n].position = p - str->start;
@@ -396,11 +377,11 @@ nxt_var_compile(nxt_str_t *str, nxt_mp_t *mp, nxt_array_t *fields)
nxt_int_t
-nxt_var_test(nxt_str_t *str, nxt_array_t *fields, u_char *error)
+nxt_var_test(nxt_tstr_state_t *state, nxt_str_t *str, u_char *error)
{
- u_char *p, *end, *next;
- nxt_str_t part;
- nxt_var_decl_t *decl;
+ u_char *p, *end, *next;
+ nxt_str_t part;
+ nxt_var_ref_t *ref;
p = str->start;
end = p + str->length;
@@ -416,9 +397,9 @@ nxt_var_test(nxt_str_t *str, nxt_array_t *fields, u_char *error)
}
if (part.start != NULL) {
- decl = nxt_var_decl_get(&part, fields, NULL);
+ ref = nxt_var_ref_get(state, &part);
- if (decl == NULL) {
+ if (ref == NULL) {
nxt_sprintf(error, error + NXT_MAX_ERROR_STR,
"Unknown variable \"%V\"%Z", &part);
@@ -504,8 +485,9 @@ nxt_var_next_part(u_char *start, u_char *end, nxt_str_t *part)
nxt_int_t
-nxt_var_interpreter(nxt_task_t *task, nxt_var_cache_t *cache, nxt_var_t *var,
- nxt_str_t *str, void *ctx, nxt_bool_t logging)
+nxt_var_interpreter(nxt_task_t *task, nxt_tstr_state_t *state,
+ nxt_var_cache_t *cache, nxt_var_t *var, nxt_str_t *str, void *ctx,
+ nxt_bool_t logging)
{
u_char *p, *src;
size_t length, last, next;
@@ -522,7 +504,7 @@ nxt_var_interpreter(nxt_task_t *task, nxt_var_cache_t *cache, nxt_var_t *var,
length = var->length;
for (i = 0; i < var->vars; i++) {
- value = nxt_var_cache_value(task, cache, subs[i].index, ctx);
+ value = nxt_var_cache_value(task, state, cache, subs[i].index, ctx);
if (nxt_slow_path(value == NULL)) {
return NXT_ERROR;
}
diff --git a/src/nxt_var.h b/src/nxt_var.h
index ab25800d..fde64f1e 100644
--- a/src/nxt_var.h
+++ b/src/nxt_var.h
@@ -13,22 +13,29 @@ typedef struct nxt_var_query_s nxt_var_query_t;
typedef nxt_int_t (*nxt_var_handler_t)(nxt_task_t *task,
nxt_str_t *str,
- void *ctx, uint16_t field);
+ void *ctx, void *data);
typedef int64_t (*nxt_var_field_hash_t)(nxt_mp_t *mp, nxt_str_t *str);
typedef struct {
nxt_str_t name;
nxt_var_handler_t handler;
- nxt_var_field_hash_t field_hash;
- uint32_t index;
+ uint8_t cacheable; /* 1 bit */
} nxt_var_decl_t;
typedef struct {
+ nxt_str_t *name;
+ nxt_var_handler_t handler;
+ void *data;
+ uint32_t index;
+ uint8_t cacheable; /* 1 bit */
+} nxt_var_ref_t;
+
+
+typedef struct {
nxt_str_t name;
uint16_t hash;
- uint32_t index;
} nxt_var_field_t;
@@ -43,14 +50,20 @@ nxt_int_t nxt_var_register(nxt_var_decl_t *decl, size_t n);
nxt_int_t nxt_var_index_init(void);
nxt_var_field_t *nxt_var_field_get(nxt_array_t *fields, uint16_t index);
+nxt_var_field_t *nxt_var_field_new(nxt_mp_t *mp, nxt_str_t *name,
+ uint32_t hash);
-nxt_var_t *nxt_var_compile(nxt_str_t *str, nxt_mp_t *mp, nxt_array_t *fields);
-nxt_int_t nxt_var_test(nxt_str_t *str, nxt_array_t *fields, u_char *error);
+nxt_var_t *nxt_var_compile(nxt_tstr_state_t *state, nxt_str_t *str);
+nxt_int_t nxt_var_test(nxt_tstr_state_t *state, nxt_str_t *str, u_char *error);
-nxt_int_t nxt_var_interpreter(nxt_task_t *task, nxt_var_cache_t *cache,
- nxt_var_t *var, nxt_str_t *str, void *ctx, nxt_bool_t logging);
+nxt_int_t nxt_var_interpreter(nxt_task_t *task, nxt_tstr_state_t *state,
+ nxt_var_cache_t *cache, nxt_var_t *var, nxt_str_t *str, void *ctx,
+ nxt_bool_t logging);
nxt_str_t *nxt_var_get(nxt_task_t *task, nxt_var_cache_t *cache,
nxt_str_t *name, void *ctx);
+nxt_int_t nxt_http_unknown_var_ref(nxt_tstr_state_t *state, nxt_var_ref_t *ref,
+ nxt_str_t *name);
+
#endif /* _NXT_VAR_H_INCLUDED_ */
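
With the field-hash machinery removed, a variable is now declared with just a name, a handler, and a cacheable flag, and is resolved lazily through nxt_var_ref_get(). A minimal sketch of a registration under the new layout, using only the types and helpers visible in the headers above; the variable name and handler below are hypothetical:

    static nxt_int_t
    nxt_example_var(nxt_task_t *task, nxt_str_t *str, void *ctx, void *data)
    {
        /* Hypothetical handler: fill *str with the value for this request. */
        static nxt_str_t  value = nxt_string("example");

        *str = value;

        return NXT_OK;
    }


    static nxt_var_decl_t  nxt_example_vars[] = {
        {
            .name      = nxt_string("example"),
            .handler   = nxt_example_var,
            .cacheable = 1,   /* value may be reused from the per-request cache */
        },
    };


    /* Registered once at startup, e.g.: */
    /* nxt_var_register(nxt_example_vars, nxt_nitems(nxt_example_vars)); */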
diff --git a/src/python/nxt_python_asgi.c b/src/python/nxt_python_asgi.c
index adf03e2b..8f300b53 100644
--- a/src/python/nxt_python_asgi.c
+++ b/src/python/nxt_python_asgi.c
@@ -450,6 +450,7 @@ static void
nxt_py_asgi_request_handler(nxt_unit_request_info_t *req)
{
PyObject *scope, *res, *task, *receive, *send, *done, *asgi;
+ PyObject *state, *newstate, *lifespan;
PyObject *stage2;
nxt_python_target_t *target;
nxt_py_asgi_ctx_data_t *ctx_data;
@@ -477,7 +478,7 @@ nxt_py_asgi_request_handler(nxt_unit_request_info_t *req)
}
send = PyObject_GetAttrString(asgi, "send");
- if (nxt_slow_path(receive == NULL)) {
+ if (nxt_slow_path(send == NULL)) {
nxt_unit_req_alert(req, "Python failed to get 'send' method");
nxt_unit_request_done(req, NXT_UNIT_ERROR);
@@ -485,7 +486,7 @@ nxt_py_asgi_request_handler(nxt_unit_request_info_t *req)
}
done = PyObject_GetAttrString(asgi, "_done");
- if (nxt_slow_path(receive == NULL)) {
+ if (nxt_slow_path(done == NULL)) {
nxt_unit_req_alert(req, "Python failed to get '_done' method");
nxt_unit_request_done(req, NXT_UNIT_ERROR);
@@ -493,15 +494,41 @@ nxt_py_asgi_request_handler(nxt_unit_request_info_t *req)
}
req->data = asgi;
+ ctx_data = req->ctx->data;
target = &nxt_py_targets->target[req->request->app_target];
+ lifespan = ctx_data->target_lifespans[req->request->app_target];
+ state = PyObject_GetAttr(lifespan, nxt_py_state_str);
+ if (nxt_slow_path(state == NULL)) {
+ nxt_unit_req_alert(req, "Python failed to get 'state' attribute");
+ nxt_unit_request_done(req, NXT_UNIT_ERROR);
+
+ goto release_done;
+ }
+
+ newstate = PyDict_Copy(state);
+ if (nxt_slow_path(newstate == NULL)) {
+ nxt_unit_req_alert(req, "Python failed to call state.copy()");
+ nxt_unit_request_done(req, NXT_UNIT_ERROR);
+ Py_DECREF(state);
+ goto release_done;
+ }
+ Py_DECREF(state);
scope = nxt_py_asgi_create_http_scope(req, target);
if (nxt_slow_path(scope == NULL)) {
nxt_unit_request_done(req, NXT_UNIT_ERROR);
-
+ Py_DECREF(newstate);
goto release_done;
}
+ if (nxt_slow_path(PyDict_SetItem(scope, nxt_py_state_str, newstate)
+ == -1))
+ {
+ Py_DECREF(newstate);
+ goto release_scope;
+ }
+ Py_DECREF(newstate);
+
if (!target->asgi_legacy) {
nxt_unit_req_debug(req, "Python call ASGI 3.0 application");
@@ -555,7 +582,6 @@ nxt_py_asgi_request_handler(nxt_unit_request_info_t *req)
goto release_scope;
}
- ctx_data = req->ctx->data;
task = PyObject_CallFunctionObjArgs(ctx_data->loop_create_task, res, NULL);
if (nxt_slow_path(task == NULL)) {
@@ -828,7 +854,7 @@ nxt_py_asgi_create_address(nxt_unit_sptr_t *sptr, uint8_t len, uint16_t port)
static PyObject *
nxt_py_asgi_create_ip_address(nxt_unit_sptr_t *sptr, uint8_t len, uint16_t port)
{
- char *p, *s;
+ char *p;
PyObject *pair, *v;
pair = PyTuple_New(2);
@@ -837,9 +863,8 @@ nxt_py_asgi_create_ip_address(nxt_unit_sptr_t *sptr, uint8_t len, uint16_t port)
}
p = nxt_unit_sptr_get(sptr);
- s = memchr(p, ':', len);
- v = PyString_FromStringAndSize(p, s == NULL ? len : s - p);
+ v = PyString_FromStringAndSize(p, len);
if (nxt_slow_path(v == NULL)) {
Py_DECREF(pair);
@@ -848,14 +873,7 @@ nxt_py_asgi_create_ip_address(nxt_unit_sptr_t *sptr, uint8_t len, uint16_t port)
PyTuple_SET_ITEM(pair, 0, v);
- if (s != NULL) {
- p += len;
- v = PyLong_FromString(s + 1, &p, 10);
-
- } else {
- v = PyLong_FromLong(port);
- }
-
+ v = PyLong_FromLong(port);
if (nxt_slow_path(v == NULL)) {
Py_DECREF(pair);
diff --git a/src/python/nxt_python_asgi_lifespan.c b/src/python/nxt_python_asgi_lifespan.c
index 1fc0e6b7..041cca21 100644
--- a/src/python/nxt_python_asgi_lifespan.c
+++ b/src/python/nxt_python_asgi_lifespan.c
@@ -12,6 +12,8 @@
#include <python/nxt_python_asgi.h>
#include <python/nxt_python_asgi_str.h>
+#include <structmember.h>
+
typedef struct {
PyObject_HEAD
@@ -25,6 +27,7 @@ typedef struct {
PyObject *startup_future;
PyObject *shutdown_future;
PyObject *receive_future;
+ PyObject *state;
} nxt_py_asgi_lifespan_t;
static PyObject *nxt_py_asgi_lifespan_target_startup(
@@ -41,6 +44,7 @@ static PyObject *nxt_py_asgi_lifespan_send_shutdown(
nxt_py_asgi_lifespan_t *lifespan, int v, PyObject *dict);
static PyObject *nxt_py_asgi_lifespan_disable(nxt_py_asgi_lifespan_t *lifespan);
static PyObject *nxt_py_asgi_lifespan_done(PyObject *self, PyObject *future);
+static void nxt_py_asgi_lifespan_dealloc(PyObject *self);
static PyMethodDef nxt_py_asgi_lifespan_methods[] = {
@@ -50,6 +54,26 @@ static PyMethodDef nxt_py_asgi_lifespan_methods[] = {
{ NULL, NULL, 0, 0 }
};
+static PyMemberDef nxt_py_asgi_lifespan_members[] = {
+ {
+#if PY_VERSION_HEX >= NXT_PYTHON_VER(3, 7)
+ .name = "state",
+#else
+ .name = (char *)"state",
+#endif
+ .type = T_OBJECT_EX,
+ .offset = offsetof(nxt_py_asgi_lifespan_t, state),
+ .flags = READONLY,
+#if PY_VERSION_HEX >= NXT_PYTHON_VER(3, 7)
+ .doc = PyDoc_STR("lifespan.state")
+#else
+ .doc = (char *)PyDoc_STR("lifespan.state")
+#endif
+ },
+
+ { NULL, 0, 0, 0, NULL }
+};
+
static PyAsyncMethods nxt_py_asgi_async_methods = {
.am_await = nxt_py_asgi_await,
};
@@ -59,13 +83,14 @@ static PyTypeObject nxt_py_asgi_lifespan_type = {
.tp_name = "unit._asgi_lifespan",
.tp_basicsize = sizeof(nxt_py_asgi_lifespan_t),
- .tp_dealloc = nxt_py_asgi_dealloc,
+ .tp_dealloc = nxt_py_asgi_lifespan_dealloc,
.tp_as_async = &nxt_py_asgi_async_methods,
.tp_flags = Py_TPFLAGS_DEFAULT,
.tp_doc = "unit ASGI Lifespan object",
.tp_iter = nxt_py_asgi_iter,
.tp_iternext = nxt_py_asgi_next,
.tp_methods = nxt_py_asgi_lifespan_methods,
+ .tp_members = nxt_py_asgi_lifespan_members,
};
@@ -163,12 +188,29 @@ nxt_py_asgi_lifespan_target_startup(nxt_py_asgi_ctx_data_t *ctx_data,
lifespan->shutdown_called = 0;
lifespan->shutdown_future = NULL;
lifespan->receive_future = NULL;
+ lifespan->state = NULL;
scope = nxt_py_asgi_new_scope(NULL, nxt_py_lifespan_str, nxt_py_2_0_str);
if (nxt_slow_path(scope == NULL)) {
goto release_future;
}
+ lifespan->state = PyDict_New();
+ if (nxt_slow_path(lifespan->state == NULL)) {
+ nxt_unit_req_error(NULL,
+ "Python failed to create 'state' dict");
+ goto release_future;
+ }
+
+ if (nxt_slow_path(PyDict_SetItem(scope, nxt_py_state_str,
+ lifespan->state) == -1))
+ {
+ nxt_unit_req_error(NULL,
+ "Python failed to set 'scope.state' item");
+ Py_CLEAR(lifespan->state);
+ goto release_future;
+ }
+
if (!target->asgi_legacy) {
nxt_unit_req_debug(NULL, "Python call ASGI 3.0 application");
@@ -604,4 +646,14 @@ nxt_py_asgi_lifespan_done(PyObject *self, PyObject *future)
}
+static void
+nxt_py_asgi_lifespan_dealloc(PyObject *self)
+{
+ nxt_py_asgi_lifespan_t *lifespan = (nxt_py_asgi_lifespan_t *)self;
+
+ Py_CLEAR(lifespan->state);
+ PyObject_Del(self);
+}
+
+
#endif /* NXT_HAVE_ASGI */
diff --git a/src/python/nxt_python_asgi_str.c b/src/python/nxt_python_asgi_str.c
index 7171d52b..3bea87d5 100644
--- a/src/python/nxt_python_asgi_str.c
+++ b/src/python/nxt_python_asgi_str.c
@@ -55,6 +55,7 @@ PyObject *nxt_py_subprotocol_str;
PyObject *nxt_py_subprotocols_str;
PyObject *nxt_py_text_str;
PyObject *nxt_py_type_str;
+PyObject *nxt_py_state_str;
PyObject *nxt_py_version_str;
PyObject *nxt_py_websocket_str;
PyObject *nxt_py_websocket_accept_str;
@@ -110,6 +111,7 @@ static nxt_python_string_t nxt_py_asgi_strings[] = {
{ nxt_string("subprotocols"), &nxt_py_subprotocols_str },
{ nxt_string("text"), &nxt_py_text_str },
{ nxt_string("type"), &nxt_py_type_str },
+ { nxt_string("state"), &nxt_py_state_str },
{ nxt_string("version"), &nxt_py_version_str },
{ nxt_string("websocket"), &nxt_py_websocket_str },
{ nxt_string("websocket.accept"), &nxt_py_websocket_accept_str },
diff --git a/src/python/nxt_python_asgi_str.h b/src/python/nxt_python_asgi_str.h
index 92969fd2..3c7a3ed9 100644
--- a/src/python/nxt_python_asgi_str.h
+++ b/src/python/nxt_python_asgi_str.h
@@ -50,6 +50,7 @@ extern PyObject *nxt_py_subprotocol_str;
extern PyObject *nxt_py_subprotocols_str;
extern PyObject *nxt_py_text_str;
extern PyObject *nxt_py_type_str;
+extern PyObject *nxt_py_state_str;
extern PyObject *nxt_py_version_str;
extern PyObject *nxt_py_websocket_str;
extern PyObject *nxt_py_websocket_accept_str;
diff --git a/src/test/nxt_unit_app_test.c b/src/test/nxt_unit_app_test.c
index d83bd83a..5dcebe18 100644
--- a/src/test/nxt_unit_app_test.c
+++ b/src/test/nxt_unit_app_test.c
@@ -257,8 +257,8 @@ greeting_app_request_handler(nxt_unit_request_info_t *req)
if (r->content_length > 0) {
p = copy(p, BODY, nxt_length(BODY));
- res = nxt_unit_request_read(req, buf->free, buf->end - buf->free);
- buf->free += res;
+ res = nxt_unit_request_read(req, p, buf->end - p);
+ p += res;
}
diff --git a/src/unit.pc.in b/src/unit.pc.in
new file mode 100644
index 00000000..4de0556f
--- /dev/null
+++ b/src/unit.pc.in
@@ -0,0 +1,11 @@
+prefix=@PREFIX@
+libdir=@LIBDIR@
+confargs=@CONFARGS@
+modulesdir=@MODULESDIR@
+
+Name: unit
+Description: library to embed Unit
+Version: @VERSION@
+URL: https://unit.nginx.org
+Cflags: @CFLAGS@
+Libs: -L${libdir} -lunit @EXTRA_LIBS@
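
The new pkg-config file targets applications that embed Unit through libunit ("cc app.c $(pkg-config --cflags --libs unit)" or equivalent). A rough sketch of such an embedding, assuming only the libunit entry points that other parts of this patch already use (nxt_unit_init(), nxt_unit_run(), nxt_unit_response_write(), nxt_unit_request_done()); the handler and its behaviour are illustrative:

    #include <string.h>

    #include <nxt_unit.h>
    #include <nxt_unit_request.h>


    static void
    app_request_handler(nxt_unit_request_info_t *req)
    {
        /* Minimal response: status line plus a short body. */
        nxt_unit_response_init(req, 200, 0, 0);
        nxt_unit_response_write(req, "hello\n", 6);
        nxt_unit_request_done(req, NXT_UNIT_OK);
    }


    int
    main(void)
    {
        nxt_unit_ctx_t   *ctx;
        nxt_unit_init_t  init;

        memset(&init, 0, sizeof(nxt_unit_init_t));
        init.callbacks.request_handler = app_request_handler;

        ctx = nxt_unit_init(&init);
        if (ctx == NULL) {
            return 1;
        }

        nxt_unit_run(ctx);    /* serve requests until Unit asks us to stop */
        nxt_unit_done(ctx);

        return 0;
    }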
diff --git a/src/wasm/nxt_rt_wasmtime.c b/src/wasm/nxt_rt_wasmtime.c
new file mode 100644
index 00000000..99786b89
--- /dev/null
+++ b/src/wasm/nxt_rt_wasmtime.c
@@ -0,0 +1,412 @@
+/*
+ * Copyright (C) Andrew Clayton
+ * Copyright (C) F5, Inc.
+ */
+
+#include <stdio.h>
+#include <stdbool.h>
+#include <stdarg.h>
+
+#include <wasm.h>
+#include <wasi.h>
+#include <wasmtime.h>
+
+#include "nxt_wasm.h"
+
+
+typedef struct nxt_wasmtime_ctx_s nxt_wasmtime_ctx_t;
+
+struct nxt_wasmtime_ctx_s {
+ wasm_engine_t *engine;
+ wasmtime_store_t *store;
+ wasmtime_memory_t memory;
+ wasmtime_module_t *module;
+ wasmtime_linker_t *linker;
+ wasmtime_context_t *ctx;
+};
+
+static nxt_wasmtime_ctx_t nxt_wasmtime_ctx;
+
+
+static void
+nxt_wasmtime_err_msg(wasmtime_error_t *error, wasm_trap_t *trap,
+ const char *fmt, ...)
+{
+ va_list args;
+ wasm_byte_vec_t error_message;
+
+ fprintf(stderr, "WASMTIME ERROR: ");
+ va_start(args, fmt);
+ vfprintf(stderr, fmt, args);
+ va_end(args);
+ fprintf(stderr, "\n");
+
+ if (error == NULL && trap == NULL) {
+ return;
+ }
+
+ if (error != NULL) {
+ wasmtime_error_message(error, &error_message);
+ wasmtime_error_delete(error);
+ } else {
+ wasm_trap_message(trap, &error_message);
+ wasm_trap_delete(trap);
+ }
+ fprintf(stderr, "%.*s\n", (int)error_message.size, error_message.data);
+
+ wasm_byte_vec_delete(&error_message);
+}
+
+
+static wasm_trap_t *
+nxt_wasm_get_init_mem_size(void *env, wasmtime_caller_t *caller,
+ const wasmtime_val_t *args, size_t nargs,
+ wasmtime_val_t *results, size_t nresults)
+{
+ results[0].of.i32 = NXT_WASM_MEM_SIZE;
+
+ return NULL;
+}
+
+
+static wasm_trap_t *
+nxt_wasm_response_end(void *env, wasmtime_caller_t *caller,
+ const wasmtime_val_t *args, size_t nargs,
+ wasmtime_val_t *results, size_t nresults)
+{
+ nxt_wasm_do_response_end(env);
+
+ return NULL;
+}
+
+
+static wasm_trap_t *
+nxt_wasm_send_response(void *env, wasmtime_caller_t *caller,
+ const wasmtime_val_t *args, size_t nargs,
+ wasmtime_val_t *results, size_t nresults)
+{
+ nxt_wasm_do_send_response(env, args[0].of.i32);
+
+ return NULL;
+}
+
+
+static wasm_trap_t *
+nxt_wasm_send_headers(void *env, wasmtime_caller_t *caller,
+ const wasmtime_val_t *args, size_t nargs,
+ wasmtime_val_t *results, size_t nresults)
+{
+ nxt_wasm_do_send_headers(env, args[0].of.i32);
+
+ return NULL;
+}
+
+
+static void
+nxt_wasmtime_execute_hook(const nxt_wasm_ctx_t *ctx, nxt_wasm_fh_t hook)
+{
+ const char *name = ctx->fh[hook].func_name;
+ wasm_trap_t *trap = NULL;
+ wasmtime_error_t *error;
+ nxt_wasmtime_ctx_t *rt_ctx = &nxt_wasmtime_ctx;
+ const nxt_wasm_func_t *func = &ctx->fh[hook].func;
+
+ if (name == NULL) {
+ return;
+ }
+
+ error = wasmtime_func_call(rt_ctx->ctx, func, NULL, 0, NULL, 0, &trap);
+ if (error != NULL || trap != NULL) {
+ nxt_wasmtime_err_msg(error, trap, "failed to call hook function [%s]",
+ name);
+ }
+}
+
+
+static void
+nxt_wasmtime_execute_request(const nxt_wasm_ctx_t *ctx)
+{
+ int i = 0;
+ wasm_trap_t *trap = NULL;
+ wasmtime_val_t args[1] = { };
+ wasmtime_val_t results[1] = { };
+ wasmtime_error_t *error;
+ nxt_wasmtime_ctx_t *rt_ctx = &nxt_wasmtime_ctx;
+ const nxt_wasm_func_t *func = &ctx->fh[NXT_WASM_FH_REQUEST].func;
+
+ args[i].kind = WASMTIME_I32;
+ args[i++].of.i32 = ctx->baddr_off;
+
+ error = wasmtime_func_call(rt_ctx->ctx, func, args, i, results, 1, &trap);
+ if (error != NULL || trap != NULL) {
+ nxt_wasmtime_err_msg(error, trap,
+ "failed to call function [->wasm_request_handler]"
+ );
+ }
+}
+
+
+static void
+nxt_wasmtime_set_function_imports(nxt_wasm_ctx_t *ctx)
+{
+ nxt_wasmtime_ctx_t *rt_ctx = &nxt_wasmtime_ctx;
+
+ static const struct {
+ const char *func_name;
+
+ wasmtime_func_callback_t func;
+ wasm_valkind_t params[1];
+ wasm_valkind_t results[1];
+
+ enum {
+ NXT_WASM_FT_0_0,
+ NXT_WASM_FT_1_0,
+ NXT_WASM_FT_0_1,
+ } ft;
+ } import_functions[] = {
+ {
+ .func_name = "nxt_wasm_get_init_mem_size",
+ .func = nxt_wasm_get_init_mem_size,
+ .results = { WASM_I32 },
+ .ft = NXT_WASM_FT_0_1
+ }, {
+ .func_name = "nxt_wasm_response_end",
+ .func = nxt_wasm_response_end,
+ .ft = NXT_WASM_FT_0_0
+ }, {
+ .func_name = "nxt_wasm_send_response",
+ .func = nxt_wasm_send_response,
+ .params = { WASM_I32 },
+ .ft = NXT_WASM_FT_1_0
+ }, {
+ .func_name = "nxt_wasm_send_headers",
+ .func = nxt_wasm_send_headers,
+ .params = { WASM_I32 },
+ .ft = NXT_WASM_FT_1_0
+ },
+
+ { }
+ }, *imf;
+
+ for (imf = import_functions; imf->func_name != NULL; imf++) {
+ wasm_functype_t *func_ty;
+
+ switch (imf->ft) {
+ case NXT_WASM_FT_0_0:
+ func_ty = wasm_functype_new_0_0();
+ break;
+ case NXT_WASM_FT_1_0:
+ func_ty = wasm_functype_new_1_0(wasm_valtype_new(imf->params[0]));
+ break;
+ case NXT_WASM_FT_0_1:
+ func_ty = wasm_functype_new_0_1(wasm_valtype_new(imf->results[0]));
+ break;
+ default:
+ /* Stop GCC complaining about func_ty being used uninitialised */
+ func_ty = NULL;
+ }
+
+ wasmtime_linker_define_func(rt_ctx->linker, "env", 3,
+ imf->func_name, strlen(imf->func_name),
+ func_ty, imf->func, ctx, NULL);
+ wasm_functype_delete(func_ty);
+ }
+}
+
+
+static int
+nxt_wasmtime_get_function_exports(nxt_wasm_ctx_t *ctx)
+{
+ int i;
+ nxt_wasmtime_ctx_t *rt_ctx = &nxt_wasmtime_ctx;
+
+ for (i = 0; i < NXT_WASM_FH_NR; i++) {
+ bool ok;
+ wasmtime_extern_t item;
+
+ if (ctx->fh[i].func_name == NULL) {
+ continue;
+ }
+
+ ok = wasmtime_linker_get(rt_ctx->linker, rt_ctx->ctx, "", 0,
+ ctx->fh[i].func_name,
+ strlen(ctx->fh[i].func_name), &item);
+ if (!ok) {
+ nxt_wasmtime_err_msg(NULL, NULL,
+ "couldn't get (%s) export from module",
+ ctx->fh[i].func_name);
+ return -1;
+ }
+ ctx->fh[i].func = item.of.func;
+ }
+
+ return 0;
+}
+
+
+static int
+nxt_wasmtime_wasi_init(const nxt_wasm_ctx_t *ctx)
+{
+ char **dir;
+ wasi_config_t *wasi_config;
+ wasmtime_error_t *error;
+ nxt_wasmtime_ctx_t *rt_ctx = &nxt_wasmtime_ctx;
+
+ wasi_config = wasi_config_new();
+
+ wasi_config_inherit_env(wasi_config);
+ wasi_config_inherit_stdin(wasi_config);
+ wasi_config_inherit_stdout(wasi_config);
+ wasi_config_inherit_stderr(wasi_config);
+
+ for (dir = ctx->dirs; dir != NULL && *dir != NULL; dir++) {
+ wasi_config_preopen_dir(wasi_config, *dir, *dir);
+ }
+
+ error = wasmtime_context_set_wasi(rt_ctx->ctx, wasi_config);
+ if (error != NULL) {
+ nxt_wasmtime_err_msg(error, NULL, "failed to instantiate WASI");
+ return -1;
+ }
+
+ return 0;
+}
+
+
+static int
+nxt_wasmtime_init_memory(nxt_wasm_ctx_t *ctx)
+{
+ int i = 0;
+ bool ok;
+ wasm_trap_t *trap = NULL;
+ wasmtime_val_t args[1] = { };
+ wasmtime_val_t results[1] = { };
+ wasmtime_error_t *error;
+ wasmtime_extern_t item;
+ nxt_wasmtime_ctx_t *rt_ctx = &nxt_wasmtime_ctx;
+ const nxt_wasm_func_t *func = &ctx->fh[NXT_WASM_FH_MALLOC].func;
+
+ args[i].kind = WASMTIME_I32;
+ args[i++].of.i32 = NXT_WASM_MEM_SIZE + NXT_WASM_PAGE_SIZE;
+
+ error = wasmtime_func_call(rt_ctx->ctx, func, args, i, results, 1, &trap);
+ if (error != NULL || trap != NULL) {
+ nxt_wasmtime_err_msg(error, trap,
+ "failed to call function [->wasm_malloc_handler]"
+ );
+ return -1;
+ }
+
+ ok = wasmtime_linker_get(rt_ctx->linker, rt_ctx->ctx, "", 0, "memory",
+ strlen("memory"), &item);
+ if (!ok) {
+ nxt_wasmtime_err_msg(NULL, NULL, "couldn't get 'memory' from module\n");
+ return -1;
+ }
+ rt_ctx->memory = item.of.memory;
+
+ ctx->baddr_off = results[0].of.i32;
+ ctx->baddr = wasmtime_memory_data(rt_ctx->ctx, &rt_ctx->memory);
+
+ ctx->baddr += ctx->baddr_off;
+
+ return 0;
+}
+
+
+static int
+nxt_wasmtime_init(nxt_wasm_ctx_t *ctx)
+{
+ int err;
+ FILE *fp;
+ size_t file_size;
+ wasm_byte_vec_t wasm;
+ wasmtime_error_t *error;
+ nxt_wasmtime_ctx_t *rt_ctx = &nxt_wasmtime_ctx;
+
+ rt_ctx->engine = wasm_engine_new();
+ rt_ctx->store = wasmtime_store_new(rt_ctx->engine, NULL, NULL);
+ rt_ctx->ctx = wasmtime_store_context(rt_ctx->store);
+
+ rt_ctx->linker = wasmtime_linker_new(rt_ctx->engine);
+ error = wasmtime_linker_define_wasi(rt_ctx->linker);
+ if (error != NULL) {
+ nxt_wasmtime_err_msg(error, NULL, "failed to link wasi");
+ return -1;
+ }
+
+ fp = fopen(ctx->module_path, "r");
+ if (!fp) {
+ nxt_wasmtime_err_msg(NULL, NULL,
+ "error opening file (%s)", ctx->module_path);
+ return -1;
+ }
+ fseek(fp, 0L, SEEK_END);
+ file_size = ftell(fp);
+ wasm_byte_vec_new_uninitialized(&wasm, file_size);
+ fseek(fp, 0L, SEEK_SET);
+ if (fread(wasm.data, file_size, 1, fp) != 1) {
+ nxt_wasmtime_err_msg(NULL, NULL, "error loading module");
+ fclose(fp);
+ return -1;
+ }
+ fclose(fp);
+
+ error = wasmtime_module_new(rt_ctx->engine, (uint8_t *)wasm.data, wasm.size,
+ &rt_ctx->module);
+ if (!rt_ctx->module) {
+ nxt_wasmtime_err_msg(error, NULL, "failed to compile module");
+ return -1;
+ }
+ wasm_byte_vec_delete(&wasm);
+
+ nxt_wasmtime_set_function_imports(ctx);
+
+ nxt_wasmtime_wasi_init(ctx);
+
+ error = wasmtime_linker_module(rt_ctx->linker, rt_ctx->ctx, "", 0,
+ rt_ctx->module);
+ if (error != NULL) {
+ nxt_wasmtime_err_msg(error, NULL, "failed to instantiate");
+ return -1;
+ }
+
+ err = nxt_wasmtime_get_function_exports(ctx);
+ if (err) {
+ return -1;
+ }
+
+ err = nxt_wasmtime_init_memory(ctx);
+ if (err) {
+ return -1;
+ }
+
+ return 0;
+}
+
+
+static void
+nxt_wasmtime_destroy(const nxt_wasm_ctx_t *ctx)
+{
+ int i = 0;
+ wasmtime_val_t args[1] = { };
+ nxt_wasmtime_ctx_t *rt_ctx = &nxt_wasmtime_ctx;
+ const nxt_wasm_func_t *func = &ctx->fh[NXT_WASM_FH_FREE].func;
+
+ args[i].kind = WASMTIME_I32;
+ args[i++].of.i32 = ctx->baddr_off;
+
+ wasmtime_func_call(rt_ctx->ctx, func, args, i, NULL, 0, NULL);
+
+ wasmtime_module_delete(rt_ctx->module);
+ wasmtime_store_delete(rt_ctx->store);
+ wasm_engine_delete(rt_ctx->engine);
+}
+
+
+const nxt_wasm_operations_t nxt_wasm_ops = {
+ .init = nxt_wasmtime_init,
+ .destroy = nxt_wasmtime_destroy,
+ .exec_request = nxt_wasmtime_execute_request,
+ .exec_hook = nxt_wasmtime_execute_hook,
+};
diff --git a/src/wasm/nxt_wasm.c b/src/wasm/nxt_wasm.c
new file mode 100644
index 00000000..45a40b4b
--- /dev/null
+++ b/src/wasm/nxt_wasm.c
@@ -0,0 +1,296 @@
+/*
+ * Copyright (C) Andrew Clayton
+ * Copyright (C) F5, Inc.
+ */
+
+#include <nxt_main.h>
+#include <nxt_application.h>
+#include <nxt_unit.h>
+#include <nxt_unit_request.h>
+
+#include "nxt_wasm.h"
+
+
+#define NXT_WASM_VERSION "0.1"
+
+#define NXT_WASM_DO_HOOK(hook) nxt_wops->exec_hook(&nxt_wasm_ctx, hook);
+
+
+static uint32_t compat[] = {
+ NXT_VERNUM, NXT_DEBUG,
+};
+
+static nxt_wasm_ctx_t nxt_wasm_ctx;
+
+static const nxt_wasm_operations_t *nxt_wops;
+
+
+void
+nxt_wasm_do_response_end(nxt_wasm_ctx_t *ctx)
+{
+ nxt_unit_request_done(ctx->req, NXT_UNIT_OK);
+
+ NXT_WASM_DO_HOOK(NXT_WASM_FH_RESPONSE_END);
+}
+
+
+void
+nxt_wasm_do_send_headers(nxt_wasm_ctx_t *ctx, uint32_t offset)
+{
+ size_t fields_len;
+ unsigned int i;
+ nxt_wasm_response_fields_t *rh;
+
+ rh = (nxt_wasm_response_fields_t *)(ctx->baddr + offset);
+
+ fields_len = 0;
+ for (i = 0; i < rh->nfields; i++) {
+ fields_len += rh->fields[i].name_len + rh->fields[i].value_len;
+ }
+
+ nxt_unit_response_init(ctx->req, 200, rh->nfields, fields_len);
+
+ for (i = 0; i < rh->nfields; i++) {
+ const char *name;
+ const char *val;
+
+ name = (const char *)rh + rh->fields[i].name_off;
+ val = (const char *)rh + rh->fields[i].value_off;
+
+ nxt_unit_response_add_field(ctx->req, name, rh->fields[i].name_len,
+ val, rh->fields[i].value_len);
+ }
+
+ nxt_unit_response_send(ctx->req);
+}
+
+
+void
+nxt_wasm_do_send_response(nxt_wasm_ctx_t *ctx, uint32_t offset)
+{
+ nxt_wasm_response_t *resp;
+ nxt_unit_request_info_t *req = ctx->req;
+
+ if (!nxt_unit_response_is_init(req)) {
+ nxt_unit_response_init(req, 200, 0, 0);
+ }
+
+ resp = (nxt_wasm_response_t *)(nxt_wasm_ctx.baddr + offset);
+
+ nxt_unit_response_write(req, (const char *)resp->data, resp->size);
+}
+
+
+static void
+nxt_wasm_request_handler(nxt_unit_request_info_t *req)
+{
+ size_t offset, read_bytes, content_sent, content_len;
+ ssize_t bytes_read;
+ nxt_unit_field_t *sf, *sf_end;
+ nxt_unit_request_t *r;
+ nxt_wasm_request_t *wr;
+ nxt_wasm_http_field_t *df;
+
+ NXT_WASM_DO_HOOK(NXT_WASM_FH_REQUEST_INIT);
+
+ wr = (nxt_wasm_request_t *)nxt_wasm_ctx.baddr;
+
+#define SET_REQ_MEMBER(dmember, smember) \
+ do { \
+ const char *str = nxt_unit_sptr_get(&r->smember); \
+ wr->dmember##_off = offset; \
+ wr->dmember##_len = strlen(str); \
+ memcpy((uint8_t *)wr + offset, str, wr->dmember##_len + 1); \
+ offset += wr->dmember##_len + 1; \
+ } while (0)
+
+ r = req->request;
+ offset = sizeof(nxt_wasm_request_t)
+ + (r->fields_count * sizeof(nxt_wasm_http_field_t));
+
+ SET_REQ_MEMBER(path, path);
+ SET_REQ_MEMBER(method, method);
+ SET_REQ_MEMBER(version, version);
+ SET_REQ_MEMBER(query, query);
+ SET_REQ_MEMBER(remote, remote);
+ SET_REQ_MEMBER(local_addr, local_addr);
+ SET_REQ_MEMBER(local_port, local_port);
+ SET_REQ_MEMBER(server_name, server_name);
+#undef SET_REQ_MEMBER
+
+ df = wr->fields;
+ sf_end = r->fields + r->fields_count;
+ for (sf = r->fields; sf < sf_end; sf++) {
+ const char *name = nxt_unit_sptr_get(&sf->name);
+ const char *value = nxt_unit_sptr_get(&sf->value);
+
+ df->name_off = offset;
+ df->name_len = strlen(name);
+ memcpy((uint8_t *)wr + offset, name, df->name_len + 1);
+ offset += df->name_len + 1;
+
+ df->value_off = offset;
+ df->value_len = strlen(value);
+ memcpy((uint8_t *)wr + offset, value, df->value_len + 1);
+ offset += df->value_len + 1;
+
+ df++;
+ }
+
+ wr->tls = r->tls;
+ wr->nfields = r->fields_count;
+ wr->content_off = offset;
+ wr->content_len = content_len = r->content_length;
+
+ read_bytes = nxt_min(wr->content_len, NXT_WASM_MEM_SIZE - offset);
+
+ bytes_read = nxt_unit_request_read(req, (uint8_t *)wr + offset, read_bytes);
+ wr->content_sent = wr->total_content_sent = content_sent = bytes_read;
+
+ wr->request_size = offset + bytes_read;
+
+ nxt_wasm_ctx.req = req;
+ nxt_wops->exec_request(&nxt_wasm_ctx);
+
+ if (content_len == content_sent) {
+ goto request_done;
+ }
+
+ wr->nfields = 0;
+ wr->content_off = offset = sizeof(nxt_wasm_request_t);
+ do {
+ read_bytes = nxt_min(content_len - content_sent,
+ NXT_WASM_MEM_SIZE - offset);
+ bytes_read = nxt_unit_request_read(req, (uint8_t *)wr + offset,
+ read_bytes);
+
+ content_sent += bytes_read;
+ wr->request_size = wr->content_sent = bytes_read;
+ wr->total_content_sent = content_sent;
+
+ nxt_wops->exec_request(&nxt_wasm_ctx);
+ } while (content_sent < content_len);
+
+request_done:
+ NXT_WASM_DO_HOOK(NXT_WASM_FH_REQUEST_END);
+}
+
+
+static nxt_int_t
+nxt_wasm_start(nxt_task_t *task, nxt_process_data_t *data)
+{
+ nxt_int_t ret;
+ nxt_unit_ctx_t *unit_ctx;
+ nxt_unit_init_t wasm_init;
+ nxt_common_app_conf_t *conf;
+
+ conf = data->app;
+
+ ret = nxt_unit_default_init(task, &wasm_init, conf);
+ if (nxt_slow_path(ret != NXT_OK)) {
+ nxt_alert(task, "nxt_unit_default_init() failed");
+ return ret;
+ }
+
+ wasm_init.callbacks.request_handler = nxt_wasm_request_handler;
+
+ unit_ctx = nxt_unit_init(&wasm_init);
+ if (nxt_slow_path(unit_ctx == NULL)) {
+ return NXT_ERROR;
+ }
+
+ NXT_WASM_DO_HOOK(NXT_WASM_FH_MODULE_INIT);
+ nxt_unit_run(unit_ctx);
+ nxt_unit_done(unit_ctx);
+ NXT_WASM_DO_HOOK(NXT_WASM_FH_MODULE_END);
+
+ if (nxt_wasm_ctx.dirs != NULL) {
+ char **p;
+
+ for (p = nxt_wasm_ctx.dirs; *p != NULL; p++) {
+ nxt_free(*p);
+ }
+ nxt_free(nxt_wasm_ctx.dirs);
+ }
+
+ nxt_wops->destroy(&nxt_wasm_ctx);
+
+ exit(EXIT_SUCCESS);
+}
+
+
+static nxt_int_t
+nxt_wasm_setup(nxt_task_t *task, nxt_process_t *process,
+ nxt_common_app_conf_t *conf)
+{
+ int n, i, err;
+ nxt_conf_value_t *dirs = NULL;
+ nxt_wasm_app_conf_t *c;
+ nxt_wasm_func_handler_t *fh;
+ static nxt_str_t filesystem_str = nxt_string("filesystem");
+
+ c = &conf->u.wasm;
+
+ nxt_wops = &nxt_wasm_ops;
+
+ nxt_wasm_ctx.module_path = c->module;
+
+ fh = nxt_wasm_ctx.fh;
+
+ fh[NXT_WASM_FH_REQUEST].func_name = c->request_handler;
+ fh[NXT_WASM_FH_MALLOC].func_name = c->malloc_handler;
+ fh[NXT_WASM_FH_FREE].func_name = c->free_handler;
+
+ /* Optional function handlers (hooks) */
+ fh[NXT_WASM_FH_MODULE_INIT].func_name = c->module_init_handler;
+ fh[NXT_WASM_FH_MODULE_END].func_name = c->module_end_handler;
+ fh[NXT_WASM_FH_REQUEST_INIT].func_name = c->request_init_handler;
+ fh[NXT_WASM_FH_REQUEST_END].func_name = c->request_end_handler;
+ fh[NXT_WASM_FH_RESPONSE_END].func_name = c->response_end_handler;
+
+ /* Get any directories to pass through to the WASM module */
+ if (c->access != NULL) {
+ dirs = nxt_conf_get_object_member(c->access, &filesystem_str, NULL);
+ }
+
+ n = (dirs != NULL) ? nxt_conf_object_members_count(dirs) : 0;
+ if (n == 0) {
+ goto out_init;
+ }
+
+ nxt_wasm_ctx.dirs = nxt_zalloc((n + 1) * sizeof(char *));
+ if (nxt_slow_path(nxt_wasm_ctx.dirs == NULL)) {
+ return NXT_ERROR;
+ }
+
+ for (i = 0; i < n; i++) {
+ nxt_str_t str;
+ nxt_conf_value_t *value;
+
+ value = nxt_conf_get_array_element(dirs, i);
+ nxt_conf_get_string(value, &str);
+
+ nxt_wasm_ctx.dirs[i] = nxt_zalloc(str.length + 1);
+ memcpy(nxt_wasm_ctx.dirs[i], str.start, str.length);
+ }
+
+out_init:
+ err = nxt_wops->init(&nxt_wasm_ctx);
+ if (err) {
+ exit(EXIT_FAILURE);
+ }
+
+ return NXT_OK;
+}
+
+
+NXT_EXPORT nxt_app_module_t nxt_app_module = {
+ .compat_length = sizeof(compat),
+ .compat = compat,
+ .type = nxt_string("wasm"),
+ .version = NXT_WASM_VERSION,
+ .mounts = NULL,
+ .nmounts = 0,
+ .setup = nxt_wasm_setup,
+ .start = nxt_wasm_start,
+};
diff --git a/src/wasm/nxt_wasm.h b/src/wasm/nxt_wasm.h
new file mode 100644
index 00000000..cb9dbdfe
--- /dev/null
+++ b/src/wasm/nxt_wasm.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) Andrew Clayton
+ * Copyright (C) F5, Inc.
+ */
+
+#ifndef _NXT_WASM_H_INCLUDED_
+#define _NXT_WASM_H_INCLUDED_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <nxt_unit.h>
+
+#include <wasm.h>
+#if defined(NXT_HAVE_WASM_WASMTIME)
+#include <wasmtime.h>
+#endif
+
+
+#define NXT_WASM_PAGE_SIZE (64 * 1024)
+#define NXT_WASM_MEM_SIZE (32UL * 1024 * 1024)
+
+#if defined(NXT_HAVE_WASM_WASMTIME)
+typedef wasmtime_func_t nxt_wasm_func_t;
+#endif
+
+
+typedef struct nxt_wasm_http_field_s nxt_wasm_http_field_t;
+typedef struct nxt_wasm_request_s nxt_wasm_request_t;
+typedef struct nxt_wasm_response_s nxt_wasm_response_t;
+typedef struct nxt_wasm_response_fields_s nxt_wasm_response_fields_t;
+typedef enum nxt_wasm_fh_e nxt_wasm_fh_t;
+typedef struct nxt_wasm_func_handler_s nxt_wasm_func_handler_t;
+typedef struct nxt_wasm_ctx_s nxt_wasm_ctx_t;
+typedef struct nxt_wasm_operations_s nxt_wasm_operations_t;
+
+struct nxt_wasm_http_field_s {
+ uint32_t name_off;
+ uint32_t name_len;
+ uint32_t value_off;
+ uint32_t value_len;
+};
+
+struct nxt_wasm_request_s {
+ uint32_t method_off;
+ uint32_t method_len;
+ uint32_t version_off;
+ uint32_t version_len;
+ uint32_t path_off;
+ uint32_t path_len;
+ uint32_t query_off;
+ uint32_t query_len;
+ uint32_t remote_off;
+ uint32_t remote_len;
+ uint32_t local_addr_off;
+ uint32_t local_addr_len;
+ uint32_t local_port_off;
+ uint32_t local_port_len;
+ uint32_t server_name_off;
+ uint32_t server_name_len;
+
+ uint32_t content_off;
+ uint32_t content_len;
+ uint32_t content_sent;
+ uint32_t total_content_sent;
+
+ uint32_t request_size;
+
+ uint32_t nfields;
+
+ uint32_t tls;
+
+ nxt_wasm_http_field_t fields[];
+};
+
+struct nxt_wasm_response_s {
+ uint32_t size;
+
+ uint8_t data[];
+};
+
+struct nxt_wasm_response_fields_s {
+ uint32_t nfields;
+
+ nxt_wasm_http_field_t fields[];
+};
+
+enum nxt_wasm_fh_e {
+ NXT_WASM_FH_REQUEST = 0,
+ NXT_WASM_FH_MALLOC,
+ NXT_WASM_FH_FREE,
+
+ /* Optional handlers */
+ NXT_WASM_FH_MODULE_INIT,
+ NXT_WASM_FH_MODULE_END,
+ NXT_WASM_FH_REQUEST_INIT,
+ NXT_WASM_FH_REQUEST_END,
+ NXT_WASM_FH_RESPONSE_END,
+
+ NXT_WASM_FH_NR
+};
+
+struct nxt_wasm_func_handler_s {
+ const char *func_name;
+ nxt_wasm_func_t func;
+};
+
+struct nxt_wasm_ctx_s {
+ const char *module_path;
+
+ nxt_wasm_func_handler_t fh[NXT_WASM_FH_NR];
+
+ char **dirs;
+
+ nxt_unit_request_info_t *req;
+
+ uint8_t *baddr;
+ size_t baddr_off;
+
+ size_t response_off;
+};
+
+struct nxt_wasm_operations_s {
+ int (*init)(nxt_wasm_ctx_t *ctx);
+ void (*destroy)(const nxt_wasm_ctx_t *ctx);
+ void (*exec_request)(const nxt_wasm_ctx_t *ctx);
+ void (*exec_hook)(const nxt_wasm_ctx_t *ctx, nxt_wasm_fh_t hook);
+};
+
+extern const nxt_wasm_operations_t nxt_wasm_ops;
+
+
+/* Exported to the WASM module */
+extern void nxt_wasm_do_response_end(nxt_wasm_ctx_t *ctx);
+extern void nxt_wasm_do_send_response(nxt_wasm_ctx_t *ctx, uint32_t offset);
+extern void nxt_wasm_do_send_headers(nxt_wasm_ctx_t *ctx, uint32_t offset);
+
+#endif /* _NXT_WASM_H_INCLUDED_ */
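
For orientation, a sketch of the guest side of this ABI. Everything below is illustrative: the export name is whatever the application's "request_handler" option selects, the structure is a trimmed mirror of the nxt_wasm_request_t declared above, the host passes the structure's address in the module's linear memory (the baddr_off set up in nxt_wasmtime_execute_request()), and a clang-style export_name attribute is assumed for the toolchain:

    /* Guest (WebAssembly) side; illustrative sketch only. */

    #include <stdint.h>

    /* Trimmed mirror of nxt_wasm_request_t; only the leading fields are read. */
    typedef struct {
        uint32_t  method_off;
        uint32_t  method_len;
        uint32_t  version_off;
        uint32_t  version_len;
        uint32_t  path_off;
        uint32_t  path_len;
        /* ... remaining fields as declared above ... */
    } guest_request_t;


    /* The name is arbitrary; it must match the "request_handler" option. */
    __attribute__((export_name("my_request_handler")))
    int
    my_request_handler(uint32_t addr)
    {
        const guest_request_t  *r = (const guest_request_t *) (uintptr_t) addr;

        /* Offsets are relative to the start of the request structure. */
        const char  *method = (const char *) r + r->method_off;
        const char  *path   = (const char *) r + r->path_off;

        (void) method;
        (void) path;

        /*
         * A real module would build its response in the shared buffer and
         * call back into the host imports registered above, e.g.
         * nxt_wasm_send_response() followed by nxt_wasm_response_end().
         * The return value is read by the host; 0 here for illustration.
         */
        return 0;
    }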
diff --git a/test/conftest.py b/test/conftest.py
index 926d83f8..8d2850fd 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -2,7 +2,6 @@ import fcntl
import inspect
import json
import os
-import platform
import re
import shutil
import signal
@@ -14,16 +13,11 @@ import time
from multiprocessing import Process
import pytest
-from unit.check.chroot import check_chroot
-from unit.check.go import check_go
-from unit.check.isolation import check_isolation
-from unit.check.njs import check_njs
-from unit.check.node import check_node
-from unit.check.regex import check_regex
-from unit.check.tls import check_openssl
-from unit.check.unix_abstract import check_unix_abstract
-from unit.http import TestHTTP
+from unit.check.discover_available import discover_available
+from unit.check.check_prerequisites import check_prerequisites
+from unit.http import HTTP1
from unit.log import Log
+from unit.log import print_log_on_assert
from unit.option import option
from unit.status import Status
from unit.utils import check_findmnt
@@ -88,7 +82,7 @@ _fds_info = {
'skip': False,
},
}
-http = TestHTTP()
+http = HTTP1()
is_findmnt = check_findmnt()
@@ -108,8 +102,6 @@ def pytest_configure(config):
os.path.join(os.path.dirname(__file__), os.pardir)
)
option.test_dir = f'{option.current_dir}/test'
- option.architecture = platform.architecture()[0]
- option.system = platform.system()
option.cache_dir = tempfile.mkdtemp(prefix='unit-test-cache-')
public_dir(option.cache_dir)
@@ -120,124 +112,75 @@ def pytest_configure(config):
fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0)
-def print_log_on_assert(func):
- def inner_function(*args, **kwargs):
- try:
- func(*args, **kwargs)
- except AssertionError as e:
- _print_log(kwargs.get('log', None))
- raise e
-
- return inner_function
-
-
def pytest_generate_tests(metafunc):
- cls = metafunc.cls
+ module = metafunc.module
if (
- not hasattr(cls, 'application_type')
- or cls.application_type == None
- or cls.application_type == 'external'
+ not hasattr(module, 'client')
+ or not hasattr(module.client, 'application_type')
+ or module.client.application_type is None
+ or module.client.application_type == 'external'
):
return
- type = cls.application_type
+ app_type = module.client.application_type
def generate_tests(versions):
+ if not versions:
+ pytest.skip('no available module versions')
+
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', versions)
for version in versions:
option.generated_tests[
f'{metafunc.function.__name__} [{version}]'
- ] = f'{type} {version}'
+ ] = f'{app_type} {version}'
# take available module from option and generate tests for each version
- for module, prereq_version in cls.prerequisites['modules'].items():
- if module in option.available['modules']:
- available_versions = option.available['modules'][module]
+ available_modules = option.available['modules']
+
+ for module, version in metafunc.module.prerequisites['modules'].items():
+ if module in available_modules and available_modules[module]:
+ available_versions = available_modules[module]
- if prereq_version == 'all':
+ if version == 'all':
generate_tests(available_versions)
- elif prereq_version == 'any':
+ elif version == 'any':
option.generated_tests[
metafunc.function.__name__
- ] = f'{type} {available_versions[0]}'
- elif callable(prereq_version):
- generate_tests(list(filter(prereq_version, available_versions)))
+ ] = f'{app_type} {available_versions[0]}'
+ elif callable(version):
+ generate_tests(list(filter(version, available_versions)))
else:
raise ValueError(
f'''
-Unexpected prerequisite version "{prereq_version}" for module "{module}" in
-{cls}. 'all', 'any' or callable expected.'''
+Unexpected prerequisite version "{version}" for module "{module}".
+'all', 'any' or callable expected.'''
)
-def pytest_sessionstart(session):
- option.available = {'modules': {}, 'features': {}}
-
+def pytest_sessionstart():
unit = unit_run()
- output_version = subprocess.check_output(
- [unit['unitd'], '--version'], stderr=subprocess.STDOUT
- ).decode()
-
- # read unit.log
-
- for i in range(50):
- with open(Log.get_path(), 'r') as f:
- log = f.read()
- m = re.search('controller started', log)
-
- if m is None:
- time.sleep(0.1)
- else:
- break
-
- if m is None:
- _print_log(log)
- exit("Unit is writing log too long")
-
- # discover available modules from unit.log
- for module in re.findall(r'module: ([a-zA-Z]+) (.*) ".*"$', log, re.M):
- versions = option.available['modules'].setdefault(module[0], [])
- if module[1] not in versions:
- versions.append(module[1])
+ discover_available(unit)
- # discover modules from check
-
- option.available['modules']['go'] = check_go()
- option.available['modules']['njs'] = check_njs(output_version)
- option.available['modules']['node'] = check_node(option.current_dir)
- option.available['modules']['openssl'] = check_openssl(output_version)
- option.available['modules']['regex'] = check_regex(output_version)
-
- # remove None values
-
- option.available['modules'] = {
- k: v for k, v in option.available['modules'].items() if v is not None
- }
-
- check_chroot()
- check_isolation()
- check_unix_abstract()
-
- _clear_conf(f'{unit["temp_dir"]}/control.unit.sock')
+ _clear_conf()
unit_stop()
- _check_alerts()
+ Log.check_alerts()
if option.restart:
- shutil.rmtree(unit_instance['temp_dir'])
+ shutil.rmtree(unit['temp_dir'])
else:
_clear_temp_dir()
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
-def pytest_runtest_makereport(item, call):
+def pytest_runtest_makereport(item):
# execute all other hooks to obtain the report object
outcome = yield
rep = outcome.get_result()
@@ -248,38 +191,10 @@ def pytest_runtest_makereport(item, call):
setattr(item, f'rep_{rep.when}', rep)
-@pytest.fixture(scope='class', autouse=True)
-def check_prerequisites(request):
- cls = request.cls
- missed = []
-
- # check modules
-
- if 'modules' in cls.prerequisites:
- available_modules = list(option.available['modules'].keys())
-
- for module in cls.prerequisites['modules']:
- if module in available_modules:
- continue
-
- missed.append(module)
-
- if missed:
- pytest.skip(f'Unit has no {", ".join(missed)} module(s)')
-
- # check features
-
- if 'features' in cls.prerequisites:
- available_features = list(option.available['features'].keys())
-
- for feature in cls.prerequisites['features']:
- if feature in available_features:
- continue
-
- missed.append(feature)
-
- if missed:
- pytest.skip(f'{", ".join(missed)} feature(s) not supported')
+@pytest.fixture(scope='module', autouse=True)
+def check_prerequisites_module(request):
+ if hasattr(request.module, 'prerequisites'):
+ check_prerequisites(request.module.prerequisites)
@pytest.fixture(autouse=True)
@@ -306,7 +221,7 @@ def run(request):
# prepare log
- with Log.open(encoding='utf-8') as f:
+ with Log.open() as f:
log = f.read()
Log.set_pos(f.tell())
@@ -317,7 +232,7 @@ def run(request):
# clean temp_dir before the next test
if not option.restart:
- _clear_conf(f'{unit["temp_dir"]}/control.unit.sock', log=log)
+ _clear_conf(log=log)
_clear_temp_dir()
# check descriptors
@@ -331,17 +246,17 @@ def run(request):
# print unit.log in case of error
if hasattr(request.node, 'rep_call') and request.node.rep_call.failed:
- _print_log(log)
+ Log.print_log(log)
if error_stop_unit or error_stop_processes:
- _print_log(log)
+ Log.print_log(log)
# check unit.log for errors
assert error_stop_unit is None, 'stop unit'
assert error_stop_processes is None, 'stop processes'
- _check_alerts(log=log)
+ Log.check_alerts(log=log)
def unit_run(state_dir=None):
@@ -360,6 +275,7 @@ def unit_run(state_dir=None):
exit('Could not find unit')
temp_dir = tempfile.mkdtemp(prefix='unit-test-')
+ option.temp_dir = temp_dir
public_dir(temp_dir)
if oct(stat.S_IMODE(os.stat(builddir).st_mode)) != '0o777':
@@ -394,23 +310,19 @@ def unit_run(state_dir=None):
with open(f'{temp_dir}/unit.log', 'w') as log:
unit_instance['process'] = subprocess.Popen(unitd_args, stderr=log)
- Log.temp_dir = temp_dir
-
if not waitforfiles(control_sock):
- _print_log()
+ Log.print_log()
exit('Could not start unit')
unit_instance['temp_dir'] = temp_dir
unit_instance['control_sock'] = control_sock
unit_instance['unitd'] = unitd
- option.temp_dir = temp_dir
-
with open(f'{temp_dir}/unit.pid', 'r') as f:
unit_instance['pid'] = f.read().rstrip()
if state_dir is None:
- _clear_conf(control_sock)
+ _clear_conf()
_fds_info['main']['fds'] = _count_fds(unit_instance['pid'])
@@ -466,54 +378,9 @@ def unit_stop():
@print_log_on_assert
-def _check_alerts(*, log=None):
- if log is None:
- with Log.open(encoding='utf-8') as f:
- log = f.read()
-
- found = False
- alerts = re.findall(r'.+\[alert\].+', log)
-
- if alerts:
- found = True
-
- if option.detailed:
- print('\nAll alerts/sanitizer errors found in log:')
- [print(alert) for alert in alerts]
-
- if option.skip_alerts:
- for skip in option.skip_alerts:
- alerts = [al for al in alerts if re.search(skip, al) is None]
-
- assert not alerts, 'alert(s)'
-
- if not option.skip_sanitizer:
- sanitizer_errors = re.findall('.+Sanitizer.+', log)
-
- assert not sanitizer_errors, 'sanitizer error(s)'
-
- if found and option.detailed:
- print('skipped.')
-
-
-def _print_log(log=None):
- path = Log.get_path()
+def _clear_conf(*, log=None):
+ sock = unit_instance['control_sock']
- print(f'Path to unit.log:\n{path}\n')
-
- if option.print_log:
- os.set_blocking(sys.stdout.fileno(), True)
- sys.stdout.flush()
-
- if log is None:
- with open(path, 'r', encoding='utf-8', errors='ignore') as f:
- shutil.copyfileobj(f, sys.stdout)
- else:
- sys.stdout.write(log)
-
-
-@print_log_on_assert
-def _clear_conf(sock, *, log=None):
resp = http.put(
url='/config',
sock_type='unix',
@@ -529,7 +396,10 @@ def _clear_conf(sock, *, log=None):
def delete(url):
return http.delete(url=url, sock_type='unix', addr=sock)['body']
- if 'openssl' in option.available['modules']:
+ if (
+ 'openssl' in option.available['modules']
+ and option.available['modules']['openssl']
+ ):
try:
certs = json.loads(get('/certificates')).keys()
@@ -539,7 +409,10 @@ def _clear_conf(sock, *, log=None):
for cert in certs:
assert 'success' in delete(f'/certificates/{cert}'), 'delete cert'
- if 'njs' in option.available['modules']:
+ if (
+ 'njs' in option.available['modules']
+ and option.available['modules']['njs']
+ ):
try:
scripts = json.loads(get('/js_modules')).keys()
@@ -549,6 +422,7 @@ def _clear_conf(sock, *, log=None):
for script in scripts:
assert 'success' in delete(f'/js_modules/{script}'), 'delete script'
+
def _clear_temp_dir():
temp_dir = unit_instance['temp_dir']
@@ -567,12 +441,14 @@ def _clear_temp_dir():
if os.path.isfile(path) or stat.S_ISSOCK(os.stat(path).st_mode):
os.remove(path)
else:
- for attempt in range(10):
+ for _ in range(10):
try:
shutil.rmtree(path)
break
except OSError as err:
- if err.errno != 16:
+ # OSError: [Errno 16] Device or resource busy
+ # OSError: [Errno 39] Directory not empty
+ if err.errno not in [16, 39]:
raise
time.sleep(1)
@@ -582,7 +458,7 @@ def _check_processes():
controller_pid = _fds_info['controller']['pid']
unit_pid = unit_instance['pid']
- for i in range(600):
+ for _ in range(600):
out = (
subprocess.check_output(
['ps', '-ax', '-o', 'pid', '-o', 'ppid', '-o', 'command']
@@ -625,7 +501,7 @@ def _check_processes():
@print_log_on_assert
def _check_fds(*, log=None):
def waitforfds(diff):
- for i in range(600):
+ for _ in range(600):
fds_diff = diff()
if fds_diff <= option.fds_threshold:
@@ -729,6 +605,66 @@ def find_proc(name, ps_output):
return re.findall(f'{unit_instance["pid"]}.*{name}', ps_output)
+def pytest_sessionfinish():
+ if not option.restart and option.save_log:
+ Log.print_path()
+
+ option.restart = True
+
+ unit_stop()
+
+ public_dir(option.cache_dir)
+ shutil.rmtree(option.cache_dir)
+
+ if not option.save_log and os.path.isdir(option.temp_dir):
+ public_dir(option.temp_dir)
+ shutil.rmtree(option.temp_dir)
+
+
+@pytest.fixture
+def date_to_sec_epoch():
+ def _date_to_sec_epoch(date, template='%a, %d %b %Y %X %Z'):
+ return time.mktime(time.strptime(date, template))
+
+ return _date_to_sec_epoch
+
+
+@pytest.fixture
+def findall():
+ def _findall(*args, **kwargs):
+ return Log.findall(*args, **kwargs)
+
+ return _findall
+
+
+@pytest.fixture
+def is_su():
+ return option.is_privileged
+
+
+@pytest.fixture
+def is_unsafe(request):
+ return request.config.getoption("--unsafe")
+
+
+@pytest.fixture
+def require():
+ return check_prerequisites
+
+
+@pytest.fixture
+def search_in_file():
+ def _search_in_file(pattern, name='unit.log', flags=re.M):
+ return re.search(pattern, Log.read(name), flags)
+
+ return _search_in_file
+
+
+@pytest.fixture
+def sec_epoch():
+ return time.mktime(time.gmtime())
+
+
@pytest.fixture()
def skip_alert():
def _skip(*alerts):
@@ -747,37 +683,24 @@ def skip_fds_check():
return _skip
-@pytest.fixture
-def temp_dir(request):
- return unit_instance['temp_dir']
-
-
-@pytest.fixture
-def is_unsafe(request):
- return request.config.getoption("--unsafe")
+@pytest.fixture()
+def system():
+ return option.system
@pytest.fixture
-def is_su(request):
- return os.geteuid() == 0
+def temp_dir():
+ return unit_instance['temp_dir']
@pytest.fixture
-def unit_pid(request):
+def unit_pid():
return unit_instance['process'].pid
-def pytest_sessionfinish(session):
- if not option.restart and option.save_log:
- print(f'Path to unit.log:\n{Log.get_path()}\n')
-
- option.restart = True
-
- unit_stop()
-
- public_dir(option.cache_dir)
- shutil.rmtree(option.cache_dir)
+@pytest.fixture
+def wait_for_record():
+ def _wait_for_record(*args, **kwargs):
+ return Log.wait_for_record(*args, **kwargs)
- if not option.save_log and os.path.isdir(option.temp_dir):
- public_dir(option.temp_dir)
- shutil.rmtree(option.temp_dir)
+ return _wait_for_record
diff --git a/test/python/chunked/wsgi.py b/test/python/chunked/wsgi.py
new file mode 100644
index 00000000..23ee81fc
--- /dev/null
+++ b/test/python/chunked/wsgi.py
@@ -0,0 +1,18 @@
+def application(environ, start_response):
+
+ content_length = int(environ.get('CONTENT_LENGTH', 0))
+ body = bytes(environ['wsgi.input'].read(content_length))
+
+ header_transfer = environ.get('HTTP_X_TRANSFER')
+ header_length = environ.get('HTTP_X_LENGTH')
+
+ headers = []
+
+ if header_length:
+ headers.append(('Content-Length', '0'))
+
+ if header_transfer:
+ headers.append(('Transfer-Encoding', header_transfer))
+
+ start_response('200', headers)
+ return [body]
diff --git a/test/test_access_log.py b/test/test_access_log.py
index c29638a3..bccea56f 100644
--- a/test/test_access_log.py
+++ b/test/test_access_log.py
@@ -1,62 +1,63 @@
import time
import pytest
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {'modules': {'python': 'any'}}
-class TestAccessLog(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def load(self, script):
- super().load(script)
- assert 'success' in self.conf(
- f'"{option.temp_dir}/access.log"', 'access_log'
- ), 'access_log configure'
+def load(script):
+ client.load(script)
- def set_format(self, format):
- assert 'success' in self.conf(
- {
- 'path': f'{option.temp_dir}/access.log',
- 'format': format,
- },
- 'access_log',
- ), 'access_log format'
+ assert 'success' in client.conf(
+ f'"{option.temp_dir}/access.log"', 'access_log'
+ ), 'access_log configure'
- def wait_for_record(self, pattern, name='access.log'):
- return super().wait_for_record(pattern, name)
- def test_access_log_keepalive(self):
- self.load('mirror')
+def set_format(format):
+ assert 'success' in client.conf(
+ {
+ 'path': f'{option.temp_dir}/access.log',
+ 'format': format,
+ },
+ 'access_log',
+ ), 'access_log format'
- assert self.get()['status'] == 200, 'init'
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body='01234',
- read_timeout=1,
- )
+def test_access_log_keepalive(wait_for_record):
+ load('mirror')
+
+ assert client.get()['status'] == 200, 'init'
+
+ (_, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body='01234',
+ read_timeout=1,
+ )
- assert (
- self.wait_for_record(r'"POST / HTTP/1.1" 200 5') is not None
- ), 'keepalive 1'
+ assert (
+ wait_for_record(r'"POST / HTTP/1.1" 200 5', 'access.log') is not None
+ ), 'keepalive 1'
- resp = self.post(sock=sock, body='0123456789')
+ _ = client.post(sock=sock, body='0123456789')
- assert (
- self.wait_for_record(r'"POST / HTTP/1.1" 200 10') is not None
- ), 'keepalive 2'
+ assert (
+ wait_for_record(r'"POST / HTTP/1.1" 200 10', 'access.log') is not None
+ ), 'keepalive 2'
- def test_access_log_pipeline(self):
- self.load('empty')
- self.http(
- b"""GET / HTTP/1.1
+def test_access_log_pipeline(wait_for_record):
+ load('empty')
+
+ client.http(
+ b"""GET / HTTP/1.1
Host: localhost
Referer: Referer-1
@@ -70,235 +71,254 @@ Referer: Referer-3
Connection: close
""",
- raw_resp=True,
- raw=True,
+ raw_resp=True,
+ raw=True,
+ )
+
+ assert (
+ wait_for_record(r'"GET / HTTP/1.1" 200 0 "Referer-1" "-"', 'access.log')
+ is not None
+ ), 'pipeline 1'
+ assert (
+ wait_for_record(r'"GET / HTTP/1.1" 200 0 "Referer-2" "-"', 'access.log')
+ is not None
+ ), 'pipeline 2'
+ assert (
+ wait_for_record(r'"GET / HTTP/1.1" 200 0 "Referer-3" "-"', 'access.log')
+ is not None
+ ), 'pipeline 3'
+
+
+def test_access_log_ipv6(wait_for_record):
+ load('empty')
+
+ assert 'success' in client.conf(
+ {"[::1]:7080": {"pass": "applications/empty"}}, 'listeners'
+ )
+
+ client.get(sock_type='ipv6')
+
+ assert (
+ wait_for_record(
+ r'::1 - - \[.+\] "GET / HTTP/1.1" 200 0 "-" "-"', 'access.log'
)
+ is not None
+ ), 'ipv6'
- assert (
- self.wait_for_record(r'"GET / HTTP/1.1" 200 0 "Referer-1" "-"')
- is not None
- ), 'pipeline 1'
- assert (
- self.wait_for_record(r'"GET / HTTP/1.1" 200 0 "Referer-2" "-"')
- is not None
- ), 'pipeline 2'
- assert (
- self.wait_for_record(r'"GET / HTTP/1.1" 200 0 "Referer-3" "-"')
- is not None
- ), 'pipeline 3'
-
- def test_access_log_ipv6(self):
- self.load('empty')
-
- assert 'success' in self.conf(
- {"[::1]:7080": {"pass": "applications/empty"}}, 'listeners'
- )
- self.get(sock_type='ipv6')
+def test_access_log_unix(temp_dir, wait_for_record):
+ load('empty')
- assert (
- self.wait_for_record(
- r'::1 - - \[.+\] "GET / HTTP/1.1" 200 0 "-" "-"'
- )
- is not None
- ), 'ipv6'
+ addr = f'{temp_dir}/sock'
- def test_access_log_unix(self, temp_dir):
- self.load('empty')
+ assert 'success' in client.conf(
+ {f'unix:{addr}': {"pass": "applications/empty"}}, 'listeners'
+ )
- addr = f'{temp_dir}/sock'
+ client.get(sock_type='unix', addr=addr)
- assert 'success' in self.conf(
- {f'unix:{addr}': {"pass": "applications/empty"}}, 'listeners'
+ assert (
+ wait_for_record(
+ r'unix: - - \[.+\] "GET / HTTP/1.1" 200 0 "-" "-"', 'access.log'
)
+ is not None
+ ), 'unix'
- self.get(sock_type='unix', addr=addr)
- assert (
- self.wait_for_record(
- r'unix: - - \[.+\] "GET / HTTP/1.1" 200 0 "-" "-"'
- )
- is not None
- ), 'unix'
+def test_access_log_referer(wait_for_record):
+ load('empty')
- def test_access_log_referer(self):
- self.load('empty')
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Referer': 'referer-value',
+ 'Connection': 'close',
+ }
+ )
- self.get(
- headers={
- 'Host': 'localhost',
- 'Referer': 'referer-value',
- 'Connection': 'close',
- }
+ assert (
+ wait_for_record(
+ r'"GET / HTTP/1.1" 200 0 "referer-value" "-"', 'access.log'
)
+ is not None
+ ), 'referer'
- assert (
- self.wait_for_record(r'"GET / HTTP/1.1" 200 0 "referer-value" "-"')
- is not None
- ), 'referer'
- def test_access_log_user_agent(self):
- self.load('empty')
+def test_access_log_user_agent(wait_for_record):
+ load('empty')
- self.get(
- headers={
- 'Host': 'localhost',
- 'User-Agent': 'user-agent-value',
- 'Connection': 'close',
- }
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'User-Agent': 'user-agent-value',
+ 'Connection': 'close',
+ }
+ )
+
+ assert (
+ wait_for_record(
+ r'"GET / HTTP/1.1" 200 0 "-" "user-agent-value"', 'access.log'
)
+ is not None
+ ), 'user agent'
+
+
+def test_access_log_http10(wait_for_record):
+ load('empty')
+
+ client.get(http_10=True)
- assert (
- self.wait_for_record(
- r'"GET / HTTP/1.1" 200 0 "-" "user-agent-value"'
- )
- is not None
- ), 'user agent'
+ assert (
+ wait_for_record(r'"GET / HTTP/1.0" 200 0 "-" "-"', 'access.log')
+ is not None
+ ), 'http 1.0'
- def test_access_log_http10(self):
- self.load('empty')
- self.get(http_10=True)
+def test_access_log_partial(wait_for_record):
+ load('empty')
- assert (
- self.wait_for_record(r'"GET / HTTP/1.0" 200 0 "-" "-"') is not None
- ), 'http 1.0'
+ assert client.post()['status'] == 200, 'init'
- def test_access_log_partial(self):
- self.load('empty')
+ _ = client.http(b"""GE""", raw=True, read_timeout=1)
- assert self.post()['status'] == 200, 'init'
+ time.sleep(1)
- resp = self.http(b"""GE""", raw=True, read_timeout=1)
+ assert (
+ wait_for_record(r'"-" 400 0 "-" "-"', 'access.log') is not None
+ ), 'partial'
- time.sleep(1)
- assert (
- self.wait_for_record(r'"-" 400 0 "-" "-"') is not None
- ), 'partial'
+def test_access_log_partial_2(wait_for_record):
+ load('empty')
- def test_access_log_partial_2(self):
- self.load('empty')
+ assert client.post()['status'] == 200, 'init'
- assert self.post()['status'] == 200, 'init'
+ client.http(b"""GET /\n""", raw=True)
- self.http(b"""GET /\n""", raw=True)
+ assert (
+ wait_for_record(r'"-" 400 \d+ "-" "-"', 'access.log') is not None
+ ), 'partial 2'
- assert (
- self.wait_for_record(r'"-" 400 \d+ "-" "-"') is not None
- ), 'partial 2'
- def test_access_log_partial_3(self):
- self.load('empty')
+def test_access_log_partial_3(wait_for_record):
+ load('empty')
- assert self.post()['status'] == 200, 'init'
+ assert client.post()['status'] == 200, 'init'
- resp = self.http(b"""GET / HTTP/1.1""", raw=True, read_timeout=1)
+ _ = client.http(b"""GET / HTTP/1.1""", raw=True, read_timeout=1)
- time.sleep(1)
+ time.sleep(1)
- assert (
- self.wait_for_record(r'"-" 400 0 "-" "-"') is not None
- ), 'partial 3'
+ assert (
+ wait_for_record(r'"-" 400 0 "-" "-"', 'access.log') is not None
+ ), 'partial 3'
- def test_access_log_partial_4(self):
- self.load('empty')
- assert self.post()['status'] == 200, 'init'
+def test_access_log_partial_4(wait_for_record):
+ load('empty')
- resp = self.http(b"""GET / HTTP/1.1\n""", raw=True, read_timeout=1)
+ assert client.post()['status'] == 200, 'init'
- time.sleep(1)
+ _ = client.http(b"""GET / HTTP/1.1\n""", raw=True, read_timeout=1)
- assert (
- self.wait_for_record(r'"-" 400 0 "-" "-"') is not None
- ), 'partial 4'
+ time.sleep(1)
- @pytest.mark.skip('not yet')
- def test_access_log_partial_5(self):
- self.load('empty')
+ assert (
+ wait_for_record(r'"-" 400 0 "-" "-"', 'access.log') is not None
+ ), 'partial 4'
- assert self.post()['status'] == 200, 'init'
- self.get(headers={'Connection': 'close'})
+@pytest.mark.skip('not yet')
+def test_access_log_partial_5(wait_for_record):
+ load('empty')
+
+ assert client.post()['status'] == 200, 'init'
+
+ client.get(headers={'Connection': 'close'})
+
+ assert (
+ wait_for_record(r'"GET / HTTP/1.1" 400 \d+ "-" "-"', 'access.log')
+ is not None
+ ), 'partial 5'
+
+
+def test_access_log_get_parameters(wait_for_record):
+ load('empty')
+
+ client.get(url='/?blah&var=val')
+
+ assert (
+ wait_for_record(
+ r'"GET /\?blah&var=val HTTP/1.1" 200 0 "-" "-"', 'access.log'
+ )
+ is not None
+ ), 'get parameters'
+
- assert (
- self.wait_for_record(r'"GET / HTTP/1.1" 400 \d+ "-" "-"')
- is not None
- ), 'partial 5'
+def test_access_log_delete(search_in_file):
+ load('empty')
- def test_access_log_get_parameters(self):
- self.load('empty')
+ assert 'success' in client.conf_delete('access_log')
- self.get(url='/?blah&var=val')
+ client.get(url='/delete')
- assert (
- self.wait_for_record(
- r'"GET /\?blah&var=val HTTP/1.1" 200 0 "-" "-"'
- )
- is not None
- ), 'get parameters'
+ assert search_in_file(r'/delete', 'access.log') is None, 'delete'
- def test_access_log_delete(self):
- self.load('empty')
- assert 'success' in self.conf_delete('access_log')
+def test_access_log_change(temp_dir, wait_for_record):
+ load('empty')
- self.get(url='/delete')
+ client.get()
- assert self.search_in_log(r'/delete', 'access.log') is None, 'delete'
+ assert 'success' in client.conf(f'"{temp_dir}/new.log"', 'access_log')
- def test_access_log_change(self, temp_dir):
- self.load('empty')
+ client.get()
- self.get()
+ assert (
+ wait_for_record(r'"GET / HTTP/1.1" 200 0 "-" "-"', 'new.log')
+ is not None
+ ), 'change'
- assert 'success' in self.conf(f'"{temp_dir}/new.log"', 'access_log')
- self.get()
+def test_access_log_format(wait_for_record):
+ load('empty')
- assert (
- self.wait_for_record(r'"GET / HTTP/1.1" 200 0 "-" "-"', 'new.log')
- is not None
- ), 'change'
+ def check_format(format, expect, url='/'):
+ set_format(format)
- def test_access_log_format(self):
- self.load('empty')
+ assert client.get(url=url)['status'] == 200
+ assert wait_for_record(expect, 'access.log') is not None, 'found'
- def check_format(format, expect, url='/'):
- self.set_format(format)
+ format = 'BLAH\t0123456789'
+ check_format(format, format)
+ check_format('$uri $status $uri $status', '/ 200 / 200')
- assert self.get(url=url)['status'] == 200
- assert self.wait_for_record(expect) is not None, 'found'
- format = 'BLAH\t0123456789'
- check_format(format, format)
- check_format('$uri $status $uri $status', '/ 200 / 200')
+def test_access_log_variables(wait_for_record):
+ load('mirror')
- def test_access_log_variables(self):
- self.load('mirror')
+ # $body_bytes_sent
- # $body_bytes_sent
+ set_format('$uri $body_bytes_sent')
+ body = '0123456789' * 50
+ client.post(url='/bbs', body=body, read_timeout=1)
+ assert (
+ wait_for_record(fr'^\/bbs {len(body)}$', 'access.log') is not None
+ ), '$body_bytes_sent'
- self.set_format('$uri $body_bytes_sent')
- body = '0123456789' * 50
- self.post(url='/bbs', body=body, read_timeout=1)
- assert (
- self.wait_for_record(fr'^\/bbs {len(body)}$') is not None
- ), '$body_bytes_sent'
- def test_access_log_incorrect(self, temp_dir, skip_alert):
- skip_alert(r'failed to apply new conf')
+def test_access_log_incorrect(temp_dir, skip_alert):
+ skip_alert(r'failed to apply new conf')
- assert 'error' in self.conf(
- f'{option.temp_dir}/blah/access.log',
- 'access_log/path',
- ), 'access_log path incorrect'
+ assert 'error' in client.conf(
+ f'{temp_dir}/blah/access.log',
+ 'access_log/path',
+ ), 'access_log path incorrect'
- assert 'error' in self.conf(
- {
- 'path': f'{temp_dir}/access.log',
- 'format': '$remote_add',
- },
- 'access_log',
- ), 'access_log format incorrect'
+ assert 'error' in client.conf(
+ {
+ 'path': f'{temp_dir}/access.log',
+ 'format': '$remote_add',
+ },
+ 'access_log',
+ ), 'access_log format incorrect'
diff --git a/test/test_asgi_application.py b/test/test_asgi_application.py
index 5ce82cb2..98d4bcd5 100644
--- a/test/test_asgi_application.py
+++ b/test/test_asgi_application.py
@@ -3,24 +3,22 @@ import time
import pytest
from packaging import version
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
+prerequisites = {
+ 'modules': {'python': lambda v: version.parse(v) >= version.parse('3.5')}
+}
+
+client = ApplicationPython(load_module='asgi')
-class TestASGIApplication(TestApplicationPython):
- prerequisites = {
- 'modules': {
- 'python': lambda v: version.parse(v) >= version.parse('3.5')
- }
- }
- load_module = 'asgi'
- def test_asgi_application_variables(self):
- self.load('variables')
+def test_asgi_application_variables(date_to_sec_epoch, sec_epoch):
+ client.load('variables')
- body = 'Test body string.'
+ body = 'Test body string.'
- resp = self.http(
- f"""POST / HTTP/1.1
+ resp = client.http(
+ f"""POST / HTTP/1.1
Host: localhost
Content-Length: {len(body)}
Custom-Header: blah
@@ -30,256 +28,230 @@ Connection: close
custom-header: BLAH
{body}""".encode(),
- raw=True,
- )
+ raw=True,
+ )
- assert resp['status'] == 200, 'status'
- headers = resp['headers']
- header_server = headers.pop('Server')
- assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
+ assert resp['status'] == 200, 'status'
+ headers = resp['headers']
+ header_server = headers.pop('Server')
+ assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
- date = headers.pop('Date')
- assert date[-4:] == ' GMT', 'date header timezone'
- assert (
- abs(self.date_to_sec_epoch(date) - self.sec_epoch()) < 5
- ), 'date header'
+ date = headers.pop('Date')
+ assert date[-4:] == ' GMT', 'date header timezone'
+ assert abs(date_to_sec_epoch(date) - sec_epoch) < 5, 'date header'
- assert headers == {
- 'Connection': 'close',
- 'content-length': str(len(body)),
- 'content-type': 'text/html',
- 'request-method': 'POST',
- 'request-uri': '/',
- 'http-host': 'localhost',
- 'http-version': '1.1',
- 'custom-header': 'blah, Blah, BLAH',
- 'asgi-version': '3.0',
- 'asgi-spec-version': '2.1',
- 'scheme': 'http',
- }, 'headers'
- assert resp['body'] == body, 'body'
-
- def test_asgi_application_unix(self, temp_dir):
- self.load('empty')
-
- addr = f'{temp_dir}/sock'
- assert 'success' in self.conf(
- {f"unix:{addr}": {"pass": "applications/empty"}}, 'listeners'
- )
+ assert headers == {
+ 'Connection': 'close',
+ 'content-length': str(len(body)),
+ 'content-type': 'text/html',
+ 'request-method': 'POST',
+ 'request-uri': '/',
+ 'http-host': 'localhost',
+ 'http-version': '1.1',
+ 'custom-header': 'blah, Blah, BLAH',
+ 'asgi-version': '3.0',
+ 'asgi-spec-version': '2.1',
+ 'scheme': 'http',
+ }, 'headers'
+ assert resp['body'] == body, 'body'
- assert self.get(sock_type='unix', addr=addr)['status'] == 200
- def test_asgi_application_query_string(self):
- self.load('query_string')
+def test_asgi_application_ipv6():
+ client.load('empty')
- resp = self.get(url='/?var1=val1&var2=val2')
+ assert 'success' in client.conf(
+ {"[::1]:7080": {"pass": "applications/empty"}}, 'listeners'
+ )
- assert (
- resp['headers']['query-string'] == 'var1=val1&var2=val2'
- ), 'query-string header'
+ assert client.get(sock_type='ipv6')['status'] == 200
- def test_asgi_application_prefix(self):
- self.load('prefix', prefix='/api/rest')
- def set_prefix(prefix):
- self.conf(f'"{prefix}"', 'applications/prefix/prefix')
+def test_asgi_application_unix(temp_dir):
+ client.load('empty')
- def check_prefix(url, prefix):
- resp = self.get(url=url)
- assert resp['status'] == 200
- assert resp['headers']['prefix'] == prefix
+ addr = f'{temp_dir}/sock'
+ assert 'success' in client.conf(
+ {f"unix:{addr}": {"pass": "applications/empty"}}, 'listeners'
+ )
- check_prefix('/ap', 'NULL')
- check_prefix('/api', 'NULL')
- check_prefix('/api/', 'NULL')
- check_prefix('/api/res', 'NULL')
- check_prefix('/api/restful', 'NULL')
- check_prefix('/api/rest', '/api/rest')
- check_prefix('/api/rest/', '/api/rest')
- check_prefix('/api/rest/get', '/api/rest')
- check_prefix('/api/rest/get/blah', '/api/rest')
+ assert client.get(sock_type='unix', addr=addr)['status'] == 200
- set_prefix('/api/rest/')
- check_prefix('/api/rest', '/api/rest')
- check_prefix('/api/restful', 'NULL')
- check_prefix('/api/rest/', '/api/rest')
- check_prefix('/api/rest/blah', '/api/rest')
- set_prefix('/app')
- check_prefix('/ap', 'NULL')
- check_prefix('/app', '/app')
- check_prefix('/app/', '/app')
- check_prefix('/application/', 'NULL')
+def test_asgi_application_query_string():
+ client.load('query_string')
- set_prefix('/')
- check_prefix('/', 'NULL')
- check_prefix('/app', 'NULL')
+ resp = client.get(url='/?var1=val1&var2=val2')
- def test_asgi_application_query_string_space(self):
- self.load('query_string')
+ assert (
+ resp['headers']['query-string'] == 'var1=val1&var2=val2'
+ ), 'query-string header'
- resp = self.get(url='/ ?var1=val1&var2=val2')
- assert (
- resp['headers']['query-string'] == 'var1=val1&var2=val2'
- ), 'query-string space'
- resp = self.get(url='/ %20?var1=val1&var2=val2')
- assert (
- resp['headers']['query-string'] == 'var1=val1&var2=val2'
- ), 'query-string space 2'
+def test_asgi_application_prefix():
+ client.load('prefix', prefix='/api/rest')
- resp = self.get(url='/ %20 ?var1=val1&var2=val2')
- assert (
- resp['headers']['query-string'] == 'var1=val1&var2=val2'
- ), 'query-string space 3'
+ def set_prefix(prefix):
+ client.conf(f'"{prefix}"', 'applications/prefix/prefix')
- resp = self.get(url='/blah %20 blah? var1= val1 & var2=val2')
- assert (
- resp['headers']['query-string'] == ' var1= val1 & var2=val2'
- ), 'query-string space 4'
+ def check_prefix(url, prefix):
+ resp = client.get(url=url)
+ assert resp['status'] == 200
+ assert resp['headers']['prefix'] == prefix
- def test_asgi_application_query_string_empty(self):
- self.load('query_string')
+ check_prefix('/ap', 'NULL')
+ check_prefix('/api', 'NULL')
+ check_prefix('/api/', 'NULL')
+ check_prefix('/api/res', 'NULL')
+ check_prefix('/api/restful', 'NULL')
+ check_prefix('/api/rest', '/api/rest')
+ check_prefix('/api/rest/', '/api/rest')
+ check_prefix('/api/rest/get', '/api/rest')
+ check_prefix('/api/rest/get/blah', '/api/rest')
- resp = self.get(url='/?')
+ set_prefix('/api/rest/')
+ check_prefix('/api/rest', '/api/rest')
+ check_prefix('/api/restful', 'NULL')
+ check_prefix('/api/rest/', '/api/rest')
+ check_prefix('/api/rest/blah', '/api/rest')
- assert resp['status'] == 200, 'query string empty status'
- assert resp['headers']['query-string'] == '', 'query string empty'
+ set_prefix('/app')
+ check_prefix('/ap', 'NULL')
+ check_prefix('/app', '/app')
+ check_prefix('/app/', '/app')
+ check_prefix('/application/', 'NULL')
- def test_asgi_application_query_string_absent(self):
- self.load('query_string')
+ set_prefix('/')
+ check_prefix('/', 'NULL')
+ check_prefix('/app', 'NULL')
- resp = self.get()
- assert resp['status'] == 200, 'query string absent status'
- assert resp['headers']['query-string'] == '', 'query string absent'
+def test_asgi_application_query_string_space():
+ client.load('query_string')
- @pytest.mark.skip('not yet')
- def test_asgi_application_server_port(self):
- self.load('server_port')
+ resp = client.get(url='/ ?var1=val1&var2=val2')
+ assert (
+ resp['headers']['query-string'] == 'var1=val1&var2=val2'
+ ), 'query-string space'
- assert (
- self.get()['headers']['Server-Port'] == '7080'
- ), 'Server-Port header'
+ resp = client.get(url='/ %20?var1=val1&var2=val2')
+ assert (
+ resp['headers']['query-string'] == 'var1=val1&var2=val2'
+ ), 'query-string space 2'
- @pytest.mark.skip('not yet')
- def test_asgi_application_working_directory_invalid(self):
- self.load('empty')
+ resp = client.get(url='/ %20 ?var1=val1&var2=val2')
+ assert (
+ resp['headers']['query-string'] == 'var1=val1&var2=val2'
+ ), 'query-string space 3'
- assert 'success' in self.conf(
- '"/blah"', 'applications/empty/working_directory'
- ), 'configure invalid working_directory'
+ resp = client.get(url='/blah %20 blah? var1= val1 & var2=val2')
+ assert (
+ resp['headers']['query-string'] == ' var1= val1 & var2=val2'
+ ), 'query-string space 4'
- assert self.get()['status'] == 500, 'status'
- def test_asgi_application_204_transfer_encoding(self):
- self.load('204_no_content')
+def test_asgi_application_query_string_empty():
+ client.load('query_string')
- assert (
- 'Transfer-Encoding' not in self.get()['headers']
- ), '204 header transfer encoding'
+ resp = client.get(url='/?')
- def test_asgi_application_shm_ack_handle(self):
- # Minimum possible limit
- shm_limit = 10 * 1024 * 1024
+ assert resp['status'] == 200, 'query string empty status'
+ assert resp['headers']['query-string'] == '', 'query string empty'
- self.load('mirror', limits={"shm": shm_limit})
- # Should exceed shm_limit
- max_body_size = 12 * 1024 * 1024
+def test_asgi_application_query_string_absent():
+ client.load('query_string')
- assert 'success' in self.conf(
- f'{{"http":{{"max_body_size": {max_body_size} }}}}',
- 'settings',
- )
+ resp = client.get()
- assert self.get()['status'] == 200, 'init'
+ assert resp['status'] == 200, 'query string absent status'
+ assert resp['headers']['query-string'] == '', 'query string absent'
- body = '0123456789AB' * 1024 * 1024 # 12 Mb
- resp = self.post(body=body, read_buffer_size=1024 * 1024)
- assert resp['body'] == body, 'keep-alive 1'
+@pytest.mark.skip('not yet')
+def test_asgi_application_server_port():
+ client.load('server_port')
- def test_asgi_keepalive_body(self):
- self.load('mirror')
+ assert (
+ client.get()['headers']['Server-Port'] == '7080'
+ ), 'Server-Port header'
- assert self.get()['status'] == 200, 'init'
- body = '0123456789' * 500
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
+@pytest.mark.skip('not yet')
+def test_asgi_application_working_directory_invalid():
+ client.load('empty')
+
+ assert 'success' in client.conf(
+ '"/blah"', 'applications/empty/working_directory'
+ ), 'configure invalid working_directory'
- assert resp['body'] == body, 'keep-alive 1'
+ assert client.get()['status'] == 500, 'status'
- body = '0123456789'
- resp = self.post(sock=sock, body=body)
- assert resp['body'] == body, 'keep-alive 2'
+def test_asgi_application_204_transfer_encoding():
+ client.load('204_no_content')
- def test_asgi_keepalive_reconfigure(self):
- self.load('mirror')
+ assert (
+ 'Transfer-Encoding' not in client.get()['headers']
+ ), '204 header transfer encoding'
- assert self.get()['status'] == 200, 'init'
- body = '0123456789'
- conns = 3
- socks = []
+def test_asgi_application_shm_ack_handle():
+ # Minimum possible limit
+ shm_limit = 10 * 1024 * 1024
- for i in range(conns):
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
+ client.load('mirror', limits={"shm": shm_limit})
- assert resp['body'] == body, 'keep-alive open'
+ # Should exceed shm_limit
+ max_body_size = 12 * 1024 * 1024
- self.load('mirror', processes=i + 1)
+ assert 'success' in client.conf(
+ f'{{"http":{{"max_body_size": {max_body_size} }}}}',
+ 'settings',
+ )
- socks.append(sock)
+ assert client.get()['status'] == 200, 'init'
- for i in range(conns):
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- sock=socks[i],
- body=body,
- read_timeout=1,
- )
+ body = '0123456789AB' * 1024 * 1024 # 12 Mb
+ resp = client.post(body=body, read_buffer_size=1024 * 1024)
- assert resp['body'] == body, 'keep-alive request'
+ assert resp['body'] == body, 'keep-alive 1'
- self.load('mirror', processes=i + 1)
- for i in range(conns):
- resp = self.post(sock=socks[i], body=body)
+def test_asgi_keepalive_body():
+ client.load('mirror')
- assert resp['body'] == body, 'keep-alive close'
+ assert client.get()['status'] == 200, 'init'
- self.load('mirror', processes=i + 1)
+ body = '0123456789' * 500
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
- def test_asgi_keepalive_reconfigure_2(self):
- self.load('mirror')
+ assert resp['body'] == body, 'keep-alive 1'
- assert self.get()['status'] == 200, 'init'
+ body = '0123456789'
+ resp = client.post(sock=sock, body=body)
- body = '0123456789'
+ assert resp['body'] == body, 'keep-alive 2'
- (resp, sock) = self.post(
+
+def test_asgi_keepalive_reconfigure():
+ client.load('mirror')
+
+ assert client.get()['status'] == 200, 'init'
+
+ body = '0123456789'
+ conns = 3
+ socks = []
+
+ for i in range(conns):
+ (resp, sock) = client.post(
headers={
'Host': 'localhost',
'Connection': 'keep-alive',
@@ -289,162 +261,216 @@ custom-header: BLAH
read_timeout=1,
)
- assert resp['body'] == body, 'reconfigure 2 keep-alive 1'
+ assert resp['body'] == body, 'keep-alive open'
- self.load('empty')
+ client.load('mirror', processes=i + 1)
- assert self.get()['status'] == 200, 'init'
+ socks.append(sock)
- (resp, sock) = self.post(start=True, sock=sock, body=body)
+ for i in range(conns):
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ sock=socks[i],
+ body=body,
+ read_timeout=1,
+ )
- assert resp['status'] == 200, 'reconfigure 2 keep-alive 2'
- assert resp['body'] == '', 'reconfigure 2 keep-alive 2 body'
+ assert resp['body'] == body, 'keep-alive request'
- assert 'success' in self.conf(
- {"listeners": {}, "applications": {}}
- ), 'reconfigure 2 clear configuration'
+ client.load('mirror', processes=i + 1)
- resp = self.get(sock=sock)
+ for i in range(conns):
+ resp = client.post(sock=socks[i], body=body)
- assert resp == {}, 'reconfigure 2 keep-alive 3'
+ assert resp['body'] == body, 'keep-alive close'
- def test_asgi_keepalive_reconfigure_3(self):
- self.load('empty')
+ client.load('mirror', processes=i + 1)
- assert self.get()['status'] == 200, 'init'
- sock = self.http(
- b"""GET / HTTP/1.1
-""",
- raw=True,
- no_recv=True,
- )
+def test_asgi_keepalive_reconfigure_2():
+ client.load('mirror')
- assert self.get()['status'] == 200
+ assert client.get()['status'] == 200, 'init'
- assert 'success' in self.conf(
- {"listeners": {}, "applications": {}}
- ), 'reconfigure 3 clear configuration'
+ body = '0123456789'
- resp = self.http(
- b"""Host: localhost
-Connection: close
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
-""",
- sock=sock,
- raw=True,
- )
+ assert resp['body'] == body, 'reconfigure 2 keep-alive 1'
- assert resp['status'] == 200, 'reconfigure 3'
+ client.load('empty')
- def test_asgi_process_switch(self):
- self.load('delayed', processes=2)
+ assert client.get()['status'] == 200, 'init'
- self.get(
- headers={
- 'Host': 'localhost',
- 'Content-Length': '0',
- 'X-Delay': '5',
- 'Connection': 'close',
- },
- no_recv=True,
- )
+ (resp, sock) = client.post(start=True, sock=sock, body=body)
- headers_delay_1 = {
- 'Connection': 'close',
- 'Host': 'localhost',
- 'Content-Length': '0',
- 'X-Delay': '1',
- }
+ assert resp['status'] == 200, 'reconfigure 2 keep-alive 2'
+ assert resp['body'] == '', 'reconfigure 2 keep-alive 2 body'
- self.get(headers=headers_delay_1, no_recv=True)
+ assert 'success' in client.conf(
+ {"listeners": {}, "applications": {}}
+ ), 'reconfigure 2 clear configuration'
- time.sleep(0.5)
+ resp = client.get(sock=sock)
- for _ in range(10):
- self.get(headers=headers_delay_1, no_recv=True)
+ assert resp == {}, 'reconfigure 2 keep-alive 3'
- self.get(headers=headers_delay_1)
- def test_asgi_application_loading_error(self, skip_alert):
- skip_alert(r'Python failed to import module "blah"')
+def test_asgi_keepalive_reconfigure_3():
+ client.load('empty')
- self.load('empty', module="blah")
+ assert client.get()['status'] == 200, 'init'
- assert self.get()['status'] == 503, 'loading error'
+ sock = client.http(
+ b"""GET / HTTP/1.1
+""",
+ raw=True,
+ no_recv=True,
+ )
- def test_asgi_application_threading(self):
- """wait_for_record() timeouts after 5s while every thread works at
- least 3s. So without releasing GIL test should fail.
- """
+ assert client.get()['status'] == 200
- self.load('threading')
+ assert 'success' in client.conf(
+ {"listeners": {}, "applications": {}}
+ ), 'reconfigure 3 clear configuration'
- for _ in range(10):
- self.get(no_recv=True)
+ resp = client.http(
+ b"""Host: localhost
+Connection: close
- assert (
- self.wait_for_record(r'\(5\) Thread: 100', wait=50) is not None
- ), 'last thread finished'
+""",
+ sock=sock,
+ raw=True,
+ )
- def test_asgi_application_threads(self):
- self.load('threads', threads=2)
+ assert resp['status'] == 200, 'reconfigure 3'
- socks = []
- for i in range(2):
- sock = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Delay': '3',
- 'Connection': 'close',
- },
- no_recv=True,
- )
+def test_asgi_process_switch():
+ client.load('delayed', processes=2)
- socks.append(sock)
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': '0',
+ 'X-Delay': '5',
+ 'Connection': 'close',
+ },
+ no_recv=True,
+ )
+
+ headers_delay_1 = {
+ 'Connection': 'close',
+ 'Host': 'localhost',
+ 'Content-Length': '0',
+ 'X-Delay': '1',
+ }
- time.sleep(1.0) # required to avoid greedy request reading
+ client.get(headers=headers_delay_1, no_recv=True)
- threads = set()
+ time.sleep(0.5)
- for sock in socks:
- resp = self.recvall(sock).decode('utf-8')
+ for _ in range(10):
+ client.get(headers=headers_delay_1, no_recv=True)
- self.log_in(resp)
+ client.get(headers=headers_delay_1)
- resp = self._resp_to_dict(resp)
- assert resp['status'] == 200, 'status'
+def test_asgi_application_loading_error(skip_alert):
+ skip_alert(r'Python failed to import module "blah"')
- threads.add(resp['headers']['x-thread'])
+ client.load('empty', module="blah")
- sock.close()
+ assert client.get()['status'] == 503, 'loading error'
- assert len(socks) == len(threads), 'threads differs'
- def test_asgi_application_legacy(self):
- self.load('legacy')
+def test_asgi_application_threading(wait_for_record):
+ """wait_for_record() timeouts after 5s while every thread works at
+ least 3s. So without releasing GIL test should fail.
+ """
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Content-Length': '0',
- 'Connection': 'close',
- },
- )
+ client.load('threading')
- assert resp['status'] == 200, 'status'
+ for _ in range(10):
+ client.get(no_recv=True)
+
+ assert (
+ wait_for_record(r'\(5\) Thread: 100', wait=50) is not None
+ ), 'last thread finished'
- def test_asgi_application_legacy_force(self):
- self.load('legacy_force', protocol='asgi')
- resp = self.get(
+def test_asgi_application_threads():
+ client.load('threads', threads=2)
+
+ socks = []
+
+ for _ in range(2):
+ sock = client.get(
headers={
'Host': 'localhost',
- 'Content-Length': '0',
+ 'X-Delay': '3',
'Connection': 'close',
},
+ no_recv=True,
)
+ socks.append(sock)
+
+ time.sleep(1.0) # required to avoid greedy request reading
+
+ threads = set()
+
+ for sock in socks:
+ resp = client.recvall(sock).decode('utf-8')
+
+ client.log_in(resp)
+
+ resp = client._resp_to_dict(resp)
+
assert resp['status'] == 200, 'status'
+
+ threads.add(resp['headers']['x-thread'])
+
+ sock.close()
+
+ assert len(socks) == len(threads), 'threads differs'
+
+
+def test_asgi_application_legacy():
+ client.load('legacy')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': '0',
+ 'Connection': 'close',
+ },
+ )
+
+ assert resp['status'] == 200, 'status'
+
+
+def test_asgi_application_legacy_force():
+ client.load('legacy_force', protocol='asgi')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': '0',
+ 'Connection': 'close',
+ },
+ )
+
+ assert resp['status'] == 200, 'status'
diff --git a/test/test_asgi_application_unix_abstract.py b/test/test_asgi_application_unix_abstract.py
index 2ca7839f..980a98a9 100644
--- a/test/test_asgi_application_unix_abstract.py
+++ b/test/test_asgi_application_unix_abstract.py
@@ -1,23 +1,21 @@
from packaging import version
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
+prerequisites = {
+ 'modules': {'python': lambda v: version.parse(v) >= version.parse('3.5')},
+ 'features': {'unix_abstract': True},
+}
-class TestASGIApplicationUnixAbstract(TestApplicationPython):
- prerequisites = {
- 'modules': {
- 'python': lambda v: version.parse(v) >= version.parse('3.5')
- },
- 'features': ['unix_abstract'],
- }
- load_module = 'asgi'
+client = ApplicationPython(load_module='asgi')
- def test_asgi_application_unix_abstract(self):
- self.load('empty')
- addr = '\0sock'
- assert 'success' in self.conf(
- {f"unix:@{addr[1:]}": {"pass": "applications/empty"}},
- 'listeners',
- )
+def test_asgi_application_unix_abstract():
+ client.load('empty')
- assert self.get(sock_type='unix', addr=addr)['status'] == 200
+ addr = '\0sock'
+ assert 'success' in client.conf(
+ {f"unix:@{addr[1:]}": {"pass": "applications/empty"}},
+ 'listeners',
+ )
+
+ assert client.get(sock_type='unix', addr=addr)['status'] == 200
diff --git a/test/test_asgi_lifespan.py b/test/test_asgi_lifespan.py
index 84e9fea4..499f523d 100644
--- a/test/test_asgi_lifespan.py
+++ b/test/test_asgi_lifespan.py
@@ -2,123 +2,126 @@ import os
from conftest import unit_stop
from packaging import version
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {
+ 'modules': {'python': lambda v: version.parse(v) >= version.parse('3.5')}
+}
-class TestASGILifespan(TestApplicationPython):
- prerequisites = {
- 'modules': {
- 'python': lambda v: version.parse(v) >= version.parse('3.5')
- }
- }
- load_module = 'asgi'
+client = ApplicationPython(load_module='asgi')
- def setup_cookies(self, prefix):
- base_dir = f'{option.test_dir}/python/lifespan/empty'
- os.chmod(base_dir, 0o777)
+def assert_cookies(prefix):
+ for name in ['startup', 'shutdown']:
+ path = f'{option.test_dir}/python/lifespan/empty/{prefix}{name}'
+ exists = os.path.isfile(path)
+ if exists:
+ os.remove(path)
- for name in ['startup', 'shutdown', 'version']:
- path = f'{option.test_dir}/python/lifespan/empty/{prefix}{name}'
- open(path, 'a').close()
- os.chmod(path, 0o777)
+ assert not exists, name
- def assert_cookies(self, prefix):
- for name in ['startup', 'shutdown']:
- path = f'{option.test_dir}/python/lifespan/empty/{prefix}{name}'
- exists = os.path.isfile(path)
- if exists:
- os.remove(path)
+ path = f'{option.test_dir}/python/lifespan/empty/{prefix}version'
- assert not exists, name
+ with open(path, 'r') as f:
+ version = f.read()
- path = f'{option.test_dir}/python/lifespan/empty/{prefix}version'
+ os.remove(path)
- with open(path, 'r') as f:
- version = f.read()
+ assert version == '3.0 2.0', 'version'
- os.remove(path)
- assert version == '3.0 2.0', 'version'
+def setup_cookies(prefix):
+ base_dir = f'{option.test_dir}/python/lifespan/empty'
- def test_asgi_lifespan(self):
- self.load('lifespan/empty')
+ os.chmod(base_dir, 0o777)
- self.setup_cookies('')
+ for name in ['startup', 'shutdown', 'version']:
+ path = f'{option.test_dir}/python/lifespan/empty/{prefix}{name}'
+ open(path, 'a').close()
+ os.chmod(path, 0o777)
- assert self.get()['status'] == 204
- unit_stop()
+def test_asgi_lifespan():
+ client.load('lifespan/empty')
- self.assert_cookies('')
+ setup_cookies('')
- def test_asgi_lifespan_targets(self):
- path = f'{option.test_dir}/python/lifespan/empty'
+ assert client.get()['status'] == 204
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": "/1"},
- "action": {"pass": "applications/targets/1"},
- },
- {
- "match": {"uri": "/2"},
- "action": {"pass": "applications/targets/2"},
- },
- ],
- "applications": {
+ unit_stop()
+
+ assert_cookies('')
+
+
+def test_asgi_lifespan_targets():
+ path = f'{option.test_dir}/python/lifespan/empty'
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"uri": "/1"},
+ "action": {"pass": "applications/targets/1"},
+ },
+ {
+ "match": {"uri": "/2"},
+ "action": {"pass": "applications/targets/2"},
+ },
+ ],
+ "applications": {
+ "targets": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "working_directory": path,
+ "path": path,
"targets": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "working_directory": path,
- "path": path,
- "targets": {
- "1": {"module": "asgi", "callable": "application"},
- "2": {
- "module": "asgi",
- "callable": "application2",
- },
+ "1": {"module": "asgi", "callable": "application"},
+ "2": {
+ "module": "asgi",
+ "callable": "application2",
},
- }
- },
- }
- )
+ },
+ }
+ },
+ }
+ )
+
+ setup_cookies('')
+ setup_cookies('app2_')
+
+ assert client.get(url="/1")['status'] == 204
+ assert client.get(url="/2")['status'] == 204
+
+ unit_stop()
- self.setup_cookies('')
- self.setup_cookies('app2_')
+ assert_cookies('')
+ assert_cookies('app2_')
- assert self.get(url="/1")['status'] == 204
- assert self.get(url="/2")['status'] == 204
- unit_stop()
+def test_asgi_lifespan_failed(wait_for_record):
+ client.load('lifespan/failed')
- self.assert_cookies('')
- self.assert_cookies('app2_')
+ assert client.get()['status'] == 503
- def test_asgi_lifespan_failed(self):
- self.load('lifespan/failed')
+ assert (
+ wait_for_record(r'\[error\].*Application startup failed') is not None
+ ), 'error message'
+ assert wait_for_record(r'Exception blah') is not None, 'exception'
- assert self.get()['status'] == 503
- assert (
- self.wait_for_record(r'\[error\].*Application startup failed')
- is not None
- ), 'error message'
- assert self.wait_for_record(r'Exception blah') is not None, 'exception'
+def test_asgi_lifespan_error(wait_for_record):
+ client.load('lifespan/error')
- def test_asgi_lifespan_error(self):
- self.load('lifespan/error')
+ client.get()
- self.get()
+ assert wait_for_record(r'Exception blah') is not None, 'exception'
- assert self.wait_for_record(r'Exception blah') is not None, 'exception'
- def test_asgi_lifespan_error_auto(self):
- self.load('lifespan/error_auto')
+def test_asgi_lifespan_error_auto(wait_for_record):
+ client.load('lifespan/error_auto')
- self.get()
+ client.get()
- assert self.wait_for_record(r'AssertionError') is not None, 'assertion'
+ assert wait_for_record(r'AssertionError') is not None, 'assertion'
diff --git a/test/test_asgi_targets.py b/test/test_asgi_targets.py
index 5afc7079..c3ec22f0 100644
--- a/test/test_asgi_targets.py
+++ b/test/test_asgi_targets.py
@@ -1,138 +1,142 @@
import pytest
from packaging import version
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {
+ 'modules': {'python': lambda v: version.parse(v) >= version.parse('3.5')}
+}
-class TestASGITargets(TestApplicationPython):
- prerequisites = {
- 'modules': {
- 'python': lambda v: version.parse(v) >= version.parse('3.5')
- }
- }
- load_module = 'asgi'
+client = ApplicationPython(load_module='asgi')
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self):
- path = f'{option.test_dir}/python/targets/'
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": "/1"},
- "action": {"pass": "applications/targets/1"},
- },
- {
- "match": {"uri": "/2"},
- "action": {"pass": "applications/targets/2"},
- },
- ],
- "applications": {
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ path = f'{option.test_dir}/python/targets/'
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"uri": "/1"},
+ "action": {"pass": "applications/targets/1"},
+ },
+ {
+ "match": {"uri": "/2"},
+ "action": {"pass": "applications/targets/2"},
+ },
+ ],
+ "applications": {
+ "targets": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "working_directory": path,
+ "path": path,
+ "protocol": "asgi",
"targets": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "working_directory": path,
- "path": path,
- "protocol": "asgi",
- "targets": {
- "1": {
- "module": "asgi",
- "callable": "application_200",
- },
- "2": {
- "module": "asgi",
- "callable": "application_201",
- },
+ "1": {
+ "module": "asgi",
+ "callable": "application_200",
},
- }
- },
- }
- )
+ "2": {
+ "module": "asgi",
+ "callable": "application_201",
+ },
+ },
+ }
+ },
+ }
+ )
- def conf_targets(self, targets):
- assert 'success' in self.conf(targets, 'applications/targets/targets')
- def test_asgi_targets(self):
- assert self.get(url='/1')['status'] == 200
- assert self.get(url='/2')['status'] == 201
+def conf_targets(targets):
+ assert 'success' in client.conf(targets, 'applications/targets/targets')
- def test_asgi_targets_legacy(self):
- self.conf_targets(
- {
- "1": {"module": "asgi", "callable": "legacy_application_200"},
- "2": {"module": "asgi", "callable": "legacy_application_201"},
- }
- )
- assert self.get(url='/1')['status'] == 200
- assert self.get(url='/2')['status'] == 201
+def test_asgi_targets():
+ assert client.get(url='/1')['status'] == 200
+ assert client.get(url='/2')['status'] == 201
- def test_asgi_targets_mix(self):
- self.conf_targets(
- {
- "1": {"module": "asgi", "callable": "application_200"},
- "2": {"module": "asgi", "callable": "legacy_application_201"},
- }
- )
- assert self.get(url='/1')['status'] == 200
- assert self.get(url='/2')['status'] == 201
+def test_asgi_targets_legacy():
+ conf_targets(
+ {
+ "1": {"module": "asgi", "callable": "legacy_application_200"},
+ "2": {"module": "asgi", "callable": "legacy_application_201"},
+ }
+ )
- def test_asgi_targets_broken(self, skip_alert):
- skip_alert(r'Python failed to get "blah" from module')
+ assert client.get(url='/1')['status'] == 200
+ assert client.get(url='/2')['status'] == 201
- self.conf_targets(
- {
- "1": {"module": "asgi", "callable": "application_200"},
- "2": {"module": "asgi", "callable": "blah"},
- }
- )
- assert self.get(url='/1')['status'] != 200
+def test_asgi_targets_mix():
+ conf_targets(
+ {
+ "1": {"module": "asgi", "callable": "application_200"},
+ "2": {"module": "asgi", "callable": "legacy_application_201"},
+ }
+ )
+
+ assert client.get(url='/1')['status'] == 200
+ assert client.get(url='/2')['status'] == 201
+
- def test_asgi_targets_prefix(self):
- self.conf_targets(
+def test_asgi_targets_broken(skip_alert):
+ skip_alert(r'Python failed to get "blah" from module')
+
+ conf_targets(
+ {
+ "1": {"module": "asgi", "callable": "application_200"},
+ "2": {"module": "asgi", "callable": "blah"},
+ }
+ )
+
+ assert client.get(url='/1')['status'] != 200
+
+
+def test_asgi_targets_prefix():
+ conf_targets(
+ {
+ "1": {
+ "module": "asgi",
+ "callable": "application_prefix",
+ "prefix": "/1/",
+ },
+ "2": {
+ "module": "asgi",
+ "callable": "application_prefix",
+ "prefix": "/api",
+ },
+ }
+ )
+ client.conf(
+ [
{
- "1": {
- "module": "asgi",
- "callable": "application_prefix",
- "prefix": "/1/",
- },
- "2": {
- "module": "asgi",
- "callable": "application_prefix",
- "prefix": "/api",
- },
- }
- )
- self.conf(
- [
- {
- "match": {"uri": "/1*"},
- "action": {"pass": "applications/targets/1"},
- },
- {
- "match": {"uri": "*"},
- "action": {"pass": "applications/targets/2"},
- },
- ],
- "routes",
- )
-
- def check_prefix(url, prefix):
- resp = self.get(url=url)
- assert resp['status'] == 200
- assert resp['headers']['prefix'] == prefix
-
- check_prefix('/1', '/1')
- check_prefix('/11', 'NULL')
- check_prefix('/1/', '/1')
- check_prefix('/', 'NULL')
- check_prefix('/ap', 'NULL')
- check_prefix('/api', '/api')
- check_prefix('/api/', '/api')
- check_prefix('/api/test/', '/api')
- check_prefix('/apis', 'NULL')
- check_prefix('/apis/', 'NULL')
+ "match": {"uri": "/1*"},
+ "action": {"pass": "applications/targets/1"},
+ },
+ {
+ "match": {"uri": "*"},
+ "action": {"pass": "applications/targets/2"},
+ },
+ ],
+ "routes",
+ )
+
+ def check_prefix(url, prefix):
+ resp = client.get(url=url)
+ assert resp['status'] == 200
+ assert resp['headers']['prefix'] == prefix
+
+ check_prefix('/1', '/1')
+ check_prefix('/11', 'NULL')
+ check_prefix('/1/', '/1')
+ check_prefix('/', 'NULL')
+ check_prefix('/ap', 'NULL')
+ check_prefix('/api', '/api')
+ check_prefix('/api/', '/api')
+ check_prefix('/api/test/', '/api')
+ check_prefix('/apis', 'NULL')
+ check_prefix('/apis/', 'NULL')
diff --git a/test/test_asgi_websockets.py b/test/test_asgi_websockets.py
index b15bee43..eb7a20e7 100644
--- a/test/test_asgi_websockets.py
+++ b/test/test_asgi_websockets.py
@@ -3,1497 +3,1502 @@ import time
import pytest
from packaging import version
-from unit.applications.lang.python import TestApplicationPython
-from unit.applications.websockets import TestApplicationWebsocket
-from unit.option import option
+from unit.applications.lang.python import ApplicationPython
+from unit.applications.websockets import ApplicationWebsocket
+prerequisites = {
+ 'modules': {'python': lambda v: version.parse(v) >= version.parse('3.5')}
+}
-class TestASGIWebsockets(TestApplicationPython):
- prerequisites = {
- 'modules': {
- 'python': lambda v: version.parse(v) >= version.parse('3.5')
- }
- }
- load_module = 'asgi'
+client = ApplicationPython(load_module='asgi')
+ws = ApplicationWebsocket()
- ws = TestApplicationWebsocket()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request, skip_alert):
- assert 'success' in self.conf(
- {'http': {'websocket': {'keepalive_interval': 0}}}, 'settings'
- ), 'clear keepalive_interval'
+@pytest.fixture(autouse=True)
+def setup_method_fixture(skip_alert):
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'keepalive_interval': 0}}}, 'settings'
+ ), 'clear keepalive_interval'
- skip_alert(r'socket close\(\d+\) failed')
+ skip_alert(r'socket close\(\d+\) failed')
- def close_connection(self, sock):
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+def close_connection(sock):
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
- def check_close(self, sock, code=1000, no_close=False, frame=None):
- if frame == None:
- frame = self.ws.frame_read(sock)
+ check_close(sock)
- assert frame['fin'] == True, 'close fin'
- assert frame['opcode'] == self.ws.OP_CLOSE, 'close opcode'
- assert frame['code'] == code, 'close code'
- if not no_close:
- sock.close()
+def check_close(sock, code=1000, no_close=False, frame=None):
+ if frame is None:
+ frame = ws.frame_read(sock)
- def check_frame(self, frame, fin, opcode, payload, decode=True):
- if opcode == self.ws.OP_BINARY or not decode:
- data = frame['data']
- else:
- data = frame['data'].decode('utf-8')
+ assert frame['fin'], 'close fin'
+ assert frame['opcode'] == ws.OP_CLOSE, 'close opcode'
+ assert frame['code'] == code, 'close code'
- assert frame['fin'] == fin, 'fin'
- assert frame['opcode'] == opcode, 'opcode'
- assert data == payload, 'payload'
+ if not no_close:
+ sock.close()
- def test_asgi_websockets_handshake(self):
- self.load('websockets/mirror')
- resp, sock, key = self.ws.upgrade()
- sock.close()
+def check_frame(frame, fin, opcode, payload, decode=True):
+ if opcode == ws.OP_BINARY or not decode:
+ data = frame['data']
+ else:
+ data = frame['data'].decode('utf-8')
- assert resp['status'] == 101, 'status'
- assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
- assert resp['headers']['Connection'] == 'Upgrade', 'connection'
- assert resp['headers']['Sec-WebSocket-Accept'] == self.ws.accept(
- key
- ), 'key'
+ assert frame['fin'] == fin, 'fin'
+ assert frame['opcode'] == opcode, 'opcode'
+ assert data == payload, 'payload'
- # remove "mirror" application
- self.load('websockets/subprotocol')
- def test_asgi_websockets_subprotocol(self):
- self.load('websockets/subprotocol')
+def test_asgi_websockets_handshake():
+ client.load('websockets/mirror')
- resp, sock, key = self.ws.upgrade()
- sock.close()
+ resp, sock, key = ws.upgrade()
+ sock.close()
- assert resp['status'] == 101, 'status'
- assert (
- resp['headers']['x-subprotocols'] == "('chat', 'phone', 'video')"
- ), 'subprotocols'
- assert resp['headers']['sec-websocket-protocol'] == 'chat', 'key'
+ assert resp['status'] == 101, 'status'
+ assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
+ assert resp['headers']['Connection'] == 'Upgrade', 'connection'
+ assert resp['headers']['Sec-WebSocket-Accept'] == ws.accept(key), 'key'
- def test_asgi_websockets_mirror(self):
- self.load('websockets/mirror')
+ # remove "mirror" application
+ client.load('websockets/subprotocol')
- message = 'blah'
- _, sock, _ = self.ws.upgrade()
+def test_asgi_websockets_subprotocol():
+ client.load('websockets/subprotocol')
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
+ resp, sock, _ = ws.upgrade()
+ sock.close()
- assert message == frame['data'].decode('utf-8'), 'mirror'
+ assert resp['status'] == 101, 'status'
+ assert (
+ resp['headers']['x-subprotocols'] == "('chat', 'phone', 'video')"
+ ), 'subprotocols'
+ assert resp['headers']['sec-websocket-protocol'] == 'chat', 'key'
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
- assert message == frame['data'].decode('utf-8'), 'mirror 2'
+def test_asgi_websockets_mirror():
+ client.load('websockets/mirror')
- sock.close()
+ message = 'blah'
- def test_asgi_websockets_mirror_app_change(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- message = 'blah'
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- _, sock, _ = self.ws.upgrade()
+ assert message == frame['data'].decode('utf-8'), 'mirror'
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- assert message == frame['data'].decode('utf-8'), 'mirror'
+ assert message == frame['data'].decode('utf-8'), 'mirror 2'
- self.load('websockets/subprotocol')
+ sock.close()
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
- assert message == frame['data'].decode('utf-8'), 'mirror 2'
+def test_asgi_websockets_mirror_app_change():
+ client.load('websockets/mirror')
- sock.close()
+ message = 'blah'
- def test_asgi_websockets_no_mask(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- message = 'blah'
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- _, sock, _ = self.ws.upgrade()
+ assert message == frame['data'].decode('utf-8'), 'mirror'
- self.ws.frame_write(sock, self.ws.OP_TEXT, message, mask=False)
+ client.load('websockets/subprotocol')
- frame = self.ws.frame_read(sock)
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- assert frame['opcode'] == self.ws.OP_CLOSE, 'no mask opcode'
- assert frame['code'] == 1002, 'no mask close code'
+ assert message == frame['data'].decode('utf-8'), 'mirror 2'
- sock.close()
+ sock.close()
- def test_asgi_websockets_fragmentation(self):
- self.load('websockets/mirror')
- message = 'blah'
+def test_asgi_websockets_no_mask():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ message = 'blah'
- self.ws.frame_write(sock, self.ws.OP_TEXT, message, fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, ' ', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, message)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
+ ws.frame_write(sock, ws.OP_TEXT, message, mask=False)
- assert f'{message} {message}' == frame['data'].decode(
- 'utf-8'
- ), 'mirror framing'
+ frame = ws.frame_read(sock)
- sock.close()
+ assert frame['opcode'] == ws.OP_CLOSE, 'no mask opcode'
+ assert frame['code'] == 1002, 'no mask close code'
- def test_asgi_websockets_length_long(self):
- self.load('websockets/mirror')
+ sock.close()
- _, sock, _ = self.ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'fragment2', length=2**64 - 1
- )
+def test_asgi_websockets_fragmentation():
+ client.load('websockets/mirror')
- self.check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
+ message = 'blah'
- def test_asgi_websockets_frame_fragmentation_invalid(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- message = 'blah'
+ ws.frame_write(sock, ws.OP_TEXT, message, fin=False)
+ ws.frame_write(sock, ws.OP_CONT, ' ', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, message)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
- self.ws.frame_write(sock, self.ws.OP_PING, message, fin=False)
+ assert f'{message} {message}' == frame['data'].decode(
+ 'utf-8'
+ ), 'mirror framing'
- frame = self.ws.frame_read(sock)
+ sock.close()
- frame.pop('data')
- assert frame == {
- 'fin': True,
- 'rsv1': False,
- 'rsv2': False,
- 'rsv3': False,
- 'opcode': self.ws.OP_CLOSE,
- 'mask': 0,
- 'code': 1002,
- 'reason': 'Fragmented control frame',
- }, 'close frame'
- sock.close()
+def test_asgi_websockets_length_long():
+ client.load('websockets/mirror')
- def test_asgi_websockets_large(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- message = '0123456789' * 300
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', length=2**64 - 1)
- _, sock, _ = self.ws.upgrade()
+ check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
- data = frame['data'].decode('utf-8')
+def test_asgi_websockets_frame_fragmentation_invalid():
+ client.load('websockets/mirror')
- frame = self.ws.frame_read(sock)
- data += frame['data'].decode('utf-8')
+ message = 'blah'
- assert message == data, 'large'
+ _, sock, _ = ws.upgrade()
- sock.close()
+ ws.frame_write(sock, ws.OP_PING, message, fin=False)
- def test_asgi_websockets_two_clients(self):
- self.load('websockets/mirror')
+ frame = ws.frame_read(sock)
- message1 = 'blah1'
- message2 = 'blah2'
+ frame.pop('data')
+ assert frame == {
+ 'fin': True,
+ 'rsv1': False,
+ 'rsv2': False,
+ 'rsv3': False,
+ 'opcode': ws.OP_CLOSE,
+ 'mask': 0,
+ 'code': 1002,
+ 'reason': 'Fragmented control frame',
+ }, 'close frame'
- _, sock1, _ = self.ws.upgrade()
- _, sock2, _ = self.ws.upgrade()
+ sock.close()
- self.ws.frame_write(sock1, self.ws.OP_TEXT, message1)
- self.ws.frame_write(sock2, self.ws.OP_TEXT, message2)
- frame1 = self.ws.frame_read(sock1)
- frame2 = self.ws.frame_read(sock2)
+def test_asgi_websockets_large():
+ client.load('websockets/mirror')
- assert message1 == frame1['data'].decode('utf-8'), 'client 1'
- assert message2 == frame2['data'].decode('utf-8'), 'client 2'
+ message = '0123456789' * 300
- sock1.close()
- sock2.close()
+ _, sock, _ = ws.upgrade()
- @pytest.mark.skip('not yet')
- def test_asgi_websockets_handshake_upgrade_absent(
- self,
- ): # FAIL https://tools.ietf.org/html/rfc6455#section-4.2.1
- self.load('websockets/mirror')
+ ws.frame_write(sock, ws.OP_TEXT, message)
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
+ frame = ws.frame_read(sock)
+ data = frame['data'].decode('utf-8')
- assert resp['status'] == 400, 'upgrade absent'
+ frame = ws.frame_read(sock)
+ data += frame['data'].decode('utf-8')
- def test_asgi_websockets_handshake_case_insensitive(self):
- self.load('websockets/mirror')
+ assert message == data, 'large'
- resp, sock, _ = self.ws.upgrade(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'WEBSOCKET',
- 'Connection': 'UPGRADE',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- }
- )
- sock.close()
+ sock.close()
- assert resp['status'] == 101, 'status'
-
- @pytest.mark.skip('not yet')
- def test_asgi_websockets_handshake_connection_absent(self): # FAIL
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_asgi_websockets_handshake_version_absent(self):
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- },
- )
-
- assert resp['status'] == 426, 'status'
-
- @pytest.mark.skip('not yet')
- def test_asgi_websockets_handshake_key_invalid(self):
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': '!',
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'key length'
-
- key = self.ws.key()
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': [key, key],
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert (
- resp['status'] == 400
- ), 'key double' # FAIL https://tools.ietf.org/html/rfc6455#section-11.3.1
-
- def test_asgi_websockets_handshake_method_invalid(self):
- self.load('websockets/mirror')
-
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_asgi_websockets_handshake_http_10(self):
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- http_10=True,
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_asgi_websockets_handshake_uri_invalid(self):
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- url='!',
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_asgi_websockets_protocol_absent(self):
- self.load('websockets/mirror')
-
- key = self.ws.key()
- resp, sock, _ = self.ws.upgrade(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': key,
- 'Sec-WebSocket-Version': 13,
- }
- )
- sock.close()
- assert resp['status'] == 101, 'status'
- assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
- assert resp['headers']['Connection'] == 'Upgrade', 'connection'
- assert resp['headers']['Sec-WebSocket-Accept'] == self.ws.accept(
- key
- ), 'key'
+def test_asgi_websockets_two_clients():
+ client.load('websockets/mirror')
- # autobahn-testsuite
- #
- # Some following tests fail because of Unit does not support UTF-8
- # validation for websocket frames. It should be implemented
- # by application, if necessary.
+ message1 = 'blah1'
+ message2 = 'blah2'
- def test_asgi_websockets_1_1_1__1_1_8(self):
- self.load('websockets/mirror')
+ _, sock1, _ = ws.upgrade()
+ _, sock2, _ = ws.upgrade()
- opcode = self.ws.OP_TEXT
+ ws.frame_write(sock1, ws.OP_TEXT, message1)
+ ws.frame_write(sock2, ws.OP_TEXT, message2)
- _, sock, _ = self.ws.upgrade()
+ frame1 = ws.frame_read(sock1)
+ frame2 = ws.frame_read(sock2)
- def check_length(length, chopsize=None):
- payload = '*' * length
+ assert message1 == frame1['data'].decode('utf-8'), 'client 1'
+ assert message2 == frame2['data'].decode('utf-8'), 'client 2'
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
+ sock1.close()
+ sock2.close()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, opcode, payload)
- check_length(0) # 1_1_1
- check_length(125) # 1_1_2
- check_length(126) # 1_1_3
- check_length(127) # 1_1_4
- check_length(128) # 1_1_5
- check_length(65535) # 1_1_6
- check_length(65536) # 1_1_7
- check_length(65536, chopsize=997) # 1_1_8
+# FAIL https://tools.ietf.org/html/rfc6455#section-4.2.1
+@pytest.mark.skip('not yet')
+def test_asgi_websockets_handshake_upgrade_absent():
+ client.load('websockets/mirror')
- self.close_connection(sock)
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
- def test_asgi_websockets_1_2_1__1_2_8(self):
- self.load('websockets/mirror')
+ assert resp['status'] == 400, 'upgrade absent'
- opcode = self.ws.OP_BINARY
- _, sock, _ = self.ws.upgrade()
+def test_asgi_websockets_handshake_case_insensitive():
+ client.load('websockets/mirror')
- def check_length(length, chopsize=None):
- payload = b'\xfe' * length
+ resp, sock, _ = ws.upgrade(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'WEBSOCKET',
+ 'Connection': 'UPGRADE',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ }
+ )
+ sock.close()
+
+ assert resp['status'] == 101, 'status'
+
+
+@pytest.mark.skip('not yet')
+def test_asgi_websockets_handshake_connection_absent(): # FAIL
+ client.load('websockets/mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert resp['status'] == 400, 'status'
+
+
+def test_asgi_websockets_handshake_version_absent():
+ client.load('websockets/mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ },
+ )
+
+ assert resp['status'] == 426, 'status'
+
+
+@pytest.mark.skip('not yet')
+def test_asgi_websockets_handshake_key_invalid():
+ client.load('websockets/mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': '!',
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert resp['status'] == 400, 'key length'
+
+ key = ws.key()
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': [key, key],
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert (
+ resp['status'] == 400
+ ), 'key double' # FAIL https://tools.ietf.org/html/rfc6455#section-11.3.1
+
+
+def test_asgi_websockets_handshake_method_invalid():
+ client.load('websockets/mirror')
+
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert resp['status'] == 400, 'status'
+
+
+def test_asgi_websockets_handshake_http_10():
+ client.load('websockets/mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ http_10=True,
+ )
+
+ assert resp['status'] == 400, 'status'
+
+
+def test_asgi_websockets_handshake_uri_invalid():
+ client.load('websockets/mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ url='!',
+ )
+
+ assert resp['status'] == 400, 'status'
+
+
+def test_asgi_websockets_protocol_absent():
+ client.load('websockets/mirror')
+
+ key = ws.key()
+ resp, sock, _ = ws.upgrade(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': key,
+ 'Sec-WebSocket-Version': 13,
+ }
+ )
+ sock.close()
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- frame = self.ws.frame_read(sock)
+ assert resp['status'] == 101, 'status'
+ assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
+ assert resp['headers']['Connection'] == 'Upgrade', 'connection'
+ assert resp['headers']['Sec-WebSocket-Accept'] == ws.accept(key), 'key'
- self.check_frame(frame, True, opcode, payload)
- check_length(0) # 1_2_1
- check_length(125) # 1_2_2
- check_length(126) # 1_2_3
- check_length(127) # 1_2_4
- check_length(128) # 1_2_5
- check_length(65535) # 1_2_6
- check_length(65536) # 1_2_7
- check_length(65536, chopsize=997) # 1_2_8
+# autobahn-testsuite
+#
+# Some of the following tests fail because Unit does not support UTF-8
+# validation for websocket frames. If necessary, it should be implemented
+# by the application.
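+#
+# A hedged, illustrative sketch only (not exercised by these tests) of how
+# an application could do the validation itself; the helper and the
+# surrounding ASGI snippet are hypothetical:
+#
+# def _valid_utf8(data):
+#     try:
+#         data.decode('utf-8')
+#         return True
+#     except UnicodeDecodeError:
+#         return False
+#
+# # e.g. inside the app, before mirroring a text payload received as bytes:
+# # if not _valid_utf8(payload):
+# #     await send({'type': 'websocket.close', 'code': 1007})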
- self.close_connection(sock)
- def test_asgi_websockets_2_1__2_6(self):
- self.load('websockets/mirror')
+def test_asgi_websockets_1_1_1__1_1_8():
+ client.load('websockets/mirror')
- op_ping = self.ws.OP_PING
- op_pong = self.ws.OP_PONG
+ opcode = ws.OP_TEXT
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- def check_ping(payload, chopsize=None, decode=True):
- self.ws.frame_write(sock, op_ping, payload, chopsize=chopsize)
- frame = self.ws.frame_read(sock)
+ def check_length(length, chopsize=None):
+ payload = '*' * length
- self.check_frame(frame, True, op_pong, payload, decode=decode)
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- check_ping('') # 2_1
- check_ping('Hello, world!') # 2_2
- check_ping(b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff', decode=False) # 2_3
- check_ping(b'\xfe' * 125, decode=False) # 2_4
- check_ping(b'\xfe' * 125, chopsize=1, decode=False) # 2_6
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, opcode, payload)
- self.close_connection(sock)
+ check_length(0) # 1_1_1
+ check_length(125) # 1_1_2
+ check_length(126) # 1_1_3
+ check_length(127) # 1_1_4
+ check_length(128) # 1_1_5
+ check_length(65535) # 1_1_6
+ check_length(65536) # 1_1_7
+ check_length(65536, chopsize=997) # 1_1_8
- # 2_5
+ close_connection(sock)
- _, sock, _ = self.ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_PING, b'\xfe' * 126)
- self.check_close(sock, 1002)
+def test_asgi_websockets_1_2_1__1_2_8():
+ client.load('websockets/mirror')
- def test_asgi_websockets_2_7__2_9(self):
- self.load('websockets/mirror')
+ opcode = ws.OP_BINARY
- # 2_7
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ def check_length(length, chopsize=None):
+ payload = b'\xfe' * length
- self.ws.frame_write(sock, self.ws.OP_PONG, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', '2_7'
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
+ frame = ws.frame_read(sock)
- # 2_8
+ check_frame(frame, True, opcode, payload)
- self.ws.frame_write(sock, self.ws.OP_PONG, 'unsolicited pong payload')
- assert self.recvall(sock, read_timeout=0.1) == b'', '2_8'
+ check_length(0) # 1_2_1
+ check_length(125) # 1_2_2
+ check_length(126) # 1_2_3
+ check_length(127) # 1_2_4
+ check_length(128) # 1_2_5
+ check_length(65535) # 1_2_6
+ check_length(65536) # 1_2_7
+ check_length(65536, chopsize=997) # 1_2_8
- # 2_9
+ close_connection(sock)
- payload = 'ping payload'
- self.ws.frame_write(sock, self.ws.OP_PONG, 'unsolicited pong payload')
- self.ws.frame_write(sock, self.ws.OP_PING, payload)
+def test_asgi_websockets_2_1__2_6():
+ client.load('websockets/mirror')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, payload)
+ op_ping = ws.OP_PING
+ op_pong = ws.OP_PONG
- self.close_connection(sock)
+ _, sock, _ = ws.upgrade()
- def test_asgi_websockets_2_10__2_11(self):
- self.load('websockets/mirror')
+ def check_ping(payload, chopsize=None, decode=True):
+ ws.frame_write(sock, op_ping, payload, chopsize=chopsize)
+ frame = ws.frame_read(sock)
- # 2_10
+ check_frame(frame, True, op_pong, payload, decode=decode)
- _, sock, _ = self.ws.upgrade()
+ check_ping('') # 2_1
+ check_ping('Hello, world!') # 2_2
+ check_ping(b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff', decode=False) # 2_3
+ check_ping(b'\xfe' * 125, decode=False) # 2_4
+ check_ping(b'\xfe' * 125, chopsize=1, decode=False) # 2_6
- for i in range(0, 10):
- self.ws.frame_write(sock, self.ws.OP_PING, f'payload-{i}')
+ close_connection(sock)
- for i in range(0, 10):
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, f'payload-{i}')
+ # 2_5
- # 2_11
+ _, sock, _ = ws.upgrade()
- for i in range(0, 10):
- opcode = self.ws.OP_PING
- self.ws.frame_write(sock, opcode, f'payload-{i}', chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, b'\xfe' * 126)
+ check_close(sock, 1002)
- for i in range(0, 10):
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, f'payload-{i}')
- self.close_connection(sock)
+def test_asgi_websockets_2_7__2_9():
+ client.load('websockets/mirror')
- @pytest.mark.skip('not yet')
- def test_asgi_websockets_3_1__3_7(self):
- self.load('websockets/mirror')
+ # 2_7
- payload = 'Hello, world!'
+ _, sock, _ = ws.upgrade()
- # 3_1
+ ws.frame_write(sock, ws.OP_PONG, '')
+ assert client.recvall(sock, read_timeout=0.1) == b'', '2_7'
- _, sock, _ = self.ws.upgrade()
+ # 2_8
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, rsv1=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_PONG, 'unsolicited pong payload')
+ assert client.recvall(sock, read_timeout=0.1) == b'', '2_8'
- # 3_2
+ # 2_9
- _, sock, _ = self.ws.upgrade()
+ payload = 'ping payload'
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, rsv2=True)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ ws.frame_write(sock, ws.OP_PONG, 'unsolicited pong payload')
+ ws.frame_write(sock, ws.OP_PING, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, payload)
- self.check_close(sock, 1002, no_close=True)
+ close_connection(sock)
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_2'
- sock.close()
- # 3_3
+def test_asgi_websockets_2_10__2_11():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ # 2_10
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ for i in range(0, 10):
+ ws.frame_write(sock, ws.OP_PING, f'payload-{i}')
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, payload, rsv1=True, rsv2=True
- )
+ for i in range(0, 10):
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, f'payload-{i}')
- self.check_close(sock, 1002, no_close=True)
+ # 2_11
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_3'
- sock.close()
+ for i in range(0, 10):
+ opcode = ws.OP_PING
+ ws.frame_write(sock, opcode, f'payload-{i}', chopsize=1)
- # 3_4
+ for i in range(0, 10):
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, f'payload-{i}')
- _, sock, _ = self.ws.upgrade()
+ close_connection(sock)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, payload, rsv3=True, chopsize=1
- )
- self.ws.frame_write(sock, self.ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+@pytest.mark.skip('not yet')
+def test_asgi_websockets_3_1__3_7():
+ client.load('websockets/mirror')
- self.check_close(sock, 1002, no_close=True)
+ payload = 'Hello, world!'
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_4'
- sock.close()
+ # 3_1
- # 3_5
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv1=True)
+ check_close(sock, 1002)
- self.ws.frame_write(
- sock,
- self.ws.OP_BINARY,
- b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff',
- rsv1=True,
- rsv3=True,
- )
+ # 3_2
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 3_6
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv2=True)
+ ws.frame_write(sock, ws.OP_PING, '')
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(
- sock, self.ws.OP_PING, payload, rsv2=True, rsv3=True
- )
+ check_close(sock, 1002, no_close=True)
- self.check_close(sock, 1002)
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_2'
+ sock.close()
- # 3_7
+ # 3_3
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(
- sock, self.ws.OP_CLOSE, payload, rsv1=True, rsv2=True, rsv3=True
- )
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- self.check_close(sock, 1002)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- def test_asgi_websockets_4_1_1__4_2_5(self):
- self.load('websockets/mirror')
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv1=True, rsv2=True)
- payload = 'Hello, world!'
+ check_close(sock, 1002, no_close=True)
- # 4_1_1
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_3'
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ # 3_4
- self.ws.frame_write(sock, 0x03, '')
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_1_2
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv3=True, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, 0x04, 'reserved opcode payload')
- self.check_close(sock, 1002)
+ check_close(sock, 1002, no_close=True)
- # 4_1_3
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_4'
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ # 3_5
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(
+ sock,
+ ws.OP_BINARY,
+ b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff',
+ rsv1=True,
+ rsv3=True,
+ )
- self.ws.frame_write(sock, 0x05, '')
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ check_close(sock, 1002)
- self.check_close(sock, 1002)
+ # 3_6
- # 4_1_4
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_PING, payload, rsv2=True, rsv3=True)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ # 3_7
- self.ws.frame_write(sock, 0x06, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ _, sock, _ = ws.upgrade()
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_CLOSE, payload, rsv1=True, rsv2=True, rsv3=True)
- # 4_1_5
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
+def test_asgi_websockets_4_1_1__4_2_5():
+ client.load('websockets/mirror')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ payload = 'Hello, world!'
- self.ws.frame_write(sock, 0x07, payload, chopsize=1)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 4_1_1
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_2_1
+ ws.frame_write(sock, 0x03, '')
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
+ # 4_1_2
- self.ws.frame_write(sock, 0x0B, '')
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_2_2
+ ws.frame_write(sock, 0x04, 'reserved opcode payload')
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
+ # 4_1_3
- self.ws.frame_write(sock, 0x0C, 'reserved opcode payload')
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_2_3
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, 0x05, '')
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, 0x0D, '')
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 4_1_4
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_2_4
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, 0x06, payload)
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, 0x0E, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 4_1_5
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_2_5
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
+ ws.frame_write(sock, 0x07, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, 0x0F, payload, chopsize=1)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 4_2_1
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- def test_asgi_websockets_5_1__5_20(self):
- self.load('websockets/mirror')
+ ws.frame_write(sock, 0x0B, '')
+ check_close(sock, 1002)
- # 5_1
+ # 4_2_2
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_PING, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, 0x0C, 'reserved opcode payload')
+ check_close(sock, 1002)
- # 5_2
+ # 4_2_3
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_PONG, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- # 5_3
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, 0x0D, '')
+ ws.frame_write(sock, ws.OP_PING, '')
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ # 4_2_4
- # 5_4
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_4'
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- # 5_5
+ ws.frame_write(sock, 0x0E, payload)
+ ws.frame_write(sock, ws.OP_PING, '')
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'fragment1', fin=False, chopsize=1
- )
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'fragment2', fin=True, chopsize=1
- )
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ # 4_2_5
- # 5_6
+ _, sock, _ = ws.upgrade()
- ping_payload = 'ping payload'
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ ws.frame_write(sock, 0x0F, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ check_close(sock, 1002)
- # 5_7
- ping_payload = 'ping payload'
+def test_asgi_websockets_5_1__5_20():
+ client.load('websockets/mirror')
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_7'
+ # 5_1
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ ws.frame_write(sock, ws.OP_PING, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ # 5_2
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ _, sock, _ = ws.upgrade()
- # 5_8
+ ws.frame_write(sock, ws.OP_PONG, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- ping_payload = 'ping payload'
+ # 5_3
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'fragment1', fin=False, chopsize=1
- )
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload, chopsize=1)
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'fragment2', fin=True, chopsize=1
- )
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- # 5_9
+ # 5_4
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=True
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_4'
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- # 5_10
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_5
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=True
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False, chopsize=1)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True, chopsize=1)
- # 5_11
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_6
- self.ws.frame_write(
- sock,
- self.ws.OP_CONT,
- 'non-continuation payload',
- fin=True,
- chopsize=1,
- )
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1
- )
- self.check_close(sock, 1002)
+ ping_payload = 'ping payload'
- # 5_12
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_PING, ping_payload)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=False
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- # 5_13
+ # 5_7
- _, sock, _ = self.ws.upgrade()
+ ping_payload = 'ping payload'
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=False
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_7'
- # 5_14
+ ws.frame_write(sock, ws.OP_PING, ping_payload)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- self.ws.frame_write(
- sock,
- self.ws.OP_CONT,
- 'non-continuation payload',
- fin=False,
- chopsize=1,
- )
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1
- )
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- # 5_15
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_8
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment4', fin=True)
+ ping_payload = 'ping payload'
- frame = self.ws.frame_read(sock)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, ping_payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True, chopsize=1)
- if frame['opcode'] == self.ws.OP_TEXT:
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
- frame = None
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- self.check_close(sock, 1002, frame=frame)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- # 5_16
+ # 5_9
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- for i in range(0, 2):
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=True)
- self.check_close(sock, 1002)
+ # 5_10
- # 5_17
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- for i in range(0, 2):
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment1', fin=True)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=True)
- self.check_close(sock, 1002)
+ # 5_11
- # 5_18
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(
+ sock,
+ ws.OP_CONT,
+ 'non-continuation payload',
+ fin=True,
+ chopsize=1,
+ )
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2')
- self.check_close(sock, 1002)
+ # 5_12
- # 5_19
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 1!')
+ # 5_13
- time.sleep(1)
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment4', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 2!')
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment5')
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 1!')
+ # 5_14
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 2!')
+ _, sock, _ = ws.upgrade()
- self.check_frame(
- self.ws.frame_read(sock),
- True,
- self.ws.OP_TEXT,
- 'fragment1fragment2fragment3fragment4fragment5',
- )
+ ws.frame_write(
+ sock,
+ ws.OP_CONT,
+ 'non-continuation payload',
+ fin=False,
+ chopsize=1,
+ )
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1)
+ check_close(sock, 1002)
- # 5_20
+ # 5_15
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 1!')
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 1!')
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment4', fin=True)
- time.sleep(1)
+ frame = ws.frame_read(sock)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment4', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 2!')
+ if frame['opcode'] == ws.OP_TEXT:
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
+ frame = None
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 2!')
+ check_close(sock, 1002, frame=frame)
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_20'
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment5')
+ # 5_16
- self.check_frame(
- self.ws.frame_read(sock),
- True,
- self.ws.OP_TEXT,
- 'fragment1fragment2fragment3fragment4fragment5',
- )
+ _, sock, _ = ws.upgrade()
- self.close_connection(sock)
+ for _ in range(0, 2):
+ ws.frame_write(sock, ws.OP_CONT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=True)
+ check_close(sock, 1002)
- def test_asgi_websockets_6_1_1__6_4_4(self):
- self.load('websockets/mirror')
+ # 5_17
- # 6_1_1
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ for _ in range(0, 2):
+ ws.frame_write(sock, ws.OP_CONT, 'fragment1', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=True)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_TEXT, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, '')
+ # 5_18
- # 6_1_2
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '')
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2')
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, '')
+ # 5_19
- # 6_1_3
+ _, sock, _ = ws.upgrade()
- payload = 'middle frame payload'
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 1!')
- self.ws.frame_write(sock, self.ws.OP_TEXT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, payload, fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '')
+ time.sleep(1)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment4', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 2!')
+ ws.frame_write(sock, ws.OP_CONT, 'fragment5')
- # 6_2_1
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 1!')
- payload = 'Hello-µ@ßöäüàá-UTF-8!!'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 2!')
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ check_frame(
+ ws.frame_read(sock),
+ True,
+ ws.OP_TEXT,
+ 'fragment1fragment2fragment3fragment4fragment5',
+ )
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ # 5_20
- # 6_2_2
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 1!')
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload[:12], fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, payload[12:])
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 1!')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ time.sleep(1)
- # 6_2_3
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment4', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 2!')
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 2!')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_20'
+ ws.frame_write(sock, ws.OP_CONT, 'fragment5')
- # 6_2_4
+ check_frame(
+ ws.frame_read(sock),
+ True,
+ ws.OP_TEXT,
+ 'fragment1fragment2fragment3fragment4fragment5',
+ )
- payload = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
+ close_connection(sock)
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+def test_asgi_websockets_6_1_1__6_4_4():
+ client.load('websockets/mirror')
- self.close_connection(sock)
+ # 6_1_1
- # Unit does not support UTF-8 validation
- #
- # # 6_3_1 FAIL
- #
- # payload_1 = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
- # payload_2 = '\xed\xa0\x80'
- # payload_3 = '\x65\x64\x69\x74\x65\x64'
- #
- # payload = payload_1 + payload_2 + payload_3
- #
- # self.ws.message(sock, self.ws.OP_TEXT, payload)
- # self.check_close(sock, 1007)
- #
- # # 6_3_2 FAIL
- #
- # _, sock, _ = self.ws.upgrade()
- #
- # self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
- # self.check_close(sock, 1007)
- #
- # # 6_4_1 ... 6_4_4 FAIL
+ _, sock, _ = ws.upgrade()
- def test_asgi_websockets_7_1_1__7_5_1(self):
- self.load('websockets/mirror')
+ ws.frame_write(sock, ws.OP_TEXT, '')
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, '')
- # 7_1_1
+ # 6_1_2
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '')
- payload = "Hello World!"
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, '')
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ # 6_1_3
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ payload = 'middle frame payload'
- self.close_connection(sock)
+ ws.frame_write(sock, ws.OP_TEXT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, payload, fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '')
- # 7_1_2
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ # 6_2_1
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+ payload = 'Hello-µ@ßöäüàá-UTF-8!!'
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- # 7_1_3
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ # 6_2_2
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
+ ws.frame_write(sock, ws.OP_TEXT, payload[:12], fin=False)
+ ws.frame_write(sock, ws.OP_CONT, payload[12:])
- self.ws.frame_write(sock, self.ws.OP_PING, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- sock.close()
+ # 6_2_3
- # 7_1_4
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
+ # 6_2_4
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ payload = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
- sock.close()
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
- # 7_1_5
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ close_connection(sock)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+# Unit does not support UTF-8 validation
+#
+# # 6_3_1 FAIL
+#
+# payload_1 = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
+# payload_2 = '\xed\xa0\x80'
+# payload_3 = '\x65\x64\x69\x74\x65\x64'
+#
+# payload = payload_1 + payload_2 + payload_3
+#
+# ws.message(sock, ws.OP_TEXT, payload)
+# check_close(sock, 1007)
+#
+# # 6_3_2 FAIL
+#
+# _, sock, _ = ws.upgrade()
+#
+# ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
+# check_close(sock, 1007)
+#
+# # 6_4_1 ... 6_4_4 FAIL
- sock.close()
- # 7_1_6
+def test_asgi_websockets_7_1_1__7_5_1():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ # 7_1_1
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'BAsd7&jh23' * 26 * 2**10)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+ _, sock, _ = ws.upgrade()
- self.recvall(sock, read_timeout=1)
+ payload = "Hello World!"
- self.ws.frame_write(sock, self.ws.OP_PING, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- sock.close()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
+
+ close_connection(sock)
+
+ # 7_1_2
+
+ _, sock, _ = ws.upgrade()
+
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+
+ check_close(sock)
+
+ # 7_1_3
+
+ _, sock, _ = ws.upgrade()
+
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
+
+ ws.frame_write(sock, ws.OP_PING, '')
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- # 7_3_1
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ # 7_1_4
- self.ws.frame_write(sock, self.ws.OP_CLOSE, '')
- self.check_close(sock)
+ _, sock, _ = ws.upgrade()
- # 7_3_2
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, 'a')
- self.check_close(sock, 1002)
+ sock.close()
- # 7_3_3
+ # 7_1_5
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
- # 7_3_4
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2')
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- _, sock, _ = self.ws.upgrade()
+ sock.close()
- payload = self.ws.serialize_close(reason='Hello World!')
+ # 7_1_6
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock)
+ _, sock, _ = ws.upgrade()
- # 7_3_5
+ ws.frame_write(sock, ws.OP_TEXT, 'BAsd7&jh23' * 26 * 2**10)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
- _, sock, _ = self.ws.upgrade()
+ client.recvall(sock, read_timeout=1)
- payload = self.ws.serialize_close(reason='*' * 123)
+ ws.frame_write(sock, ws.OP_PING, '')
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock)
+ sock.close()
- # 7_3_6
+ # 7_3_1
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- payload = self.ws.serialize_close(reason='*' * 124)
+ ws.frame_write(sock, ws.OP_CLOSE, '')
+ check_close(sock)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ # 7_3_2
- # # 7_5_1 FAIL Unit does not support UTF-8 validation
- #
- # _, sock, _ = self.ws.upgrade()
- #
- # payload = self.ws.serialize_close(reason = '\xce\xba\xe1\xbd\xb9\xcf' \
- # '\x83\xce\xbc\xce\xb5\xed\xa0\x80\x65\x64\x69\x74\x65\x64')
- #
- # self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- # self.check_close(sock, 1007)
+ _, sock, _ = ws.upgrade()
- def test_asgi_websockets_7_7_X__7_9_X(self):
- self.load('websockets/mirror')
+ ws.frame_write(sock, ws.OP_CLOSE, 'a')
+ check_close(sock, 1002)
- valid_codes = [
- 1000,
- 1001,
- 1002,
- 1003,
- 1007,
- 1008,
- 1009,
- 1010,
- 1011,
- 3000,
- 3999,
- 4000,
- 4999,
- ]
+ # 7_3_3
- invalid_codes = [0, 999, 1004, 1005, 1006, 1016, 1100, 2000, 2999]
+ _, sock, _ = ws.upgrade()
- for code in valid_codes:
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock)
- payload = self.ws.serialize_close(code=code)
+ # 7_3_4
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock)
+ _, sock, _ = ws.upgrade()
- for code in invalid_codes:
- _, sock, _ = self.ws.upgrade()
+ payload = ws.serialize_close(reason='Hello World!')
- payload = self.ws.serialize_close(code=code)
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ # 7_3_5
- def test_asgi_websockets_7_13_1__7_13_2(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- # 7_13_1
+ payload = ws.serialize_close(reason='*' * 123)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock)
- payload = self.ws.serialize_close(code=5000)
+ # 7_3_6
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 7_13_2
+ payload = ws.serialize_close(reason='*' * 124)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
- payload = struct.pack('!I', 65536) + ''.encode('utf-8')
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+# # 7_5_1 FAIL Unit does not support UTF-8 validation
+#
+# _, sock, _ = ws.upgrade()
+#
+# payload = ws.serialize_close(reason = '\xce\xba\xe1\xbd\xb9\xcf' \
+# '\x83\xce\xbc\xce\xb5\xed\xa0\x80\x65\x64\x69\x74\x65\x64')
+#
+# ws.frame_write(sock, ws.OP_CLOSE, payload)
+# check_close(sock, 1007)
- def test_asgi_websockets_9_1_1__9_6_6(self, is_unsafe):
- if not is_unsafe:
- pytest.skip('unsafe, long run')
- self.load('websockets/mirror')
+def test_asgi_websockets_7_7_X__7_9_X():
+ client.load('websockets/mirror')
- assert 'success' in self.conf(
- {
- 'http': {
- 'websocket': {
- 'max_frame_size': 33554432,
- 'keepalive_interval': 0,
- }
+ valid_codes = [
+ 1000,
+ 1001,
+ 1002,
+ 1003,
+ 1007,
+ 1008,
+ 1009,
+ 1010,
+ 1011,
+ 3000,
+ 3999,
+ 4000,
+ 4999,
+ ]
+
+ invalid_codes = [0, 999, 1004, 1005, 1006, 1016, 1100, 2000, 2999]
+
+ for code in valid_codes:
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=code)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock)
+
+ for code in invalid_codes:
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=code)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+
+def test_asgi_websockets_7_13_1__7_13_2():
+ client.load('websockets/mirror')
+
+ # 7_13_1
+
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=5000)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+ # 7_13_2
+
+ _, sock, _ = ws.upgrade()
+
+ payload = struct.pack('!I', 65536) + ''.encode('utf-8')
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+
+def test_asgi_websockets_9_1_1__9_6_6(is_unsafe, system):
+ if not is_unsafe:
+ pytest.skip('unsafe, long run')
+
+ client.load('websockets/mirror')
+
+ assert 'success' in client.conf(
+ {
+ 'http': {
+ 'websocket': {
+ 'max_frame_size': 33554432,
+ 'keepalive_interval': 0,
}
- },
- 'settings',
- ), 'increase max_frame_size and keepalive_interval'
-
- _, sock, _ = self.ws.upgrade()
-
- op_text = self.ws.OP_TEXT
- op_binary = self.ws.OP_BINARY
-
- def check_payload(opcode, length, chopsize=None):
- if opcode == self.ws.OP_TEXT:
- payload = '*' * length
- else:
- payload = b'*' * length
+ }
+ },
+ 'settings',
+ ), 'increase max_frame_size and keepalive_interval'
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- frame = self.ws.frame_read(sock, read_timeout=5)
- self.check_frame(frame, True, opcode, payload)
+ _, sock, _ = ws.upgrade()
- def check_message(opcode, f_size):
- if opcode == self.ws.OP_TEXT:
- payload = '*' * 4 * 2**20
- else:
- payload = b'*' * 4 * 2**20
+ op_text = ws.OP_TEXT
+ op_binary = ws.OP_BINARY
- self.ws.message(sock, opcode, payload, fragmention_size=f_size)
- frame = self.ws.frame_read(sock, read_timeout=5)
- self.check_frame(frame, True, opcode, payload)
+ def check_payload(opcode, length, chopsize=None):
+ if opcode == ws.OP_TEXT:
+ payload = '*' * length
+ else:
+ payload = b'*' * length
- check_payload(op_text, 64 * 2**10) # 9_1_1
- check_payload(op_text, 256 * 2**10) # 9_1_2
- check_payload(op_text, 2**20) # 9_1_3
- check_payload(op_text, 4 * 2**20) # 9_1_4
- check_payload(op_text, 8 * 2**20) # 9_1_5
- check_payload(op_text, 16 * 2**20) # 9_1_6
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
+ frame = ws.frame_read(sock, read_timeout=5)
+ check_frame(frame, True, opcode, payload)
- check_payload(op_binary, 64 * 2**10) # 9_2_1
- check_payload(op_binary, 256 * 2**10) # 9_2_2
- check_payload(op_binary, 2**20) # 9_2_3
- check_payload(op_binary, 4 * 2**20) # 9_2_4
- check_payload(op_binary, 8 * 2**20) # 9_2_5
- check_payload(op_binary, 16 * 2**20) # 9_2_6
+ def check_message(opcode, f_size):
+ if opcode == ws.OP_TEXT:
+ payload = '*' * 4 * 2**20
+ else:
+ payload = b'*' * 4 * 2**20
- if option.system != 'Darwin' and option.system != 'FreeBSD':
- check_message(op_text, 64) # 9_3_1
- check_message(op_text, 256) # 9_3_2
- check_message(op_text, 2**10) # 9_3_3
- check_message(op_text, 4 * 2**10) # 9_3_4
- check_message(op_text, 16 * 2**10) # 9_3_5
- check_message(op_text, 64 * 2**10) # 9_3_6
- check_message(op_text, 256 * 2**10) # 9_3_7
- check_message(op_text, 2**20) # 9_3_8
- check_message(op_text, 4 * 2**20) # 9_3_9
+ ws.message(sock, opcode, payload, fragmention_size=f_size)
+ frame = ws.frame_read(sock, read_timeout=5)
+ check_frame(frame, True, opcode, payload)
- check_message(op_binary, 64) # 9_4_1
- check_message(op_binary, 256) # 9_4_2
- check_message(op_binary, 2**10) # 9_4_3
- check_message(op_binary, 4 * 2**10) # 9_4_4
- check_message(op_binary, 16 * 2**10) # 9_4_5
- check_message(op_binary, 64 * 2**10) # 9_4_6
- check_message(op_binary, 256 * 2**10) # 9_4_7
- check_message(op_binary, 2**20) # 9_4_8
- check_message(op_binary, 4 * 2**20) # 9_4_9
+ check_payload(op_text, 64 * 2**10) # 9_1_1
+ check_payload(op_text, 256 * 2**10) # 9_1_2
+ check_payload(op_text, 2**20) # 9_1_3
+ check_payload(op_text, 4 * 2**20) # 9_1_4
+ check_payload(op_text, 8 * 2**20) # 9_1_5
+ check_payload(op_text, 16 * 2**20) # 9_1_6
- check_payload(op_text, 2**20, chopsize=64) # 9_5_1
- check_payload(op_text, 2**20, chopsize=128) # 9_5_2
- check_payload(op_text, 2**20, chopsize=256) # 9_5_3
- check_payload(op_text, 2**20, chopsize=512) # 9_5_4
- check_payload(op_text, 2**20, chopsize=1024) # 9_5_5
- check_payload(op_text, 2**20, chopsize=2048) # 9_5_6
+ check_payload(op_binary, 64 * 2**10) # 9_2_1
+ check_payload(op_binary, 256 * 2**10) # 9_2_2
+ check_payload(op_binary, 2**20) # 9_2_3
+ check_payload(op_binary, 4 * 2**20) # 9_2_4
+ check_payload(op_binary, 8 * 2**20) # 9_2_5
+ check_payload(op_binary, 16 * 2**20) # 9_2_6
- check_payload(op_binary, 2**20, chopsize=64) # 9_6_1
- check_payload(op_binary, 2**20, chopsize=128) # 9_6_2
- check_payload(op_binary, 2**20, chopsize=256) # 9_6_3
- check_payload(op_binary, 2**20, chopsize=512) # 9_6_4
- check_payload(op_binary, 2**20, chopsize=1024) # 9_6_5
- check_payload(op_binary, 2**20, chopsize=2048) # 9_6_6
+ if system not in ['Darwin', 'FreeBSD']:
+ check_message(op_text, 64) # 9_3_1
+ check_message(op_text, 256) # 9_3_2
+ check_message(op_text, 2**10) # 9_3_3
+ check_message(op_text, 4 * 2**10) # 9_3_4
+ check_message(op_text, 16 * 2**10) # 9_3_5
+ check_message(op_text, 64 * 2**10) # 9_3_6
+ check_message(op_text, 256 * 2**10) # 9_3_7
+ check_message(op_text, 2**20) # 9_3_8
+ check_message(op_text, 4 * 2**20) # 9_3_9
- self.close_connection(sock)
+ check_message(op_binary, 64) # 9_4_1
+ check_message(op_binary, 256) # 9_4_2
+ check_message(op_binary, 2**10) # 9_4_3
+ check_message(op_binary, 4 * 2**10) # 9_4_4
+ check_message(op_binary, 16 * 2**10) # 9_4_5
+ check_message(op_binary, 64 * 2**10) # 9_4_6
+ check_message(op_binary, 256 * 2**10) # 9_4_7
+ check_message(op_binary, 2**20) # 9_4_8
+ check_message(op_binary, 4 * 2**20) # 9_4_9
- def test_asgi_websockets_10_1_1(self):
- self.load('websockets/mirror')
+ check_payload(op_text, 2**20, chopsize=64) # 9_5_1
+ check_payload(op_text, 2**20, chopsize=128) # 9_5_2
+ check_payload(op_text, 2**20, chopsize=256) # 9_5_3
+ check_payload(op_text, 2**20, chopsize=512) # 9_5_4
+ check_payload(op_text, 2**20, chopsize=1024) # 9_5_5
+ check_payload(op_text, 2**20, chopsize=2048) # 9_5_6
- _, sock, _ = self.ws.upgrade()
+ check_payload(op_binary, 2**20, chopsize=64) # 9_6_1
+ check_payload(op_binary, 2**20, chopsize=128) # 9_6_2
+ check_payload(op_binary, 2**20, chopsize=256) # 9_6_3
+ check_payload(op_binary, 2**20, chopsize=512) # 9_6_4
+ check_payload(op_binary, 2**20, chopsize=1024) # 9_6_5
+ check_payload(op_binary, 2**20, chopsize=2048) # 9_6_6
- payload = '*' * 65536
+ close_connection(sock)
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1300)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+def test_asgi_websockets_10_1_1():
+ client.load('websockets/mirror')
- self.close_connection(sock)
+ _, sock, _ = ws.upgrade()
- # settings
+ payload = '*' * 65536
- def test_asgi_websockets_max_frame_size(self):
- self.load('websockets/mirror')
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1300)
- assert 'success' in self.conf(
- {'http': {'websocket': {'max_frame_size': 100}}}, 'settings'
- ), 'configure max_frame_size'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ close_connection(sock)
- payload = '*' * 94
- opcode = self.ws.OP_TEXT
- self.ws.frame_write(sock, opcode, payload) # frame length is 100
+# settings
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, opcode, payload)
- payload = '*' * 95
+def test_asgi_websockets_max_frame_size():
+ client.load('websockets/mirror')
- self.ws.frame_write(sock, opcode, payload) # frame length is 101
- self.check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'max_frame_size': 100}}}, 'settings'
+ ), 'configure max_frame_size'
- def test_asgi_websockets_read_timeout(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- assert 'success' in self.conf(
- {'http': {'websocket': {'read_timeout': 5}}}, 'settings'
- ), 'configure read_timeout'
+ payload = '*' * 94
+ opcode = ws.OP_TEXT
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, opcode, payload) # frame length is 100
- frame = self.ws.frame_to_send(self.ws.OP_TEXT, 'blah')
- sock.sendall(frame[:2])
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, opcode, payload)
- time.sleep(2)
+ payload = '*' * 95
- self.check_close(sock, 1001) # 1001 - CLOSE_GOING_AWAY
+ ws.frame_write(sock, opcode, payload) # frame length is 101
+ check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
- def test_asgi_websockets_keepalive_interval(self):
- self.load('websockets/mirror')
- assert 'success' in self.conf(
- {'http': {'websocket': {'keepalive_interval': 5}}}, 'settings'
- ), 'configure keepalive_interval'
+def test_asgi_websockets_read_timeout():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'read_timeout': 5}}}, 'settings'
+ ), 'configure read_timeout'
- frame = self.ws.frame_to_send(self.ws.OP_TEXT, 'blah')
- sock.sendall(frame[:2])
+ _, sock, _ = ws.upgrade()
- time.sleep(2)
+ frame = ws.frame_to_send(ws.OP_TEXT, 'blah')
+ sock.sendall(frame[:2])
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PING, '') # PING frame
+ time.sleep(2)
- sock.close()
+ check_close(sock, 1001) # 1001 - CLOSE_GOING_AWAY
- def test_asgi_websockets_client_locks_app(self):
- self.load('websockets/mirror')
- message = 'blah'
+def test_asgi_websockets_keepalive_interval():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'keepalive_interval': 5}}}, 'settings'
+ ), 'configure keepalive_interval'
- assert 'success' in self.conf({}), 'remove app'
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
+ frame = ws.frame_to_send(ws.OP_TEXT, 'blah')
+ sock.sendall(frame[:2])
- frame = self.ws.frame_read(sock)
+ time.sleep(2)
- assert message == frame['data'].decode('utf-8'), 'client'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PING, '') # PING frame
- sock.close()
+ sock.close()
+
+
+def test_asgi_websockets_client_locks_app():
+ client.load('websockets/mirror')
+
+ message = 'blah'
+
+ _, sock, _ = ws.upgrade()
+
+ assert 'success' in client.conf({}), 'remove app'
+
+ ws.frame_write(sock, ws.OP_TEXT, message)
+
+ frame = ws.frame_read(sock)
+
+ assert message == frame['data'].decode('utf-8'), 'client'
+
+ sock.close()
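
The "frame length is 100"/"frame length is 101" comments in the max_frame_size test above follow from WebSocket framing: with a payload shorter than 126 bytes, a masked client frame carries a 2-byte header plus a 4-byte masking key, so 94 bytes of payload make a 100-byte frame and 95 make 101. A minimal illustrative sketch of that arithmetic (not part of the test suite):

# Assumes a masked client-to-server frame with no extended length field
# (payload < 126 bytes), per RFC 6455 framing.
def client_frame_len(payload_len):
    header = 2        # FIN/opcode byte + MASK bit/length byte
    masking_key = 4   # client frames are always masked
    return header + masking_key + payload_len

assert client_frame_len(94) == 100   # at max_frame_size=100: accepted
assert client_frame_len(95) == 101   # over the limit: close code 1009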
diff --git a/test/test_client_ip.py b/test/test_client_ip.py
index 6520d5e2..82c76718 100644
--- a/test/test_client_ip.py
+++ b/test/test_client_ip.py
@@ -1,181 +1,186 @@
-from unit.applications.lang.python import TestApplicationPython
+import pytest
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {'modules': {'python': 'any'}}
-class TestClientIP(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def client_ip(self, options):
- assert 'success' in self.conf(
- {
- "127.0.0.1:7081": {
- "client_ip": options,
- "pass": "applications/client_ip",
- },
- "[::1]:7082": {
- "client_ip": options,
- "pass": "applications/client_ip",
- },
- f"unix:{option.temp_dir}/sock": {
- "client_ip": options,
- "pass": "applications/client_ip",
- },
+
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ client.load('client_ip')
+
+
+def client_ip(options):
+ assert 'success' in client.conf(
+ {
+ "127.0.0.1:7081": {
+ "client_ip": options,
+ "pass": "applications/client_ip",
},
- 'listeners',
- ), 'listeners configure'
+ "[::1]:7082": {
+ "client_ip": options,
+ "pass": "applications/client_ip",
+ },
+ f"unix:{option.temp_dir}/sock": {
+ "client_ip": options,
+ "pass": "applications/client_ip",
+ },
+ },
+ 'listeners',
+ ), 'listeners configure'
+
+
+def get_xff(xff, sock_type='ipv4'):
+ address = {
+ 'ipv4': ('127.0.0.1', 7081),
+ 'ipv6': ('::1', 7082),
+ 'unix': (f'{option.temp_dir}/sock', None),
+ }
+ (addr, port) = address[sock_type]
+
+ return client.get(
+ sock_type=sock_type,
+ addr=addr,
+ port=port,
+ headers={'Connection': 'close', 'X-Forwarded-For': xff},
+ )['body']
+
+
+def test_client_ip_single_ip():
+ client_ip({'header': 'X-Forwarded-For', 'source': '123.123.123.123'})
+
+ assert client.get(port=7081)['body'] == '127.0.0.1', 'ipv4 default'
+ assert (
+ client.get(sock_type='ipv6', port=7082)['body'] == '::1'
+ ), 'ipv6 default'
+ assert get_xff('1.1.1.1') == '127.0.0.1', 'bad source'
+ assert get_xff('blah') == '127.0.0.1', 'bad header'
+ assert get_xff('1.1.1.1', 'ipv6') == '::1', 'bad source ipv6'
+
+ client_ip({'header': 'X-Forwarded-For', 'source': '127.0.0.1'})
- def get_xff(self, xff, sock_type='ipv4'):
- address = {
- 'ipv4': ('127.0.0.1', 7081),
- 'ipv6': ('::1', 7082),
- 'unix': (f'{option.temp_dir}/sock', None),
+ assert client.get(port=7081)['body'] == '127.0.0.1', 'ipv4 default 2'
+ assert (
+ client.get(sock_type='ipv6', port=7082)['body'] == '::1'
+ ), 'ipv6 default 2'
+ assert get_xff('1.1.1.1') == '1.1.1.1', 'replace'
+ assert get_xff('blah') == '127.0.0.1', 'bad header 2'
+ assert get_xff('1.1.1.1', 'ipv6') == '::1', 'bad source ipv6 2'
+
+ client_ip({'header': 'X-Forwarded-For', 'source': '!127.0.0.1'})
+
+ assert get_xff('1.1.1.1') == '127.0.0.1', 'bad source 3'
+ assert get_xff('1.1.1.1', 'ipv6') == '1.1.1.1', 'replace 2'
+
+
+def test_client_ip_ipv4():
+ client_ip({'header': 'X-Forwarded-For', 'source': '127.0.0.1'})
+
+ assert get_xff('8.8.8.8, 84.23.23.11') == '84.23.23.11', 'xff replace'
+ assert (
+ get_xff('8.8.8.8, 84.23.23.11, 127.0.0.1') == '127.0.0.1'
+ ), 'xff replace 2'
+ assert (
+ get_xff(['8.8.8.8', '127.0.0.1, 10.0.1.1']) == '10.0.1.1'
+ ), 'xff replace multi'
+
+
+def test_client_ip_ipv6():
+ client_ip({'header': 'X-Forwarded-For', 'source': '::1'})
+
+ assert get_xff('1.1.1.1') == '127.0.0.1', 'bad source ipv4'
+
+ for ip in [
+ 'f607:7403:1e4b:6c66:33b2:843f:2517:da27',
+ '2001:db8:3c4d:15::1a2f:1a2b',
+ '2001::3c4d:15:1a2f:1a2b',
+ '::11.22.33.44',
+ ]:
+ assert get_xff(ip, 'ipv6') == ip, 'replace'
+
+
+def test_client_ip_unix():
+ client_ip({'header': 'X-Forwarded-For', 'source': 'unix'})
+
+ assert get_xff('1.1.1.1') == '127.0.0.1', 'bad source ipv4'
+ assert get_xff('1.1.1.1', 'ipv6') == '::1', 'bad source ipv6'
+
+ for ip in [
+ '1.1.1.1',
+ '::11.22.33.44',
+ ]:
+ assert get_xff(ip, 'unix') == ip, 'replace'
+
+
+def test_client_ip_recursive():
+ client_ip(
+ {
+ 'header': 'X-Forwarded-For',
+ 'recursive': True,
+ 'source': ['127.0.0.1', '10.50.0.17', '10.5.2.1'],
}
- (addr, port) = address[sock_type]
-
- return self.get(
- sock_type=sock_type,
- addr=addr,
- port=port,
- headers={'Connection': 'close', 'X-Forwarded-For': xff},
- )['body']
-
- def setup_method(self):
- self.load('client_ip')
-
- def test_client_ip_single_ip(self):
- self.client_ip(
- {'header': 'X-Forwarded-For', 'source': '123.123.123.123'}
- )
-
- assert self.get(port=7081)['body'] == '127.0.0.1', 'ipv4 default'
- assert (
- self.get(sock_type='ipv6', port=7082)['body'] == '::1'
- ), 'ipv6 default'
- assert self.get_xff('1.1.1.1') == '127.0.0.1', 'bad source'
- assert self.get_xff('blah') == '127.0.0.1', 'bad header'
- assert self.get_xff('1.1.1.1', 'ipv6') == '::1', 'bad source ipv6'
-
- self.client_ip({'header': 'X-Forwarded-For', 'source': '127.0.0.1'})
-
- assert self.get(port=7081)['body'] == '127.0.0.1', 'ipv4 default 2'
- assert (
- self.get(sock_type='ipv6', port=7082)['body'] == '::1'
- ), 'ipv6 default 2'
- assert self.get_xff('1.1.1.1') == '1.1.1.1', 'replace'
- assert self.get_xff('blah') == '127.0.0.1', 'bad header 2'
- assert self.get_xff('1.1.1.1', 'ipv6') == '::1', 'bad source ipv6 2'
-
- self.client_ip({'header': 'X-Forwarded-For', 'source': '!127.0.0.1'})
-
- assert self.get_xff('1.1.1.1') == '127.0.0.1', 'bad source 3'
- assert self.get_xff('1.1.1.1', 'ipv6') == '1.1.1.1', 'replace 2'
-
- def test_client_ip_ipv4(self):
- self.client_ip({'header': 'X-Forwarded-For', 'source': '127.0.0.1'})
-
- assert (
- self.get_xff('8.8.8.8, 84.23.23.11') == '84.23.23.11'
- ), 'xff replace'
- assert (
- self.get_xff('8.8.8.8, 84.23.23.11, 127.0.0.1') == '127.0.0.1'
- ), 'xff replace 2'
- assert (
- self.get_xff(['8.8.8.8', '127.0.0.1, 10.0.1.1']) == '10.0.1.1'
- ), 'xff replace multi'
-
- def test_client_ip_ipv6(self):
- self.client_ip({'header': 'X-Forwarded-For', 'source': '::1'})
-
- assert self.get_xff('1.1.1.1') == '127.0.0.1', 'bad source ipv4'
-
- for ip in [
- 'f607:7403:1e4b:6c66:33b2:843f:2517:da27',
- '2001:db8:3c4d:15::1a2f:1a2b',
- '2001::3c4d:15:1a2f:1a2b',
- '::11.22.33.44',
- ]:
- assert self.get_xff(ip, 'ipv6') == ip, 'replace'
-
- def test_client_ip_unix(self, temp_dir):
- self.client_ip({'header': 'X-Forwarded-For', 'source': 'unix'})
-
- assert self.get_xff('1.1.1.1') == '127.0.0.1', 'bad source ipv4'
- assert self.get_xff('1.1.1.1', 'ipv6') == '::1', 'bad source ipv6'
-
- for ip in [
- '1.1.1.1',
- '::11.22.33.44',
- ]:
- assert self.get_xff(ip, 'unix') == ip, 'replace'
-
- def test_client_ip_recursive(self):
- self.client_ip(
- {
- 'header': 'X-Forwarded-For',
- 'recursive': True,
- 'source': ['127.0.0.1', '10.50.0.17', '10.5.2.1'],
+ )
+
+ assert get_xff('1.1.1.1') == '1.1.1.1', 'xff chain'
+ assert get_xff('1.1.1.1, 10.5.2.1') == '1.1.1.1', 'xff chain 2'
+ assert get_xff('8.8.8.8, 1.1.1.1, 10.5.2.1') == '1.1.1.1', 'xff chain 3'
+ assert (
+ get_xff('10.50.0.17, 10.5.2.1, 10.5.2.1') == '10.50.0.17'
+ ), 'xff chain 4'
+ assert (
+ get_xff(['8.8.8.8', '1.1.1.1, 127.0.0.1']) == '1.1.1.1'
+ ), 'xff replace multi'
+ assert (
+ get_xff(['8.8.8.8', '1.1.1.1, 127.0.0.1', '10.5.2.1']) == '1.1.1.1'
+ ), 'xff replace multi 2'
+ assert (
+ get_xff(['10.5.2.1', '10.50.0.17, 1.1.1.1', '10.5.2.1']) == '1.1.1.1'
+ ), 'xff replace multi 3'
+ assert (
+ get_xff('8.8.8.8, 2001:db8:3c4d:15::1a2f:1a2b, 127.0.0.1')
+ == '2001:db8:3c4d:15::1a2f:1a2b'
+ ), 'xff chain ipv6'
+
+
+def test_client_ip_case_insensitive():
+ client_ip({'header': 'x-forwarded-for', 'source': '127.0.0.1'})
+
+ assert get_xff('1.1.1.1') == '1.1.1.1', 'case insensitive'
+
+
+def test_client_ip_empty_source():
+ client_ip({'header': 'X-Forwarded-For', 'source': []})
+
+ assert get_xff('1.1.1.1') == '127.0.0.1', 'empty source'
+
+
+def test_client_ip_invalid():
+ assert 'error' in client.conf(
+ {
+ "127.0.0.1:7081": {
+ "client_ip": {"source": '127.0.0.1'},
+ "pass": "applications/client_ip",
}
- )
-
- assert self.get_xff('1.1.1.1') == '1.1.1.1', 'xff chain'
- assert self.get_xff('1.1.1.1, 10.5.2.1') == '1.1.1.1', 'xff chain 2'
- assert (
- self.get_xff('8.8.8.8, 1.1.1.1, 10.5.2.1') == '1.1.1.1'
- ), 'xff chain 3'
- assert (
- self.get_xff('10.50.0.17, 10.5.2.1, 10.5.2.1') == '10.50.0.17'
- ), 'xff chain 4'
- assert (
- self.get_xff(['8.8.8.8', '1.1.1.1, 127.0.0.1']) == '1.1.1.1'
- ), 'xff replace multi'
- assert (
- self.get_xff(['8.8.8.8', '1.1.1.1, 127.0.0.1', '10.5.2.1'])
- == '1.1.1.1'
- ), 'xff replace multi 2'
- assert (
- self.get_xff(['10.5.2.1', '10.50.0.17, 1.1.1.1', '10.5.2.1'])
- == '1.1.1.1'
- ), 'xff replace multi 3'
- assert (
- self.get_xff('8.8.8.8, 2001:db8:3c4d:15::1a2f:1a2b, 127.0.0.1')
- == '2001:db8:3c4d:15::1a2f:1a2b'
- ), 'xff chain ipv6'
-
- def test_client_ip_case_insensitive(self):
- self.client_ip({'header': 'x-forwarded-for', 'source': '127.0.0.1'})
-
- assert self.get_xff('1.1.1.1') == '1.1.1.1', 'case insensitive'
-
- def test_client_ip_empty_source(self):
- self.client_ip({'header': 'X-Forwarded-For', 'source': []})
-
- assert self.get_xff('1.1.1.1') == '127.0.0.1', 'empty source'
-
- def test_client_ip_invalid(self):
- assert 'error' in self.conf(
+ },
+ 'listeners',
+ ), 'invalid header'
+
+ def check_invalid_source(source):
+ assert 'error' in client.conf(
{
"127.0.0.1:7081": {
- "client_ip": {"source": '127.0.0.1'},
+ "client_ip": {
+ "header": "X-Forwarded-For",
+ "source": source,
+ },
"pass": "applications/client_ip",
}
},
'listeners',
- ), 'invalid header'
-
- def check_invalid_source(source):
- assert 'error' in self.conf(
- {
- "127.0.0.1:7081": {
- "client_ip": {
- "header": "X-Forwarded-For",
- "source": source,
- },
- "pass": "applications/client_ip",
- }
- },
- 'listeners',
- ), 'invalid source'
-
- check_invalid_source(None)
- check_invalid_source('a')
- check_invalid_source(['a'])
+ ), 'invalid source'
+
+ check_invalid_source(None)
+ check_invalid_source('a')
+ check_invalid_source(['a'])
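
In the recursive client_ip tests above, the expected address comes from walking the combined X-Forwarded-For entries from right to left and stopping at the first address not listed in "source". A simplified model of that selection, using a hypothetical helper name (this is not Unit's implementation and it skips address validation, i.e. the 'bad header' cases):

def effective_client_ip(peer, xff_chain, trusted, recursive=False):
    # the header is only honored when the connection peer is trusted
    if peer not in trusted:
        return peer
    client = peer
    for addr in reversed(xff_chain):  # walk the chain right to left
        client = addr
        if not recursive or addr not in trusted:
            break                     # first untrusted hop is the client
    return client

trusted = {'127.0.0.1', '10.50.0.17', '10.5.2.1'}
assert effective_client_ip(
    '127.0.0.1', ['10.50.0.17', '10.5.2.1', '10.5.2.1'], trusted,
    recursive=True,
) == '10.50.0.17'   # matches the 'xff chain 4' assertion

Without "recursive", only the last X-Forwarded-For entry replaces the peer address, which matches the plain replace cases above.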
diff --git a/test/test_configuration.py b/test/test_configuration.py
index e3ddc891..19a2a1a5 100644
--- a/test/test_configuration.py
+++ b/test/test_configuration.py
@@ -1,439 +1,465 @@
import socket
import pytest
-from unit.control import TestControl
-from unit.option import option
+from unit.control import Control
+prerequisites = {'modules': {'python': 'any'}}
-class TestConfiguration(TestControl):
- prerequisites = {'modules': {'python': 'any'}}
+client = Control()
- def try_addr(self, addr):
- return self.conf(
- {
- "listeners": {addr: {"pass": "routes"}},
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- )
- def test_json_empty(self):
- assert 'error' in self.conf(''), 'empty'
+def try_addr(addr):
+ return client.conf(
+ {
+ "listeners": {addr: {"pass": "routes"}},
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ )
- def test_json_leading_zero(self):
- assert 'error' in self.conf('00'), 'leading zero'
- def test_json_unicode(self):
- assert 'success' in self.conf(
- u"""
- {
- "ap\u0070": {
- "type": "\u0070ython",
- "processes": { "spare": 0 },
- "path": "\u002Fapp",
- "module": "wsgi"
- }
+def test_json_empty():
+ assert 'error' in client.conf(''), 'empty'
+
+
+def test_json_leading_zero():
+ assert 'error' in client.conf('00'), 'leading zero'
+
+
+def test_json_unicode():
+ assert 'success' in client.conf(
+ """
+ {
+ "ap\u0070": {
+ "type": "\u0070ython",
+ "processes": { "spare": 0 },
+ "path": "\u002Fapp",
+ "module": "wsgi"
}
- """,
- 'applications',
- ), 'unicode'
+ }
+ """,
+ 'applications',
+ ), 'unicode'
+
+ assert client.conf_get('applications') == {
+ "app": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ }
+ }, 'unicode get'
- assert self.conf_get('applications') == {
- "app": {
+
+def test_json_unicode_2():
+ assert 'success' in client.conf(
+ {
+ "приложение": {
"type": "python",
"processes": {"spare": 0},
"path": "/app",
"module": "wsgi",
}
- }, 'unicode get'
+ },
+ 'applications',
+ ), 'unicode 2'
- def test_json_unicode_2(self):
- assert 'success' in self.conf(
- {
- "приложение": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- }
- },
- 'applications',
- ), 'unicode 2'
+ assert 'приложение' in client.conf_get('applications')
- assert 'приложение' in self.conf_get('applications'), 'unicode 2 get'
- def test_json_unicode_number(self):
- assert 'success' in self.conf(
- u"""
- {
- "app": {
- "type": "python",
- "processes": { "spare": \u0030 },
- "path": "/app",
- "module": "wsgi"
- }
+def test_json_unicode_number():
+ assert 'success' in client.conf(
+ """
+ {
+ "app": {
+ "type": "python",
+ "processes": { "spare": \u0030 },
+ "path": "/app",
+ "module": "wsgi"
}
- """,
- 'applications',
- ), 'unicode number'
+ }
+ """,
+ 'applications',
+ ), 'unicode number'
- def test_json_utf8_bom(self):
- assert 'success' in self.conf(
- b"""\xEF\xBB\xBF
- {
- "app": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi"
- }
- }
- """,
- 'applications',
- ), 'UTF-8 BOM'
-
- def test_json_comment_single_line(self):
- assert 'success' in self.conf(
- b"""
- // this is bridge
- {
- "//app": {
- "type": "python", // end line
- "processes": {"spare": 0},
- // inside of block
- "path": "/app",
- "module": "wsgi"
- }
- // double //
+
+def test_json_utf8_bom():
+ assert 'success' in client.conf(
+ b"""\xEF\xBB\xBF
+ {
+ "app": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi"
}
- // end of json \xEF\t
- """,
- 'applications',
- ), 'single line comments'
-
- def test_json_comment_multi_line(self):
- assert 'success' in self.conf(
- b"""
- /* this is bridge */
- {
- "/*app": {
- /**
- * multiple lines
- **/
- "type": "python",
- "processes": /* inline */ {"spare": 0},
- "path": "/app",
- "module": "wsgi"
- /*
- // end of block */
- }
- /* blah * / blah /* blah */
+ }
+ """,
+ 'applications',
+ ), 'UTF-8 BOM'
+
+
+def test_json_comment_single_line():
+ assert 'success' in client.conf(
+ b"""
+ // this is bridge
+ {
+ "//app": {
+ "type": "python", // end line
+ "processes": {"spare": 0},
+ // inside of block
+ "path": "/app",
+ "module": "wsgi"
}
- /* end of json \xEF\t\b */
- """,
- 'applications',
- ), 'multi line comments'
-
- def test_json_comment_invalid(self):
- assert 'error' in self.conf(b'/{}', 'applications'), 'slash'
- assert 'error' in self.conf(b'//{}', 'applications'), 'comment'
- assert 'error' in self.conf(b'{} /', 'applications'), 'slash end'
- assert 'error' in self.conf(b'/*{}', 'applications'), 'slash star'
- assert 'error' in self.conf(b'{} /*', 'applications'), 'slash star end'
-
- def test_applications_open_brace(self):
- assert 'error' in self.conf('{', 'applications'), 'open brace'
-
- def test_applications_string(self):
- assert 'error' in self.conf('"{}"', 'applications'), 'string'
-
- @pytest.mark.skip('not yet, unsafe')
- def test_applications_type_only(self):
- assert 'error' in self.conf(
- {"app": {"type": "python"}}, 'applications'
- ), 'type only'
-
- def test_applications_miss_quote(self):
- assert 'error' in self.conf(
- """
- {
- app": {
- "type": "python",
- "processes": { "spare": 0 },
- "path": "/app",
- "module": "wsgi"
- }
+ // double //
+ }
+ // end of json \xEF\t
+ """,
+ 'applications',
+ ), 'single line comments'
+
+
+def test_json_comment_multi_line():
+ assert 'success' in client.conf(
+ b"""
+ /* this is bridge */
+ {
+ "/*app": {
+ /**
+ * multiple lines
+ **/
+ "type": "python",
+ "processes": /* inline */ {"spare": 0},
+ "path": "/app",
+ "module": "wsgi"
+ /*
+ // end of block */
}
- """,
- 'applications',
- ), 'miss quote'
+ /* blah * / blah /* blah */
+ }
+ /* end of json \xEF\t\b */
+ """,
+ 'applications',
+ ), 'multi line comments'
- def test_applications_miss_colon(self):
- assert 'error' in self.conf(
- """
- {
- "app" {
- "type": "python",
- "processes": { "spare": 0 },
- "path": "/app",
- "module": "wsgi"
- }
+
+def test_json_comment_invalid():
+ assert 'error' in client.conf(b'/{}', 'applications'), 'slash'
+ assert 'error' in client.conf(b'//{}', 'applications'), 'comment'
+ assert 'error' in client.conf(b'{} /', 'applications'), 'slash end'
+ assert 'error' in client.conf(b'/*{}', 'applications'), 'slash star'
+ assert 'error' in client.conf(b'{} /*', 'applications'), 'slash star end'
+
+
+def test_applications_open_brace():
+ assert 'error' in client.conf('{', 'applications'), 'open brace'
+
+
+def test_applications_string():
+ assert 'error' in client.conf('"{}"', 'applications'), 'string'
+
+
+@pytest.mark.skip('not yet, unsafe')
+def test_applications_type_only():
+ assert 'error' in client.conf(
+ {"app": {"type": "python"}}, 'applications'
+ ), 'type only'
+
+
+def test_applications_miss_quote():
+ assert 'error' in client.conf(
+ """
+ {
+ app": {
+ "type": "python",
+ "processes": { "spare": 0 },
+ "path": "/app",
+ "module": "wsgi"
}
- """,
- 'applications',
- ), 'miss colon'
+ }
+ """,
+ 'applications',
+ ), 'miss quote'
- def test_applications_miss_comma(self):
- assert 'error' in self.conf(
- """
- {
- "app": {
- "type": "python"
- "processes": { "spare": 0 },
- "path": "/app",
- "module": "wsgi"
- }
+
+def test_applications_miss_colon():
+ assert 'error' in client.conf(
+ """
+ {
+ "app" {
+ "type": "python",
+ "processes": { "spare": 0 },
+ "path": "/app",
+ "module": "wsgi"
}
- """,
- 'applications',
- ), 'miss comma'
+ }
+ """,
+ 'applications',
+ ), 'miss colon'
- def test_applications_skip_spaces(self):
- assert 'success' in self.conf(
- b'{ \n\r\t}', 'applications'
- ), 'skip spaces'
- def test_applications_relative_path(self):
- assert 'success' in self.conf(
- {
- "app": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "../app",
- "module": "wsgi",
- }
- },
- 'applications',
- ), 'relative path'
+def test_applications_miss_comma():
+ assert 'error' in client.conf(
+ """
+ {
+ "app": {
+ "type": "python"
+ "processes": { "spare": 0 },
+ "path": "/app",
+ "module": "wsgi"
+ }
+ }
+ """,
+ 'applications',
+ ), 'miss comma'
- @pytest.mark.skip('not yet, unsafe')
- def test_listeners_empty(self):
- assert 'error' in self.conf(
- {"*:7080": {}}, 'listeners'
- ), 'listener empty'
- def test_listeners_no_app(self):
- assert 'error' in self.conf(
- {"*:7080": {"pass": "applications/app"}}, 'listeners'
- ), 'listeners no app'
+def test_applications_skip_spaces():
+ assert 'success' in client.conf(b'{ \n\r\t}', 'applications'), 'skip spaces'
- def test_listeners_unix_abstract(self):
- if option.system != 'Linux':
- assert 'error' in self.try_addr("unix:@sock"), 'abstract at'
- pytest.skip('not yet')
+def test_applications_relative_path():
+ assert 'success' in client.conf(
+ {
+ "app": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "../app",
+ "module": "wsgi",
+ }
+ },
+ 'applications',
+ ), 'relative path'
- assert 'error' in self.try_addr("unix:\0soc"), 'abstract \0'
- assert 'error' in self.try_addr("unix:\u0000soc"), 'abstract \0 unicode'
- def test_listeners_addr(self):
- assert 'success' in self.try_addr("*:7080"), 'wildcard'
- assert 'success' in self.try_addr("127.0.0.1:7081"), 'explicit'
- assert 'success' in self.try_addr("[::1]:7082"), 'explicit ipv6'
+@pytest.mark.skip('not yet, unsafe')
+def test_listeners_empty():
+ assert 'error' in client.conf({"*:7080": {}}, 'listeners'), 'listener empty'
- def test_listeners_addr_error(self):
- assert 'error' in self.try_addr("127.0.0.1"), 'no port'
- def test_listeners_addr_error_2(self, skip_alert):
- skip_alert(r'bind.*failed', r'failed to apply new conf')
+def test_listeners_no_app():
+ assert 'error' in client.conf(
+ {"*:7080": {"pass": "applications/app"}}, 'listeners'
+ ), 'listeners no app'
- assert 'error' in self.try_addr(
- "[f607:7403:1e4b:6c66:33b2:843f:2517:da27]:7080"
- )
- def test_listeners_port_release(self):
- for i in range(10):
- fail = False
- with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
- s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+def test_listeners_unix_abstract(system):
+ if system != 'Linux':
+ assert 'error' in try_addr("unix:@sock"), 'abstract at'
- self.conf(
- {
- "listeners": {"127.0.0.1:7080": {"pass": "routes"}},
- "routes": [],
- }
- )
+ pytest.skip('not yet')
- resp = self.conf({"listeners": {}, "applications": {}})
+ assert 'error' in try_addr("unix:\0soc"), 'abstract \0'
+ assert 'error' in try_addr("unix:\u0000soc"), 'abstract \0 unicode'
- try:
- s.bind(('127.0.0.1', 7080))
- s.listen()
- except OSError:
- fail = True
+def test_listeners_addr():
+ assert 'success' in try_addr("*:7080"), 'wildcard'
+ assert 'success' in try_addr("127.0.0.1:7081"), 'explicit'
+ assert 'success' in try_addr("[::1]:7082"), 'explicit ipv6'
- if fail:
- pytest.fail('cannot bind or listen to the address')
- assert 'success' in resp, 'port release'
+def test_listeners_addr_error():
+ assert 'error' in try_addr("127.0.0.1"), 'no port'
- def test_json_application_name_large(self):
- name = "X" * 1024 * 1024
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": f"applications/{name}"}},
- "applications": {
- name: {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- }
- },
- }
- )
+def test_listeners_addr_error_2(skip_alert):
+ skip_alert(r'bind.*failed', r'failed to apply new conf')
- @pytest.mark.skip('not yet')
- def test_json_application_many(self):
- apps = 999
+ assert 'error' in try_addr("[f607:7403:1e4b:6c66:33b2:843f:2517:da27]:7080")
- conf = {
- "applications": {
- f"app-{a}": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- }
- for a in range(apps)
- },
- "listeners": {
- f"*:{(7000 + a)}": {"pass": f"applications/app-{a}"}
- for a in range(apps)
- },
- }
- assert 'success' in self.conf(conf)
+def test_listeners_port_release():
+ for _ in range(10):
+ fail = False
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- def test_json_application_python_prefix(self):
- conf = {
- "applications": {
- "sub-app": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- "prefix": "/app",
- }
- },
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
+ client.conf(
{
- "match": {"uri": "/app/*"},
- "action": {"pass": "applications/sub-app"},
+ "listeners": {"127.0.0.1:7080": {"pass": "routes"}},
+ "routes": [],
}
- ],
- }
+ )
- assert 'success' in self.conf(conf)
+ resp = client.conf({"listeners": {}, "applications": {}})
- def test_json_application_prefix_target(self):
- conf = {
- "applications": {
- "sub-app": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "targets": {
- "foo": {"module": "foo.wsgi", "prefix": "/app"},
- "bar": {
- "module": "bar.wsgi",
- "callable": "bar",
- "prefix": "/api",
- },
- },
- }
- },
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": "/app/*"},
- "action": {"pass": "applications/sub-app/foo"},
- },
- {
- "match": {"uri": "/api/*"},
- "action": {"pass": "applications/sub-app/bar"},
- },
- ],
- }
+ try:
+ s.bind(('127.0.0.1', 7080))
+ s.listen()
- assert 'success' in self.conf(conf)
+ except OSError:
+ fail = True
- def test_json_application_invalid_python_prefix(self):
- conf = {
- "applications": {
- "sub-app": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- "prefix": "app",
- }
- },
- "listeners": {"*:7080": {"pass": "applications/sub-app"}},
- }
+ if fail:
+ pytest.fail('cannot bind or listen to the address')
- assert 'error' in self.conf(conf)
+ assert 'success' in resp, 'port release'
- def test_json_application_empty_python_prefix(self):
- conf = {
- "applications": {
- "sub-app": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- "prefix": "",
- }
- },
- "listeners": {"*:7080": {"pass": "applications/sub-app"}},
- }
- assert 'error' in self.conf(conf)
+def test_json_application_name_large():
+ name = "X" * 1024 * 1024
- def test_json_application_many2(self):
- conf = {
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": f"applications/{name}"}},
"applications": {
- f"app-{a}": {
+ name: {
"type": "python",
"processes": {"spare": 0},
"path": "/app",
"module": "wsgi",
}
- # Larger number of applications can cause test fail with default
- # open files limit due to the lack of file descriptors.
- for a in range(100)
},
- "listeners": {"*:7080": {"pass": "applications/app-1"}},
}
+ )
+
+
+@pytest.mark.skip('not yet')
+def test_json_application_many():
+ apps = 999
+
+ conf = {
+ "applications": {
+ f"app-{a}": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ }
+ for a in range(apps)
+ },
+ "listeners": {
+ f"*:{(7000 + a)}": {"pass": f"applications/app-{a}"}
+ for a in range(apps)
+ },
+ }
- assert 'success' in self.conf(conf)
+ assert 'success' in client.conf(conf)
- def test_unprivileged_user_error(self, is_su, skip_alert):
- skip_alert(r'cannot set user "root"', r'failed to apply new conf')
- if is_su:
- pytest.skip('unprivileged tests')
- assert 'error' in self.conf(
+def test_json_application_python_prefix():
+ conf = {
+ "applications": {
+ "sub-app": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ "prefix": "/app",
+ }
+ },
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
{
- "app": {
- "type": "external",
- "processes": 1,
- "executable": "/app",
- "user": "root",
- }
+ "match": {"uri": "/app/*"},
+ "action": {"pass": "applications/sub-app"},
+ }
+ ],
+ }
+
+ assert 'success' in client.conf(conf)
+
+
+def test_json_application_prefix_target():
+ conf = {
+ "applications": {
+ "sub-app": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "targets": {
+ "foo": {"module": "foo.wsgi", "prefix": "/app"},
+ "bar": {
+ "module": "bar.wsgi",
+ "callable": "bar",
+ "prefix": "/api",
+ },
+ },
+ }
+ },
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"uri": "/app/*"},
+ "action": {"pass": "applications/sub-app/foo"},
+ },
+ {
+ "match": {"uri": "/api/*"},
+ "action": {"pass": "applications/sub-app/bar"},
},
- 'applications',
- ), 'setting user'
+ ],
+ }
+
+ assert 'success' in client.conf(conf)
+
+
+def test_json_application_invalid_python_prefix():
+ conf = {
+ "applications": {
+ "sub-app": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ "prefix": "app",
+ }
+ },
+ "listeners": {"*:7080": {"pass": "applications/sub-app"}},
+ }
+
+ assert 'error' in client.conf(conf)
+
+
+def test_json_application_empty_python_prefix():
+ conf = {
+ "applications": {
+ "sub-app": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ "prefix": "",
+ }
+ },
+ "listeners": {"*:7080": {"pass": "applications/sub-app"}},
+ }
+
+ assert 'error' in client.conf(conf)
+
+
+def test_json_application_many2():
+ conf = {
+ "applications": {
+ f"app-{a}": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ }
+            # A larger number of applications can cause the test to fail
+            # with the default open-files limit (not enough descriptors).
+ for a in range(100)
+ },
+ "listeners": {"*:7080": {"pass": "applications/app-1"}},
+ }
+
+ assert 'success' in client.conf(conf)
+
+
+def test_unprivileged_user_error(require, skip_alert):
+ require({'privileged_user': False})
+
+ skip_alert(r'cannot set user "root"', r'failed to apply new conf')
+
+ assert 'error' in client.conf(
+ {
+ "app": {
+ "type": "external",
+ "processes": 1,
+ "executable": "/app",
+ "user": "root",
+ }
+ },
+ 'applications',
+ ), 'setting user'
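
In the utf8_bom test above, the leading \xEF\xBB\xBF bytes are the UTF-8 byte-order mark, which the configuration parser is expected to skip. A quick plain-Python illustration (not part of the suite):

# The three bytes decode to U+FEFF; the utf-8-sig codec strips it entirely.
assert b'\xEF\xBB\xBF'.decode('utf-8') == '\ufeff'
assert b'\xEF\xBB\xBF'.decode('utf-8-sig') == ''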
diff --git a/test/test_forwarded_header.py b/test/test_forwarded_header.py
index eb2f25f8..c3f4a4c6 100644
--- a/test/test_forwarded_header.py
+++ b/test/test_forwarded_header.py
@@ -1,266 +1,270 @@
-from unit.applications.lang.python import TestApplicationPython
+import pytest
+from unit.applications.lang.python import ApplicationPython
+prerequisites = {'modules': {'python': 'any'}}
-class TestForwardedHeader(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def forwarded_header(self, forwarded):
- assert 'success' in self.conf(
- {
- "127.0.0.1:7081": {
- "forwarded": forwarded,
- "pass": "applications/forwarded_header",
- },
- "[::1]:7082": {
- "forwarded": forwarded,
- "pass": "applications/forwarded_header",
- },
- },
- 'listeners',
- ), 'listeners configure'
-
- def get_fwd(self, sock_type='ipv4', xff=None, xfp=None):
- port = 7081 if sock_type == 'ipv4' else 7082
- headers = {'Connection': 'close'}
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ client.load('forwarded_header')
- if xff is not None:
- headers['X-Forwarded-For'] = xff
- if xfp is not None:
- headers['X-Forwarded-Proto'] = xfp
-
- return self.get(sock_type=sock_type, port=port, headers=headers)[
- 'headers'
- ]
-
- def get_addr(self, *args, **kwargs):
- return self.get_fwd(*args, **kwargs)['Remote-Addr']
-
- def get_scheme(self, *args, **kwargs):
- return self.get_fwd(*args, **kwargs)['Url-Scheme']
-
- def setup_method(self):
- self.load('forwarded_header')
-
- def test_forwarded_header_single_ip(self):
- self.forwarded_header(
- {
- 'client_ip': 'X-Forwarded-For',
- 'protocol': 'X-Forwarded-Proto',
- 'source': '123.123.123.123',
- }
- )
-
- resp = self.get_fwd(xff='1.1.1.1', xfp='https')
- assert resp['Remote-Addr'] == '127.0.0.1', 'both headers addr'
- assert resp['Url-Scheme'] == 'http', 'both headers proto'
-
- assert self.get_addr() == '127.0.0.1', 'ipv4 default addr'
- assert self.get_addr('ipv6') == '::1', 'ipv6 default addr'
- assert self.get_addr(xff='1.1.1.1') == '127.0.0.1', 'bad source'
- assert self.get_addr(xff='blah') == '127.0.0.1', 'bad xff'
- assert self.get_addr('ipv6', '1.1.1.1') == '::1', 'bad source ipv6'
-
- assert self.get_scheme() == 'http', 'ipv4 default proto'
- assert self.get_scheme('ipv6') == 'http', 'ipv6 default proto'
- assert self.get_scheme(xfp='https') == 'http', 'bad proto'
- assert self.get_scheme(xfp='blah') == 'http', 'bad xfp'
- assert self.get_scheme('ipv6', xfp='https') == 'http', 'bad proto ipv6'
-
- self.forwarded_header(
- {
- 'client_ip': 'X-Forwarded-For',
- 'protocol': 'X-Forwarded-Proto',
- 'source': '127.0.0.1',
- }
- )
-
- resp = self.get_fwd(xff='1.1.1.1', xfp='https')
- assert resp['Remote-Addr'] == '1.1.1.1', 'both headers addr 2'
- assert resp['Url-Scheme'] == 'https', 'both headers proto 2'
-
- assert self.get_addr() == '127.0.0.1', 'ipv4 default addr 2'
- assert self.get_addr('ipv6') == '::1', 'ipv6 default addr 2'
- assert self.get_addr(xff='1.1.1.1') == '1.1.1.1', 'xff replace'
- assert self.get_addr('ipv6', '1.1.1.1') == '::1', 'bad source ipv6 2'
-
- assert self.get_scheme() == 'http', 'ipv4 default proto 2'
- assert self.get_scheme('ipv6') == 'http', 'ipv6 default proto 2'
- assert self.get_scheme(xfp='https') == 'https', 'xfp replace'
- assert self.get_scheme(xfp='on') == 'https', 'xfp replace 2'
- assert (
- self.get_scheme('ipv6', xfp='https') == 'http'
- ), 'bad proto ipv6 2'
-
- self.forwarded_header(
- {
- 'client_ip': 'X-Forwarded-For',
- 'protocol': 'X-Forwarded-Proto',
- 'source': '!127.0.0.1',
- }
- )
-
- assert self.get_addr(xff='1.1.1.1') == '127.0.0.1', 'bad source 3'
- assert self.get_addr('ipv6', '1.1.1.1') == '1.1.1.1', 'xff replace 2'
- assert self.get_scheme(xfp='https') == 'http', 'bad proto 2'
- assert self.get_scheme('ipv6', xfp='https') == 'https', 'xfp replace 3'
-
- def test_forwarded_header_ipv4(self):
- self.forwarded_header(
- {
- 'client_ip': 'X-Forwarded-For',
- 'protocol': 'X-Forwarded-Proto',
- 'source': '127.0.0.1',
- }
- )
-
- assert (
- self.get_addr(xff='8.8.8.8, 84.23.23.11') == '84.23.23.11'
- ), 'xff replace'
- assert (
- self.get_addr(xff='8.8.8.8, 84.23.23.11, 127.0.0.1') == '127.0.0.1'
- ), 'xff replace 2'
- assert (
- self.get_addr(xff=['8.8.8.8', '127.0.0.1, 10.0.1.1']) == '10.0.1.1'
- ), 'xff replace multi'
-
- assert self.get_scheme(xfp='http, https') == 'http', 'xfp replace'
- assert (
- self.get_scheme(xfp='http, https, http') == 'http'
- ), 'xfp replace 2'
- assert (
- self.get_scheme(xfp=['http, https', 'http', 'https']) == 'http'
- ), 'xfp replace multi'
-
- def test_forwarded_header_ipv6(self):
- self.forwarded_header(
- {
- 'client_ip': 'X-Forwarded-For',
- 'protocol': 'X-Forwarded-Proto',
- 'source': '::1',
- }
- )
-
- assert self.get_addr(xff='1.1.1.1') == '127.0.0.1', 'bad source ipv4'
-
- for ip in [
- 'f607:7403:1e4b:6c66:33b2:843f:2517:da27',
- '2001:db8:3c4d:15::1a2f:1a2b',
- '2001::3c4d:15:1a2f:1a2b',
- '::11.22.33.44',
- ]:
- assert self.get_addr('ipv6', ip) == ip, 'replace'
-
- assert self.get_scheme(xfp='https') == 'http', 'bad source ipv4'
-
- for proto in ['http', 'https']:
- assert self.get_scheme('ipv6', xfp=proto) == proto, 'replace'
-
- def test_forwarded_header_recursive(self):
- self.forwarded_header(
- {
- 'client_ip': 'X-Forwarded-For',
- 'recursive': True,
- 'source': ['127.0.0.1', '10.50.0.17', '10.5.2.1'],
- }
- )
-
- assert self.get_addr(xff='1.1.1.1') == '1.1.1.1', 'xff chain'
- assert (
- self.get_addr(xff='1.1.1.1, 10.5.2.1') == '1.1.1.1'
- ), 'xff chain 2'
- assert (
- self.get_addr(xff='8.8.8.8, 1.1.1.1, 10.5.2.1') == '1.1.1.1'
- ), 'xff chain 3'
- assert (
- self.get_addr(xff='10.50.0.17, 10.5.2.1, 10.5.2.1') == '10.50.0.17'
- ), 'xff chain 4'
- assert (
- self.get_addr(xff=['8.8.8.8', '1.1.1.1, 127.0.0.1']) == '1.1.1.1'
- ), 'xff replace multi'
- assert (
- self.get_addr(xff=['8.8.8.8', '1.1.1.1, 127.0.0.1', '10.5.2.1'])
- == '1.1.1.1'
- ), 'xff replace multi 2'
- assert (
- self.get_addr(xff=['10.5.2.1', '10.50.0.17, 1.1.1.1', '10.5.2.1'])
- == '1.1.1.1'
- ), 'xff replace multi 3'
- assert (
- self.get_addr(
- xff='8.8.8.8, 2001:db8:3c4d:15::1a2f:1a2b, 127.0.0.1'
- )
- == '2001:db8:3c4d:15::1a2f:1a2b'
- ), 'xff chain ipv6'
-
- def test_forwarded_header_case_insensitive(self):
- self.forwarded_header(
- {
- 'client_ip': 'x-forwarded-for',
- 'protocol': 'x-forwarded-proto',
- 'source': '127.0.0.1',
- }
- )
-
- assert self.get_addr() == '127.0.0.1', 'ipv4 default addr'
- assert self.get_addr('ipv6') == '::1', 'ipv6 default addr'
- assert self.get_addr(xff='1.1.1.1') == '1.1.1.1', 'replace'
-
- assert self.get_scheme() == 'http', 'ipv4 default proto'
- assert self.get_scheme('ipv6') == 'http', 'ipv6 default proto'
- assert self.get_scheme(xfp='https') == 'https', 'replace 1'
- assert self.get_scheme(xfp='oN') == 'https', 'replace 2'
-
- def test_forwarded_header_source_empty(self):
- self.forwarded_header(
- {
- 'client_ip': 'X-Forwarded-For',
- 'protocol': 'X-Forwarded-Proto',
- 'source': [],
- }
- )
-
- assert self.get_addr(xff='1.1.1.1') == '127.0.0.1', 'empty source xff'
- assert self.get_scheme(xfp='https') == 'http', 'empty source xfp'
-
- def test_forwarded_header_source_range(self):
- self.forwarded_header(
- {
- 'client_ip': 'X-Forwarded-For',
- 'protocol': 'X-Forwarded-Proto',
- 'source': '127.0.0.0-127.0.0.1',
+def forwarded_header(forwarded):
+ assert 'success' in client.conf(
+ {
+ "127.0.0.1:7081": {
+ "forwarded": forwarded,
+ "pass": "applications/forwarded_header",
+ },
+ "[::1]:7082": {
+ "forwarded": forwarded,
+ "pass": "applications/forwarded_header",
+ },
+ },
+ 'listeners',
+ ), 'listeners configure'
+
+
+def get_fwd(sock_type='ipv4', xff=None, xfp=None):
+ port = 7081 if sock_type == 'ipv4' else 7082
+
+ headers = {'Connection': 'close'}
+
+ if xff is not None:
+ headers['X-Forwarded-For'] = xff
+
+ if xfp is not None:
+ headers['X-Forwarded-Proto'] = xfp
+
+ return client.get(sock_type=sock_type, port=port, headers=headers)[
+ 'headers'
+ ]
+
+
+def get_addr(*args, **kwargs):
+ return get_fwd(*args, **kwargs)['Remote-Addr']
+
+
+def get_scheme(*args, **kwargs):
+ return get_fwd(*args, **kwargs)['Url-Scheme']
+
+
+def test_forwarded_header_single_ip():
+ forwarded_header(
+ {
+ 'client_ip': 'X-Forwarded-For',
+ 'protocol': 'X-Forwarded-Proto',
+ 'source': '123.123.123.123',
+ }
+ )
+
+ resp = get_fwd(xff='1.1.1.1', xfp='https')
+ assert resp['Remote-Addr'] == '127.0.0.1', 'both headers addr'
+ assert resp['Url-Scheme'] == 'http', 'both headers proto'
+
+ assert get_addr() == '127.0.0.1', 'ipv4 default addr'
+ assert get_addr('ipv6') == '::1', 'ipv6 default addr'
+ assert get_addr(xff='1.1.1.1') == '127.0.0.1', 'bad source'
+ assert get_addr(xff='blah') == '127.0.0.1', 'bad xff'
+ assert get_addr('ipv6', '1.1.1.1') == '::1', 'bad source ipv6'
+
+ assert get_scheme() == 'http', 'ipv4 default proto'
+ assert get_scheme('ipv6') == 'http', 'ipv6 default proto'
+ assert get_scheme(xfp='https') == 'http', 'bad proto'
+ assert get_scheme(xfp='blah') == 'http', 'bad xfp'
+ assert get_scheme('ipv6', xfp='https') == 'http', 'bad proto ipv6'
+
+ forwarded_header(
+ {
+ 'client_ip': 'X-Forwarded-For',
+ 'protocol': 'X-Forwarded-Proto',
+ 'source': '127.0.0.1',
+ }
+ )
+
+ resp = get_fwd(xff='1.1.1.1', xfp='https')
+ assert resp['Remote-Addr'] == '1.1.1.1', 'both headers addr 2'
+ assert resp['Url-Scheme'] == 'https', 'both headers proto 2'
+
+ assert get_addr() == '127.0.0.1', 'ipv4 default addr 2'
+ assert get_addr('ipv6') == '::1', 'ipv6 default addr 2'
+ assert get_addr(xff='1.1.1.1') == '1.1.1.1', 'xff replace'
+ assert get_addr('ipv6', '1.1.1.1') == '::1', 'bad source ipv6 2'
+
+ assert get_scheme() == 'http', 'ipv4 default proto 2'
+ assert get_scheme('ipv6') == 'http', 'ipv6 default proto 2'
+ assert get_scheme(xfp='https') == 'https', 'xfp replace'
+ assert get_scheme(xfp='on') == 'https', 'xfp replace 2'
+ assert get_scheme('ipv6', xfp='https') == 'http', 'bad proto ipv6 2'
+
+ forwarded_header(
+ {
+ 'client_ip': 'X-Forwarded-For',
+ 'protocol': 'X-Forwarded-Proto',
+ 'source': '!127.0.0.1',
+ }
+ )
+
+ assert get_addr(xff='1.1.1.1') == '127.0.0.1', 'bad source 3'
+ assert get_addr('ipv6', '1.1.1.1') == '1.1.1.1', 'xff replace 2'
+ assert get_scheme(xfp='https') == 'http', 'bad proto 2'
+ assert get_scheme('ipv6', xfp='https') == 'https', 'xfp replace 3'
+
+
+def test_forwarded_header_ipv4():
+ forwarded_header(
+ {
+ 'client_ip': 'X-Forwarded-For',
+ 'protocol': 'X-Forwarded-Proto',
+ 'source': '127.0.0.1',
+ }
+ )
+
+ assert get_addr(xff='8.8.8.8, 84.23.23.11') == '84.23.23.11', 'xff replace'
+ assert (
+ get_addr(xff='8.8.8.8, 84.23.23.11, 127.0.0.1') == '127.0.0.1'
+ ), 'xff replace 2'
+ assert (
+ get_addr(xff=['8.8.8.8', '127.0.0.1, 10.0.1.1']) == '10.0.1.1'
+ ), 'xff replace multi'
+
+ assert get_scheme(xfp='http, https') == 'http', 'xfp replace'
+ assert get_scheme(xfp='http, https, http') == 'http', 'xfp replace 2'
+ assert (
+ get_scheme(xfp=['http, https', 'http', 'https']) == 'http'
+ ), 'xfp replace multi'
+
+
+def test_forwarded_header_ipv6():
+ forwarded_header(
+ {
+ 'client_ip': 'X-Forwarded-For',
+ 'protocol': 'X-Forwarded-Proto',
+ 'source': '::1',
+ }
+ )
+
+ assert get_addr(xff='1.1.1.1') == '127.0.0.1', 'bad source ipv4'
+
+ for ip in [
+ 'f607:7403:1e4b:6c66:33b2:843f:2517:da27',
+ '2001:db8:3c4d:15::1a2f:1a2b',
+ '2001::3c4d:15:1a2f:1a2b',
+ '::11.22.33.44',
+ ]:
+ assert get_addr('ipv6', ip) == ip, 'replace'
+
+ assert get_scheme(xfp='https') == 'http', 'bad source ipv4'
+
+ for proto in ['http', 'https']:
+ assert get_scheme('ipv6', xfp=proto) == proto, 'replace'
+
+
+def test_forwarded_header_recursive():
+ forwarded_header(
+ {
+ 'client_ip': 'X-Forwarded-For',
+ 'recursive': True,
+ 'source': ['127.0.0.1', '10.50.0.17', '10.5.2.1'],
+ }
+ )
+
+ assert get_addr(xff='1.1.1.1') == '1.1.1.1', 'xff chain'
+ assert get_addr(xff='1.1.1.1, 10.5.2.1') == '1.1.1.1', 'xff chain 2'
+ assert (
+ get_addr(xff='8.8.8.8, 1.1.1.1, 10.5.2.1') == '1.1.1.1'
+ ), 'xff chain 3'
+ assert (
+ get_addr(xff='10.50.0.17, 10.5.2.1, 10.5.2.1') == '10.50.0.17'
+ ), 'xff chain 4'
+ assert (
+ get_addr(xff=['8.8.8.8', '1.1.1.1, 127.0.0.1']) == '1.1.1.1'
+ ), 'xff replace multi'
+ assert (
+ get_addr(xff=['8.8.8.8', '1.1.1.1, 127.0.0.1', '10.5.2.1']) == '1.1.1.1'
+ ), 'xff replace multi 2'
+ assert (
+ get_addr(xff=['10.5.2.1', '10.50.0.17, 1.1.1.1', '10.5.2.1'])
+ == '1.1.1.1'
+ ), 'xff replace multi 3'
+ assert (
+ get_addr(xff='8.8.8.8, 2001:db8:3c4d:15::1a2f:1a2b, 127.0.0.1')
+ == '2001:db8:3c4d:15::1a2f:1a2b'
+ ), 'xff chain ipv6'
+
+
+def test_forwarded_header_case_insensitive():
+ forwarded_header(
+ {
+ 'client_ip': 'x-forwarded-for',
+ 'protocol': 'x-forwarded-proto',
+ 'source': '127.0.0.1',
+ }
+ )
+
+ assert get_addr() == '127.0.0.1', 'ipv4 default addr'
+ assert get_addr('ipv6') == '::1', 'ipv6 default addr'
+ assert get_addr(xff='1.1.1.1') == '1.1.1.1', 'replace'
+
+ assert get_scheme() == 'http', 'ipv4 default proto'
+ assert get_scheme('ipv6') == 'http', 'ipv6 default proto'
+ assert get_scheme(xfp='https') == 'https', 'replace 1'
+ assert get_scheme(xfp='oN') == 'https', 'replace 2'
+
+
+def test_forwarded_header_source_empty():
+ forwarded_header(
+ {
+ 'client_ip': 'X-Forwarded-For',
+ 'protocol': 'X-Forwarded-Proto',
+ 'source': [],
+ }
+ )
+
+ assert get_addr(xff='1.1.1.1') == '127.0.0.1', 'empty source xff'
+ assert get_scheme(xfp='https') == 'http', 'empty source xfp'
+
+
+def test_forwarded_header_source_range():
+ forwarded_header(
+ {
+ 'client_ip': 'X-Forwarded-For',
+ 'protocol': 'X-Forwarded-Proto',
+ 'source': '127.0.0.0-127.0.0.1',
+ }
+ )
+
+ assert get_addr(xff='1.1.1.1') == '1.1.1.1', 'source range'
+ assert get_addr('ipv6', '1.1.1.1') == '::1', 'source range 2'
+
+
+def test_forwarded_header_invalid():
+ assert 'error' in client.conf(
+ {
+ "127.0.0.1:7081": {
+ "forwarded": {"source": '127.0.0.1'},
+ "pass": "applications/forwarded_header",
}
- )
-
- assert self.get_addr(xff='1.1.1.1') == '1.1.1.1', 'source range'
- assert self.get_addr('ipv6', '1.1.1.1') == '::1', 'source range 2'
+ },
+ 'listeners',
+ ), 'invalid forward'
- def test_forwarded_header_invalid(self):
- assert 'error' in self.conf(
+ def check_invalid_source(source):
+ assert 'error' in client.conf(
{
"127.0.0.1:7081": {
- "forwarded": {"source": '127.0.0.1'},
+ "forwarded": {
+ "client_ip": "X-Forwarded-For",
+ "source": source,
+ },
"pass": "applications/forwarded_header",
}
},
'listeners',
- ), 'invalid forward'
-
- def check_invalid_source(source):
- assert 'error' in self.conf(
- {
- "127.0.0.1:7081": {
- "forwarded": {
- "client_ip": "X-Forwarded-For",
- "source": source,
- },
- "pass": "applications/forwarded_header",
- }
- },
- 'listeners',
- ), 'invalid source'
-
- check_invalid_source(None)
- check_invalid_source('a')
- check_invalid_source(['a'])
+ ), 'invalid source'
+
+ check_invalid_source(None)
+ check_invalid_source('a')
+ check_invalid_source(['a'])
diff --git a/test/test_go_application.py b/test/test_go_application.py
index 9034d5aa..8f406744 100644
--- a/test/test_go_application.py
+++ b/test/test_go_application.py
@@ -1,167 +1,168 @@
import re
-import pytest
-from unit.applications.lang.go import TestApplicationGo
+from unit.applications.lang.go import ApplicationGo
+prerequisites = {'modules': {'go': 'all'}}
-class TestGoApplication(TestApplicationGo):
- prerequisites = {'modules': {'go': 'all'}}
+client = ApplicationGo()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request, skip_alert):
- skip_alert(r'\[unit\] close\(\d+\) failed: Bad file descriptor')
- def test_go_application_variables(self):
- self.load('variables')
+def test_go_application_variables(date_to_sec_epoch, sec_epoch):
+ client.load('variables')
- body = 'Test body string.'
+ body = 'Test body string.'
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'Custom-Header': 'blah',
- 'Connection': 'close',
- },
- body=body,
- )
-
- assert resp['status'] == 200, 'status'
- headers = resp['headers']
- header_server = headers.pop('Server')
- assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
-
- date = headers.pop('Date')
- assert date[-4:] == ' GMT', 'date header timezone'
- assert (
- abs(self.date_to_sec_epoch(date) - self.sec_epoch()) < 5
- ), 'date header'
-
- assert headers == {
- 'Content-Length': str(len(body)),
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
'Content-Type': 'text/html',
- 'Request-Method': 'POST',
- 'Request-Uri': '/',
- 'Http-Host': 'localhost',
- 'Server-Protocol': 'HTTP/1.1',
- 'Server-Protocol-Major': '1',
- 'Server-Protocol-Minor': '1',
'Custom-Header': 'blah',
'Connection': 'close',
- }, 'headers'
- assert resp['body'] == body, 'body'
+ },
+ body=body,
+ )
+
+ assert resp['status'] == 200, 'status'
+ headers = resp['headers']
+ header_server = headers.pop('Server')
+ assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
+
+ date = headers.pop('Date')
+ assert date[-4:] == ' GMT', 'date header timezone'
+ assert abs(date_to_sec_epoch(date) - sec_epoch) < 5, 'date header'
+
+ assert headers == {
+ 'Content-Length': str(len(body)),
+ 'Content-Type': 'text/html',
+ 'Request-Method': 'POST',
+ 'Request-Uri': '/',
+ 'Http-Host': 'localhost',
+ 'Server-Protocol': 'HTTP/1.1',
+ 'Server-Protocol-Major': '1',
+ 'Server-Protocol-Minor': '1',
+ 'Custom-Header': 'blah',
+ 'Connection': 'close',
+ }, 'headers'
+ assert resp['body'] == body, 'body'
+
+
+def test_go_application_get_variables():
+ client.load('get_variables')
+
+ resp = client.get(url='/?var1=val1&var2=&var3')
+ assert resp['headers']['X-Var-1'] == 'val1', 'GET variables'
+ assert resp['headers']['X-Var-2'] == '', 'GET variables 2'
+ assert resp['headers']['X-Var-3'] == '', 'GET variables 3'
+
+
+def test_go_application_post_variables():
+ client.load('post_variables')
+
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Connection': 'close',
+ },
+ body='var1=val1&var2=&var3',
+ )
- def test_go_application_get_variables(self):
- self.load('get_variables')
+ assert resp['headers']['X-Var-1'] == 'val1', 'POST variables'
+ assert resp['headers']['X-Var-2'] == '', 'POST variables 2'
+ assert resp['headers']['X-Var-3'] == '', 'POST variables 3'
- resp = self.get(url='/?var1=val1&var2=&var3')
- assert resp['headers']['X-Var-1'] == 'val1', 'GET variables'
- assert resp['headers']['X-Var-2'] == '', 'GET variables 2'
- assert resp['headers']['X-Var-3'] == '', 'GET variables 3'
- def test_go_application_post_variables(self):
- self.load('post_variables')
+def test_go_application_404():
+ client.load('404')
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'application/x-www-form-urlencoded',
- 'Connection': 'close',
- },
- body='var1=val1&var2=&var3',
- )
+ resp = client.get()
- assert resp['headers']['X-Var-1'] == 'val1', 'POST variables'
- assert resp['headers']['X-Var-2'] == '', 'POST variables 2'
- assert resp['headers']['X-Var-3'] == '', 'POST variables 3'
+ assert resp['status'] == 404, '404 status'
+ assert re.search(r'<title>404 Not Found</title>', resp['body']), '404 body'
- def test_go_application_404(self):
- self.load('404')
- resp = self.get()
+def test_go_keepalive_body():
+ client.load('mirror')
- assert resp['status'] == 404, '404 status'
- assert re.search(
- r'<title>404 Not Found</title>', resp['body']
- ), '404 body'
+ assert client.get()['status'] == 200, 'init'
- def test_go_keepalive_body(self):
- self.load('mirror')
+ body = '0123456789' * 500
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
- assert self.get()['status'] == 200, 'init'
+ assert resp['body'] == body, 'keep-alive 1'
- body = '0123456789' * 500
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
+ body = '0123456789'
+ resp = client.post(sock=sock, body=body)
+ assert resp['body'] == body, 'keep-alive 2'
+
+
+def test_go_application_cookies():
+ client.load('cookies')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': 'var1=val1; var2=val2',
+ 'Connection': 'close',
+ }
+ )
- assert resp['body'] == body, 'keep-alive 1'
+ assert resp['headers']['X-Cookie-1'] == 'val1', 'cookie 1'
+ assert resp['headers']['X-Cookie-2'] == 'val2', 'cookie 2'
- body = '0123456789'
- resp = self.post(sock=sock, body=body)
- assert resp['body'] == body, 'keep-alive 2'
- def test_go_application_cookies(self):
- self.load('cookies')
+def test_go_application_command_line_arguments_type():
+ client.load('command_line_arguments')
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': 'var1=val1; var2=val2',
- 'Connection': 'close',
- }
- )
+ assert 'error' in client.conf(
+ '' "a b c", 'applications/command_line_arguments/arguments'
+ ), 'arguments type'
- assert resp['headers']['X-Cookie-1'] == 'val1', 'cookie 1'
- assert resp['headers']['X-Cookie-2'] == 'val2', 'cookie 2'
- def test_go_application_command_line_arguments_type(self):
- self.load('command_line_arguments')
+def test_go_application_command_line_arguments_0():
+ client.load('command_line_arguments')
- assert 'error' in self.conf(
- '' "a b c", 'applications/command_line_arguments/arguments'
- ), 'arguments type'
+ assert client.get()['headers']['X-Arg-0'] == client.conf_get(
+ 'applications/command_line_arguments/executable'
+ ), 'argument 0'
- def test_go_application_command_line_arguments_0(self):
- self.load('command_line_arguments')
- assert self.get()['headers']['X-Arg-0'] == self.conf_get(
- 'applications/command_line_arguments/executable'
- ), 'argument 0'
+def test_go_application_command_line_arguments():
+ client.load('command_line_arguments')
- def test_go_application_command_line_arguments(self):
- self.load('command_line_arguments')
+ arg1 = '--cc=gcc-7.2.0'
+ arg2 = "--cc-opt='-O0 -DNXT_DEBUG_MEMORY=1 -fsanitize=address'"
+ arg3 = '--debug'
- arg1 = '--cc=gcc-7.2.0'
- arg2 = "--cc-opt='-O0 -DNXT_DEBUG_MEMORY=1 -fsanitize=address'"
- arg3 = '--debug'
+ assert 'success' in client.conf(
+ f'["{arg1}", "{arg2}", "{arg3}"]',
+ 'applications/command_line_arguments/arguments',
+ )
- assert 'success' in self.conf(
- f'["{arg1}", "{arg2}", "{arg3}"]',
- 'applications/command_line_arguments/arguments',
- )
+ assert client.get()['body'] == f'{arg1},{arg2},{arg3}', 'arguments'
- assert self.get()['body'] == f'{arg1},{arg2},{arg3}', 'arguments'
- def test_go_application_command_line_arguments_change(self):
- self.load('command_line_arguments')
+def test_go_application_command_line_arguments_change():
+ client.load('command_line_arguments')
- args_path = 'applications/command_line_arguments/arguments'
+ args_path = 'applications/command_line_arguments/arguments'
- assert 'success' in self.conf('["0", "a", "$", ""]', args_path)
+ assert 'success' in client.conf('["0", "a", "$", ""]', args_path)
- assert self.get()['body'] == '0,a,$,', 'arguments'
+ assert client.get()['body'] == '0,a,$,', 'arguments'
- assert 'success' in self.conf('["-1", "b", "%"]', args_path)
+ assert 'success' in client.conf('["-1", "b", "%"]', args_path)
- assert self.get()['body'] == '-1,b,%', 'arguments change'
+ assert client.get()['body'] == '-1,b,%', 'arguments change'
- assert 'success' in self.conf('[]', args_path)
+ assert 'success' in client.conf('[]', args_path)
- assert self.get()['headers']['Content-Length'] == '0', 'arguments empty'
+ assert client.get()['headers']['Content-Length'] == '0', 'arguments empty'
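
A note on the odd-looking value in the arguments-type test above: adjacent string literals concatenate at compile time, so the configuration body sent is the bare text a b c rather than a JSON array, which is exactly what the test expects to be rejected. Illustration (not part of the suite):

assert '' "a b c" == "a b c"   # adjacent literals concatenate
# A valid value for 'arguments' would be a JSON array, e.g. ["a", "b", "c"].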
diff --git a/test/test_go_isolation.py b/test/test_go_isolation.py
index f063f987..ba3390ea 100644
--- a/test/test_go_isolation.py
+++ b/test/test_go_isolation.py
@@ -3,362 +3,365 @@ import os
import pwd
import pytest
-from unit.applications.lang.go import TestApplicationGo
+from unit.applications.lang.go import ApplicationGo
from unit.option import option
from unit.utils import getns
+prerequisites = {'modules': {'go': 'any'}, 'features': {'isolation': True}}
-class TestGoIsolation(TestApplicationGo):
- prerequisites = {'modules': {'go': 'any'}, 'features': ['isolation']}
+client = ApplicationGo()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request, skip_alert):
- skip_alert(r'\[unit\] close\(\d+\) failed: Bad file descriptor')
- def unpriv_creds(self):
- nobody_uid = pwd.getpwnam('nobody').pw_uid
+def unpriv_creds():
+ nobody_uid = pwd.getpwnam('nobody').pw_uid
- try:
- nogroup_gid = grp.getgrnam('nogroup').gr_gid
- nogroup = 'nogroup'
- except KeyError:
- nogroup_gid = grp.getgrnam('nobody').gr_gid
- nogroup = 'nobody'
+ try:
+ nogroup_gid = grp.getgrnam('nogroup').gr_gid
+ nogroup = 'nogroup'
+ except KeyError:
+ nogroup_gid = grp.getgrnam('nobody').gr_gid
+ nogroup = 'nobody'
- return (nobody_uid, nogroup_gid, nogroup)
+ return (nobody_uid, nogroup_gid, nogroup)
- def isolation_key(self, key):
- return key in option.available['features']['isolation'].keys()
- def test_isolation_values(self):
- self.load('ns_inspect')
+def test_isolation_values():
+ client.load('ns_inspect')
- obj = self.getjson()['body']
+ obj = client.getjson()['body']
- for ns, ns_value in option.available['features']['isolation'].items():
- if ns.upper() in obj['NS']:
- assert obj['NS'][ns.upper()] == ns_value, f'{ns} match'
+ for ns, ns_value in option.available['features']['isolation'].items():
+ if ns.upper() in obj['NS']:
+ assert obj['NS'][ns.upper()] == ns_value, f'{ns} match'
- def test_isolation_unpriv_user(self, is_su):
- if not self.isolation_key('unprivileged_userns_clone'):
- pytest.skip('unprivileged clone is not available')
- if is_su:
- pytest.skip('privileged tests, skip this')
+def test_isolation_unpriv_user(require):
+ require(
+ {
+ 'privileged_user': False,
+ 'features': {'isolation': ['unprivileged_userns_clone']},
+ }
+ )
- self.load('ns_inspect')
- obj = self.getjson()['body']
+ client.load('ns_inspect')
+ obj = client.getjson()['body']
- assert obj['UID'] == os.geteuid(), 'uid match'
- assert obj['GID'] == os.getegid(), 'gid match'
+ assert obj['UID'] == os.geteuid(), 'uid match'
+ assert obj['GID'] == os.getegid(), 'gid match'
- self.load('ns_inspect', isolation={'namespaces': {'credential': True}})
+ client.load('ns_inspect', isolation={'namespaces': {'credential': True}})
- obj = self.getjson()['body']
+ obj = client.getjson()['body']
- nobody_uid, nogroup_gid, nogroup = self.unpriv_creds()
+ nobody_uid, nogroup_gid, nogroup = unpriv_creds()
- # unprivileged unit map itself to nobody in the container by default
- assert obj['UID'] == nobody_uid, 'uid of nobody'
- assert obj['GID'] == nogroup_gid, f'gid of {nogroup}'
+ # unprivileged unit maps itself to nobody in the container by default
+ assert obj['UID'] == nobody_uid, 'uid of nobody'
+ assert obj['GID'] == nogroup_gid, f'gid of {nogroup}'
- self.load(
- 'ns_inspect',
- user='root',
- isolation={'namespaces': {'credential': True}},
- )
+ client.load(
+ 'ns_inspect',
+ user='root',
+ isolation={'namespaces': {'credential': True}},
+ )
- obj = self.getjson()['body']
+ obj = client.getjson()['body']
- assert obj['UID'] == 0, 'uid match user=root'
- assert obj['GID'] == 0, 'gid match user=root'
+ assert obj['UID'] == 0, 'uid match user=root'
+ assert obj['GID'] == 0, 'gid match user=root'
- self.load(
- 'ns_inspect',
- user='root',
- group=nogroup,
- isolation={'namespaces': {'credential': True}},
- )
+ client.load(
+ 'ns_inspect',
+ user='root',
+ group=nogroup,
+ isolation={'namespaces': {'credential': True}},
+ )
- obj = self.getjson()['body']
+ obj = client.getjson()['body']
- assert obj['UID'] == 0, 'uid match user=root group=nogroup'
- assert obj['GID'] == nogroup_gid, 'gid match user=root group=nogroup'
+ assert obj['UID'] == 0, 'uid match user=root group=nogroup'
+ assert obj['GID'] == nogroup_gid, 'gid match user=root group=nogroup'
- self.load(
- 'ns_inspect',
- user='root',
- group='root',
- isolation={
- 'namespaces': {'credential': True},
- 'uidmap': [{'container': 0, 'host': os.geteuid(), 'size': 1}],
- 'gidmap': [{'container': 0, 'host': os.getegid(), 'size': 1}],
- },
- )
+ client.load(
+ 'ns_inspect',
+ user='root',
+ group='root',
+ isolation={
+ 'namespaces': {'credential': True},
+ 'uidmap': [{'container': 0, 'host': os.geteuid(), 'size': 1}],
+ 'gidmap': [{'container': 0, 'host': os.getegid(), 'size': 1}],
+ },
+ )
- obj = self.getjson()['body']
+ obj = client.getjson()['body']
- assert obj['UID'] == 0, 'uid match uidmap'
- assert obj['GID'] == 0, 'gid match gidmap'
+ assert obj['UID'] == 0, 'uid match uidmap'
+ assert obj['GID'] == 0, 'gid match gidmap'
- def test_isolation_priv_user(self, is_su):
- if not is_su:
- pytest.skip('unprivileged tests, skip this')
- self.load('ns_inspect')
+def test_isolation_priv_user(require):
+ require({'privileged_user': True})
- nobody_uid, nogroup_gid, nogroup = self.unpriv_creds()
+ client.load('ns_inspect')
- obj = self.getjson()['body']
+ nobody_uid, nogroup_gid, nogroup = unpriv_creds()
- assert obj['UID'] == nobody_uid, 'uid match'
- assert obj['GID'] == nogroup_gid, 'gid match'
+ obj = client.getjson()['body']
- self.load('ns_inspect', isolation={'namespaces': {'credential': True}})
+ assert obj['UID'] == nobody_uid, 'uid match'
+ assert obj['GID'] == nogroup_gid, 'gid match'
- obj = self.getjson()['body']
+ client.load('ns_inspect', isolation={'namespaces': {'credential': True}})
- # privileged unit map app creds in the container by default
- assert obj['UID'] == nobody_uid, 'uid nobody'
- assert obj['GID'] == nogroup_gid, 'gid nobody'
+ obj = client.getjson()['body']
- self.load(
- 'ns_inspect',
- user='root',
- isolation={'namespaces': {'credential': True}},
- )
+ # privileged unit maps app creds in the container by default
+ assert obj['UID'] == nobody_uid, 'uid nobody'
+ assert obj['GID'] == nogroup_gid, 'gid nobody'
- obj = self.getjson()['body']
+ client.load(
+ 'ns_inspect',
+ user='root',
+ isolation={'namespaces': {'credential': True}},
+ )
- assert obj['UID'] == 0, 'uid nobody user=root'
- assert obj['GID'] == 0, 'gid nobody user=root'
+ obj = client.getjson()['body']
- self.load(
- 'ns_inspect',
- user='root',
- group=nogroup,
- isolation={'namespaces': {'credential': True}},
- )
+ assert obj['UID'] == 0, 'uid nobody user=root'
+ assert obj['GID'] == 0, 'gid nobody user=root'
- obj = self.getjson()['body']
+ client.load(
+ 'ns_inspect',
+ user='root',
+ group=nogroup,
+ isolation={'namespaces': {'credential': True}},
+ )
- assert obj['UID'] == 0, 'uid match user=root group=nogroup'
- assert obj['GID'] == nogroup_gid, 'gid match user=root group=nogroup'
+ obj = client.getjson()['body']
- self.load(
- 'ns_inspect',
- user='root',
- group='root',
- isolation={
- 'namespaces': {'credential': True},
- 'uidmap': [{'container': 0, 'host': 0, 'size': 1}],
- 'gidmap': [{'container': 0, 'host': 0, 'size': 1}],
- },
- )
+ assert obj['UID'] == 0, 'uid match user=root group=nogroup'
+ assert obj['GID'] == nogroup_gid, 'gid match user=root group=nogroup'
- obj = self.getjson()['body']
+ client.load(
+ 'ns_inspect',
+ user='root',
+ group='root',
+ isolation={
+ 'namespaces': {'credential': True},
+ 'uidmap': [{'container': 0, 'host': 0, 'size': 1}],
+ 'gidmap': [{'container': 0, 'host': 0, 'size': 1}],
+ },
+ )
- assert obj['UID'] == 0, 'uid match uidmap user=root'
- assert obj['GID'] == 0, 'gid match gidmap user=root'
+ obj = client.getjson()['body']
- # map 65535 uids
- self.load(
- 'ns_inspect',
- user='nobody',
- isolation={
- 'namespaces': {'credential': True},
- 'uidmap': [{'container': 0, 'host': 0, 'size': nobody_uid + 1}],
- },
- )
+ assert obj['UID'] == 0, 'uid match uidmap user=root'
+ assert obj['GID'] == 0, 'gid match gidmap user=root'
- obj = self.getjson()['body']
+ # map 65535 uids
+ client.load(
+ 'ns_inspect',
+ user='nobody',
+ isolation={
+ 'namespaces': {'credential': True},
+ 'uidmap': [{'container': 0, 'host': 0, 'size': nobody_uid + 1}],
+ },
+ )
- assert obj['UID'] == nobody_uid, 'uid match uidmap user=nobody'
- assert obj['GID'] == nogroup_gid, 'gid match uidmap user=nobody'
+ obj = client.getjson()['body']
- def test_isolation_mnt(self):
- if not self.isolation_key('mnt'):
- pytest.skip('mnt namespace is not supported')
+ assert obj['UID'] == nobody_uid, 'uid match uidmap user=nobody'
+ assert obj['GID'] == nogroup_gid, 'gid match uidmap user=nobody'
- if not self.isolation_key('unprivileged_userns_clone'):
- pytest.skip('unprivileged clone is not available')
- self.load(
- 'ns_inspect',
- isolation={'namespaces': {'mount': True, 'credential': True}},
+def test_isolation_mnt(require):
+ require(
+ {
+ 'features': {'isolation': ['unprivileged_userns_clone', 'mnt']},
+ }
+ )
+
+ client.load(
+ 'ns_inspect',
+ isolation={'namespaces': {'mount': True, 'credential': True}},
+ )
+
+ obj = client.getjson()['body']
+
+ # all but user and mnt
+ allns = list(option.available['features']['isolation'].keys())
+ allns.remove('user')
+ allns.remove('mnt')
+
+ for ns in allns:
+ if ns.upper() in obj['NS']:
+ assert (
+ obj['NS'][ns.upper()]
+ == option.available['features']['isolation'][ns]
+ ), f'{ns} match'
+
+ assert obj['NS']['MNT'] != getns('mnt'), 'mnt set'
+ assert obj['NS']['USER'] != getns('user'), 'user set'
+
+
+def test_isolation_pid(is_su, require):
+ require({'features': {'isolation': ['pid']}})
+
+ if not is_su:
+ require(
+ {
+ 'features': {
+ 'isolation': [
+ 'unprivileged_userns_clone',
+ 'user',
+ 'mnt',
+ ]
+ }
+ }
)
- obj = self.getjson()['body']
-
- # all but user and mnt
- allns = list(option.available['features']['isolation'].keys())
- allns.remove('user')
- allns.remove('mnt')
-
- for ns in allns:
- if ns.upper() in obj['NS']:
- assert (
- obj['NS'][ns.upper()]
- == option.available['features']['isolation'][ns]
- ), f'{ns} match'
-
- assert obj['NS']['MNT'] != getns('mnt'), 'mnt set'
- assert obj['NS']['USER'] != getns('user'), 'user set'
-
- def test_isolation_pid(self, is_su):
- if not self.isolation_key('pid'):
- pytest.skip('pid namespace is not supported')
+ isolation = {'namespaces': {'pid': True}}
- if not is_su:
- if not self.isolation_key('unprivileged_userns_clone'):
- pytest.skip('unprivileged clone is not available')
+ if not is_su:
+ isolation['namespaces']['mount'] = True
+ isolation['namespaces']['credential'] = True
- if not self.isolation_key('user'):
- pytest.skip('user namespace is not supported')
+ client.load('ns_inspect', isolation=isolation)
- if not self.isolation_key('mnt'):
- pytest.skip('mnt namespace is not supported')
+ obj = client.getjson()['body']
- isolation = {'namespaces': {'pid': True}}
+ assert obj['PID'] == 2, 'pid of container is 2'
- if not is_su:
- isolation['namespaces']['mount'] = True
- isolation['namespaces']['credential'] = True
- self.load('ns_inspect', isolation=isolation)
+def test_isolation_namespace_false():
+ client.load('ns_inspect')
+ allns = list(option.available['features']['isolation'].keys())
- obj = self.getjson()['body']
+ remove_list = ['unprivileged_userns_clone', 'ipc', 'cgroup']
+ allns = [ns for ns in allns if ns not in remove_list]
- assert obj['PID'] == 2, 'pid of container is 2'
+ namespaces = {}
+ for ns in allns:
+ if ns == 'user':
+ namespaces['credential'] = False
+ elif ns == 'mnt':
+ namespaces['mount'] = False
+ elif ns == 'net':
+ namespaces['network'] = False
+ elif ns == 'uts':
+ namespaces['uname'] = False
+ else:
+ namespaces[ns] = False
- def test_isolation_namespace_false(self):
- self.load('ns_inspect')
- allns = list(option.available['features']['isolation'].keys())
+ client.load('ns_inspect', isolation={'namespaces': namespaces})
- remove_list = ['unprivileged_userns_clone', 'ipc', 'cgroup']
- allns = [ns for ns in allns if ns not in remove_list]
+ obj = client.getjson()['body']
- namespaces = {}
- for ns in allns:
- if ns == 'user':
- namespaces['credential'] = False
- elif ns == 'mnt':
- namespaces['mount'] = False
- elif ns == 'net':
- namespaces['network'] = False
- elif ns == 'uts':
- namespaces['uname'] = False
- else:
- namespaces[ns] = False
+ for ns in allns:
+ if ns.upper() in obj['NS']:
+ assert (
+ obj['NS'][ns.upper()]
+ == option.available['features']['isolation'][ns]
+ ), f'{ns} match'
- self.load('ns_inspect', isolation={'namespaces': namespaces})
- obj = self.getjson()['body']
-
- for ns in allns:
- if ns.upper() in obj['NS']:
- assert (
- obj['NS'][ns.upper()]
- == option.available['features']['isolation'][ns]
- ), f'{ns} match'
-
- def test_go_isolation_rootfs_container(self, is_su, temp_dir):
- if not is_su:
- if not self.isolation_key('unprivileged_userns_clone'):
- pytest.skip('unprivileged clone is not available')
-
- if not self.isolation_key('user'):
- pytest.skip('user namespace is not supported')
-
- if not self.isolation_key('mnt'):
- pytest.skip('mnt namespace is not supported')
-
- if not self.isolation_key('pid'):
- pytest.skip('pid namespace is not supported')
-
- isolation = {'rootfs': temp_dir}
-
- if not is_su:
- isolation['namespaces'] = {
- 'mount': True,
- 'credential': True,
- 'pid': True,
+def test_go_isolation_rootfs_container(is_su, require, temp_dir):
+ if not is_su:
+ require(
+ {
+ 'features': {
+ 'isolation': [
+ 'unprivileged_userns_clone',
+ 'user',
+ 'mnt',
+ 'pid',
+ ]
+ }
}
+ )
- self.load('ns_inspect', isolation=isolation)
+ isolation = {'rootfs': temp_dir}
- obj = self.getjson(url='/?file=/go/app')['body']
+ if not is_su:
+ isolation['namespaces'] = {
+ 'mount': True,
+ 'credential': True,
+ 'pid': True,
+ }
- assert obj['FileExists'] == True, 'app relative to rootfs'
+ client.load('ns_inspect', isolation=isolation)
- obj = self.getjson(url='/?file=/bin/sh')['body']
- assert obj['FileExists'] == False, 'file should not exists'
+ obj = client.getjson(url='/?file=/go/app')['body']
- def test_go_isolation_rootfs_container_priv(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('requires root')
+ assert obj['FileExists'], 'app relative to rootfs'
- if not self.isolation_key('mnt'):
- pytest.skip('mnt namespace is not supported')
+ obj = client.getjson(url='/?file=/bin/sh')['body']
+ assert not obj['FileExists'], 'file should not exist'
- isolation = {
- 'namespaces': {'mount': True},
- 'rootfs': temp_dir,
- }
- self.load('ns_inspect', isolation=isolation)
+def test_go_isolation_rootfs_container_priv(require, temp_dir):
+ require({'privileged_user': True, 'features': {'isolation': ['mnt']}})
- obj = self.getjson(url='/?file=/go/app')['body']
+ isolation = {
+ 'namespaces': {'mount': True},
+ 'rootfs': temp_dir,
+ }
- assert obj['FileExists'] == True, 'app relative to rootfs'
+ client.load('ns_inspect', isolation=isolation)
- obj = self.getjson(url='/?file=/bin/sh')['body']
- assert obj['FileExists'] == False, 'file should not exists'
+ obj = client.getjson(url='/?file=/go/app')['body']
- def test_go_isolation_rootfs_automount_tmpfs(self, is_su, temp_dir):
- try:
- open("/proc/self/mountinfo")
- except:
- pytest.skip('The system lacks /proc/self/mountinfo file')
+ assert obj['FileExists'], 'app relative to rootfs'
- if not is_su:
- if not self.isolation_key('unprivileged_userns_clone'):
- pytest.skip('unprivileged clone is not available')
+ obj = client.getjson(url='/?file=/bin/sh')['body']
+ assert not obj['FileExists'], 'file should not exist'
- if not self.isolation_key('user'):
- pytest.skip('user namespace is not supported')
- if not self.isolation_key('mnt'):
- pytest.skip('mnt namespace is not supported')
+def test_go_isolation_rootfs_automount_tmpfs(is_su, require, temp_dir):
+ try:
+ open("/proc/self/mountinfo")
+ except:
+ pytest.skip('The system lacks /proc/self/mountinfo file')
- if not self.isolation_key('pid'):
- pytest.skip('pid namespace is not supported')
+ if not is_su:
+ require(
+ {
+ 'features': {
+ 'isolation': [
+ 'unprivileged_userns_clone',
+ 'user',
+ 'mnt',
+ 'pid',
+ ]
+ }
+ }
+ )
- isolation = {'rootfs': temp_dir}
+ isolation = {'rootfs': temp_dir}
- if not is_su:
- isolation['namespaces'] = {
- 'mount': True,
- 'credential': True,
- 'pid': True,
- }
+ if not is_su:
+ isolation['namespaces'] = {
+ 'mount': True,
+ 'credential': True,
+ 'pid': True,
+ }
- isolation['automount'] = {'tmpfs': False}
+ isolation['automount'] = {'tmpfs': False}
- self.load('ns_inspect', isolation=isolation)
+ client.load('ns_inspect', isolation=isolation)
- obj = self.getjson(url='/?mounts=true')['body']
+ obj = client.getjson(url='/?mounts=true')['body']
- assert (
- "/ /tmp" not in obj['Mounts'] and "tmpfs" not in obj['Mounts']
- ), 'app has no /tmp mounted'
+ assert (
+ "/ /tmp" not in obj['Mounts'] and "tmpfs" not in obj['Mounts']
+ ), 'app has no /tmp mounted'
- isolation['automount'] = {'tmpfs': True}
+ isolation['automount'] = {'tmpfs': True}
- self.load('ns_inspect', isolation=isolation)
+ client.load('ns_inspect', isolation=isolation)
- obj = self.getjson(url='/?mounts=true')['body']
+ obj = client.getjson(url='/?mounts=true')['body']
- assert (
- "/ /tmp" in obj['Mounts'] and "tmpfs" in obj['Mounts']
- ), 'app has /tmp mounted on /'
+ assert (
+ "/ /tmp" in obj['Mounts'] and "tmpfs" in obj['Mounts']
+ ), 'app has /tmp mounted on /'
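
Throughout test_go_isolation.py the old isolation_key()/pytest.skip checks are folded into a declarative require fixture. The fixture itself lives in the suite's shared conftest, which is outside this diff; the sketch below only illustrates the checking pattern implied by the calls above (it ignores other keys such as privileged_user) and is not the actual implementation:

    import pytest

    from unit.option import option


    @pytest.fixture
    def require():
        # Hypothetical stand-in for the real conftest fixture.
        def check(conditions):
            # Skip the test unless every requested isolation feature is
            # reported as available by the running Unit instance.
            available = option.available['features']['isolation']

            for feature in conditions.get('features', {}).get('isolation', []):
                if feature not in available:
                    pytest.skip(f'{feature} isolation is not available')

        return check
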
diff --git a/test/test_go_isolation_rootfs.py b/test/test_go_isolation_rootfs.py
index d246a48d..b627b515 100644
--- a/test/test_go_isolation_rootfs.py
+++ b/test/test_go_isolation_rootfs.py
@@ -1,32 +1,19 @@
-import os
+from unit.applications.lang.go import ApplicationGo
-import pytest
-from unit.applications.lang.go import TestApplicationGo
+prerequisites = {
+ 'modules': {'go': 'all'},
+ 'features': {'isolation': True},
+ 'privileged_user': True,
+}
+client = ApplicationGo()
-class TestGoIsolationRootfs(TestApplicationGo):
- prerequisites = {'modules': {'go': 'all'}}
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request, skip_alert):
- skip_alert(r'\[unit\] close\(\d+\) failed: Bad file descriptor')
+def test_go_isolation_rootfs_chroot(temp_dir):
+ client.load('ns_inspect', isolation={'rootfs': temp_dir})
- def test_go_isolation_rootfs_chroot(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('requires root')
+ obj = client.getjson(url='/?file=/go/app')['body']
+ assert obj['FileExists'], 'app relative to rootfs'
- if os.uname().sysname == 'Darwin':
- pytest.skip('chroot tests not supported on OSX')
-
- isolation = {
- 'rootfs': temp_dir,
- }
-
- self.load('ns_inspect', isolation=isolation)
-
- obj = self.getjson(url='/?file=/go/app')['body']
-
- assert obj['FileExists'] == True, 'app relative to rootfs'
-
- obj = self.getjson(url='/?file=/bin/sh')['body']
- assert obj['FileExists'] == False, 'file should not exists'
+ obj = client.getjson(url='/?file=/bin/sh')['body']
+ assert not obj['FileExists'], 'file should not exist'
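
For the rootfs tests, the interesting piece is the isolation object handed to client.load(). A short sketch of how the hunks above assemble it; build_rootfs_isolation() is a hypothetical helper (the tests above inline the same dict), and temp_dir stands for the pytest-provided scratch directory:

    def build_rootfs_isolation(temp_dir, is_su):
        # Chroot the application into the per-test scratch directory.
        isolation = {'rootfs': temp_dir}

        # Unprivileged runs additionally need user, mount and pid namespaces,
        # mirroring test_go_isolation_rootfs_container() above.
        if not is_su:
            isolation['namespaces'] = {
                'mount': True,
                'credential': True,
                'pid': True,
            }

        return isolation

With that object passed as client.load('ns_inspect', isolation=...), paths are resolved inside the new root: /go/app exists for the application, while host paths such as /bin/sh do not.
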
diff --git a/test/test_http_header.py b/test/test_http_header.py
index cae5e9b8..af836e6f 100644
--- a/test/test_http_header.py
+++ b/test/test_http_header.py
@@ -1,468 +1,500 @@
import pytest
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
+prerequisites = {'modules': {'python': 'any'}}
-class TestHTTPHeader(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def test_http_header_value_leading_sp(self):
- self.load('custom_header')
- resp = self.get(
+def test_http_header_value_leading_sp():
+ client.load('custom_header')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Custom-Header': ' ,',
+ 'Connection': 'close',
+ }
+ )
+
+ assert resp['status'] == 200, 'value leading sp status'
+ assert (
+ resp['headers']['Custom-Header'] == ','
+ ), 'value leading sp custom header'
+
+
+def test_http_header_value_leading_htab():
+ client.load('custom_header')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Custom-Header': '\t,',
+ 'Connection': 'close',
+ }
+ )
+
+ assert resp['status'] == 200, 'value leading htab status'
+ assert (
+ resp['headers']['Custom-Header'] == ','
+ ), 'value leading htab custom header'
+
+
+def test_http_header_value_trailing_sp():
+ client.load('custom_header')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Custom-Header': ', ',
+ 'Connection': 'close',
+ }
+ )
+
+ assert resp['status'] == 200, 'value trailing sp status'
+ assert (
+ resp['headers']['Custom-Header'] == ','
+ ), 'value trailing sp custom header'
+
+
+def test_http_header_value_trailing_htab():
+ client.load('custom_header')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Custom-Header': ',\t',
+ 'Connection': 'close',
+ }
+ )
+
+ assert resp['status'] == 200, 'value trailing htab status'
+ assert (
+ resp['headers']['Custom-Header'] == ','
+ ), 'value trailing htab custom header'
+
+
+def test_http_header_value_both_sp():
+ client.load('custom_header')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Custom-Header': ' , ',
+ 'Connection': 'close',
+ }
+ )
+
+ assert resp['status'] == 200, 'value both sp status'
+ assert (
+ resp['headers']['Custom-Header'] == ','
+ ), 'value both sp custom header'
+
+
+def test_http_header_value_both_htab():
+ client.load('custom_header')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Custom-Header': '\t,\t',
+ 'Connection': 'close',
+ }
+ )
+
+ assert resp['status'] == 200, 'value both htab status'
+ assert (
+ resp['headers']['Custom-Header'] == ','
+ ), 'value both htab custom header'
+
+
+def test_http_header_value_chars():
+ client.load('custom_header')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Custom-Header': r"(),/:;<=>?@[\]{}\t !#$%&'*+-.^_`|~",
+ 'Connection': 'close',
+ }
+ )
+
+ assert resp['status'] == 200, 'value chars status'
+ assert (
+ resp['headers']['Custom-Header']
+ == r"(),/:;<=>?@[\]{}\t !#$%&'*+-.^_`|~"
+ ), 'value chars custom header'
+
+
+def test_http_header_value_chars_edge():
+ client.load('custom_header')
+
+ resp = client.http(
+ b"""GET / HTTP/1.1
+Host: localhost
+Custom-Header: \x20\xFF
+Connection: close
+
+""",
+ raw=True,
+ encoding='latin1',
+ )
+
+ assert resp['status'] == 200, 'value chars edge status'
+ assert resp['headers']['Custom-Header'] == '\xFF', 'value chars edge'
+
+
+def test_http_header_value_chars_below():
+ client.load('custom_header')
+
+ resp = client.http(
+ b"""GET / HTTP/1.1
+Host: localhost
+Custom-Header: \x1F
+Connection: close
+
+""",
+ raw=True,
+ )
+
+ assert resp['status'] == 400, 'value chars below'
+
+
+def test_http_header_field_leading_sp():
+ client.load('empty')
+
+ assert (
+ client.get(
headers={
'Host': 'localhost',
- 'Custom-Header': ' ,',
+ ' Custom-Header': 'blah',
'Connection': 'close',
}
- )
+ )['status']
+ == 400
+ ), 'field leading sp'
- assert resp['status'] == 200, 'value leading sp status'
- assert (
- resp['headers']['Custom-Header'] == ','
- ), 'value leading sp custom header'
- def test_http_header_value_leading_htab(self):
- self.load('custom_header')
+def test_http_header_field_leading_htab():
+ client.load('empty')
- resp = self.get(
+ assert (
+ client.get(
headers={
'Host': 'localhost',
- 'Custom-Header': '\t,',
+ '\tCustom-Header': 'blah',
'Connection': 'close',
}
- )
+ )['status']
+ == 400
+ ), 'field leading htab'
- assert resp['status'] == 200, 'value leading htab status'
- assert (
- resp['headers']['Custom-Header'] == ','
- ), 'value leading htab custom header'
- def test_http_header_value_trailing_sp(self):
- self.load('custom_header')
+def test_http_header_field_trailing_sp():
+ client.load('empty')
- resp = self.get(
+ assert (
+ client.get(
headers={
'Host': 'localhost',
- 'Custom-Header': ', ',
+ 'Custom-Header ': 'blah',
'Connection': 'close',
}
- )
+ )['status']
+ == 400
+ ), 'field trailing sp'
- assert resp['status'] == 200, 'value trailing sp status'
- assert (
- resp['headers']['Custom-Header'] == ','
- ), 'value trailing sp custom header'
- def test_http_header_value_trailing_htab(self):
- self.load('custom_header')
+def test_http_header_field_trailing_htab():
+ client.load('empty')
- resp = self.get(
+ assert (
+ client.get(
headers={
'Host': 'localhost',
- 'Custom-Header': ',\t',
+ 'Custom-Header\t': 'blah',
'Connection': 'close',
}
- )
+ )['status']
+ == 400
+ ), 'field trailing htab'
- assert resp['status'] == 200, 'value trailing htab status'
- assert (
- resp['headers']['Custom-Header'] == ','
- ), 'value trailing htab custom header'
- def test_http_header_value_both_sp(self):
- self.load('custom_header')
+def test_http_header_content_length_big():
+ client.load('empty')
- resp = self.get(
+ assert (
+ client.post(
headers={
'Host': 'localhost',
- 'Custom-Header': ' , ',
+ 'Content-Length': str(2**64),
'Connection': 'close',
- }
- )
+ },
+ body='X' * 1000,
+ )['status']
+ == 400
+ ), 'Content-Length big'
- assert resp['status'] == 200, 'value both sp status'
- assert (
- resp['headers']['Custom-Header'] == ','
- ), 'value both sp custom header'
- def test_http_header_value_both_htab(self):
- self.load('custom_header')
+def test_http_header_content_length_negative():
+ client.load('empty')
- resp = self.get(
+ assert (
+ client.post(
headers={
'Host': 'localhost',
- 'Custom-Header': '\t,\t',
+ 'Content-Length': '-100',
'Connection': 'close',
- }
- )
+ },
+ body='X' * 1000,
+ )['status']
+ == 400
+ ), 'Content-Length negative'
- assert resp['status'] == 200, 'value both htab status'
- assert (
- resp['headers']['Custom-Header'] == ','
- ), 'value both htab custom header'
- def test_http_header_value_chars(self):
- self.load('custom_header')
+def test_http_header_content_length_text():
+ client.load('empty')
- resp = self.get(
+ assert (
+ client.post(
headers={
'Host': 'localhost',
- 'Custom-Header': r"(),/:;<=>?@[\]{}\t !#$%&'*+-.^_`|~",
+ 'Content-Length': 'blah',
'Connection': 'close',
- }
- )
+ },
+ body='X' * 1000,
+ )['status']
+ == 400
+ ), 'Content-Length text'
- assert resp['status'] == 200, 'value chars status'
- assert (
- resp['headers']['Custom-Header']
- == r"(),/:;<=>?@[\]{}\t !#$%&'*+-.^_`|~"
- ), 'value chars custom header'
- def test_http_header_value_chars_edge(self):
- self.load('custom_header')
+def test_http_header_content_length_multiple_values():
+ client.load('empty')
- resp = self.http(
- b"""GET / HTTP/1.1
-Host: localhost
-Custom-Header: \x20\xFF
-Connection: close
+ assert (
+ client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': '41, 42',
+ 'Connection': 'close',
+ },
+ body='X' * 1000,
+ )['status']
+ == 400
+ ), 'Content-Length multiple values'
-""",
- raw=True,
- encoding='latin1',
- )
- assert resp['status'] == 200, 'value chars edge status'
- assert resp['headers']['Custom-Header'] == '\xFF', 'value chars edge'
+def test_http_header_content_length_multiple_fields():
+ client.load('empty')
- def test_http_header_value_chars_below(self):
- self.load('custom_header')
+ assert (
+ client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': ['41', '42'],
+ 'Connection': 'close',
+ },
+ body='X' * 1000,
+ )['status']
+ == 400
+ ), 'Content-Length multiple fields'
- resp = self.http(
- b"""GET / HTTP/1.1
-Host: localhost
-Custom-Header: \x1F
-Connection: close
-""",
- raw=True,
- )
+@pytest.mark.skip('not yet')
+def test_http_header_host_absent():
+ client.load('host')
- assert resp['status'] == 400, 'value chars below'
-
- def test_http_header_field_leading_sp(self):
- self.load('empty')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- ' Custom-Header': 'blah',
- 'Connection': 'close',
- }
- )['status']
- == 400
- ), 'field leading sp'
-
- def test_http_header_field_leading_htab(self):
- self.load('empty')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- '\tCustom-Header': 'blah',
- 'Connection': 'close',
- }
- )['status']
- == 400
- ), 'field leading htab'
-
- def test_http_header_field_trailing_sp(self):
- self.load('empty')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Custom-Header ': 'blah',
- 'Connection': 'close',
- }
- )['status']
- == 400
- ), 'field trailing sp'
-
- def test_http_header_field_trailing_htab(self):
- self.load('empty')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Custom-Header\t': 'blah',
- 'Connection': 'close',
- }
- )['status']
- == 400
- ), 'field trailing htab'
-
- def test_http_header_content_length_big(self):
- self.load('empty')
-
- assert (
- self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Length': str(2**64),
- 'Connection': 'close',
- },
- body='X' * 1000,
- )['status']
- == 400
- ), 'Content-Length big'
-
- def test_http_header_content_length_negative(self):
- self.load('empty')
-
- assert (
- self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Length': '-100',
- 'Connection': 'close',
- },
- body='X' * 1000,
- )['status']
- == 400
- ), 'Content-Length negative'
-
- def test_http_header_content_length_text(self):
- self.load('empty')
-
- assert (
- self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Length': 'blah',
- 'Connection': 'close',
- },
- body='X' * 1000,
- )['status']
- == 400
- ), 'Content-Length text'
-
- def test_http_header_content_length_multiple_values(self):
- self.load('empty')
-
- assert (
- self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Length': '41, 42',
- 'Connection': 'close',
- },
- body='X' * 1000,
- )['status']
- == 400
- ), 'Content-Length multiple value'
-
- def test_http_header_content_length_multiple_fields(self):
- self.load('empty')
-
- assert (
- self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Length': ['41', '42'],
- 'Connection': 'close',
- },
- body='X' * 1000,
- )['status']
- == 400
- ), 'Content-Length multiple fields'
-
- @pytest.mark.skip('not yet')
- def test_http_header_host_absent(self):
- self.load('host')
-
- resp = self.get(headers={'Connection': 'close'})
-
- assert resp['status'] == 400, 'Host absent status'
-
- def test_http_header_host_empty(self):
- self.load('host')
-
- resp = self.get(headers={'Host': '', 'Connection': 'close'})
-
- assert resp['status'] == 200, 'Host empty status'
- assert resp['headers']['X-Server-Name'] != '', 'Host empty SERVER_NAME'
-
- def test_http_header_host_big(self):
- self.load('empty')
-
- assert (
- self.get(headers={'Host': 'X' * 10000, 'Connection': 'close'})[
- 'status'
- ]
- == 431
- ), 'Host big'
-
- def test_http_header_host_port(self):
- self.load('host')
-
- resp = self.get(
- headers={'Host': 'exmaple.com:7080', 'Connection': 'close'}
- )
+ resp = client.get(headers={'Connection': 'close'})
- assert resp['status'] == 200, 'Host port status'
- assert (
- resp['headers']['X-Server-Name'] == 'exmaple.com'
- ), 'Host port SERVER_NAME'
- assert (
- resp['headers']['X-Http-Host'] == 'exmaple.com:7080'
- ), 'Host port HTTP_HOST'
-
- def test_http_header_host_port_empty(self):
- self.load('host')
-
- resp = self.get(headers={'Host': 'exmaple.com:', 'Connection': 'close'})
-
- assert resp['status'] == 200, 'Host port empty status'
- assert (
- resp['headers']['X-Server-Name'] == 'exmaple.com'
- ), 'Host port empty SERVER_NAME'
- assert (
- resp['headers']['X-Http-Host'] == 'exmaple.com:'
- ), 'Host port empty HTTP_HOST'
-
- def test_http_header_host_literal(self):
- self.load('host')
-
- resp = self.get(headers={'Host': '127.0.0.1', 'Connection': 'close'})
-
- assert resp['status'] == 200, 'Host literal status'
- assert (
- resp['headers']['X-Server-Name'] == '127.0.0.1'
- ), 'Host literal SERVER_NAME'
-
- def test_http_header_host_literal_ipv6(self):
- self.load('host')
-
- resp = self.get(headers={'Host': '[::1]:7080', 'Connection': 'close'})
-
- assert resp['status'] == 200, 'Host literal ipv6 status'
- assert (
- resp['headers']['X-Server-Name'] == '[::1]'
- ), 'Host literal ipv6 SERVER_NAME'
- assert (
- resp['headers']['X-Http-Host'] == '[::1]:7080'
- ), 'Host literal ipv6 HTTP_HOST'
-
- def test_http_header_host_trailing_period(self):
- self.load('host')
-
- resp = self.get(headers={'Host': '127.0.0.1.', 'Connection': 'close'})
-
- assert resp['status'] == 200, 'Host trailing period status'
- assert (
- resp['headers']['X-Server-Name'] == '127.0.0.1'
- ), 'Host trailing period SERVER_NAME'
- assert (
- resp['headers']['X-Http-Host'] == '127.0.0.1.'
- ), 'Host trailing period HTTP_HOST'
-
- def test_http_header_host_trailing_period_2(self):
- self.load('host')
-
- resp = self.get(headers={'Host': 'EXAMPLE.COM.', 'Connection': 'close'})
-
- assert resp['status'] == 200, 'Host trailing period 2 status'
- assert (
- resp['headers']['X-Server-Name'] == 'example.com'
- ), 'Host trailing period 2 SERVER_NAME'
- assert (
- resp['headers']['X-Http-Host'] == 'EXAMPLE.COM.'
- ), 'Host trailing period 2 HTTP_HOST'
-
- def test_http_header_host_case_insensitive(self):
- self.load('host')
-
- resp = self.get(headers={'Host': 'EXAMPLE.COM', 'Connection': 'close'})
-
- assert resp['status'] == 200, 'Host case insensitive'
- assert (
- resp['headers']['X-Server-Name'] == 'example.com'
- ), 'Host case insensitive SERVER_NAME'
-
- def test_http_header_host_double_dot(self):
- self.load('empty')
-
- assert (
- self.get(headers={'Host': '127.0.0..1', 'Connection': 'close'})[
- 'status'
- ]
- == 400
- ), 'Host double dot'
-
- def test_http_header_host_slash(self):
- self.load('empty')
-
- assert (
- self.get(headers={'Host': '/localhost', 'Connection': 'close'})[
- 'status'
- ]
- == 400
- ), 'Host slash'
-
- def test_http_header_host_multiple_fields(self):
- self.load('empty')
-
- assert (
- self.get(
- headers={
- 'Host': ['localhost', 'example.com'],
- 'Connection': 'close',
- }
- )['status']
- == 400
- ), 'Host multiple fields'
-
- def test_http_discard_unsafe_fields(self):
- self.load('header_fields')
-
- def check_status(header):
- resp = self.get(
- headers={
- 'Host': 'localhost',
- header: 'blah',
- 'Connection': 'close',
- }
- )
-
- assert resp['status'] == 200
- return resp
-
- resp = check_status("!Custom-Header")
- assert 'CUSTOM' not in resp['headers']['All-Headers']
-
- resp = check_status("Custom_Header")
- assert 'CUSTOM' not in resp['headers']['All-Headers']
-
- assert 'success' in self.conf(
- {'http': {'discard_unsafe_fields': False}},
- 'settings',
- )
+ assert resp['status'] == 400, 'Host absent status'
+
+
+def test_http_header_host_empty():
+ client.load('host')
+
+ resp = client.get(headers={'Host': '', 'Connection': 'close'})
+
+ assert resp['status'] == 200, 'Host empty status'
+ assert resp['headers']['X-Server-Name'] != '', 'Host empty SERVER_NAME'
+
+
+def test_http_header_host_big():
+ client.load('empty')
+
+ assert (
+ client.get(headers={'Host': 'X' * 10000, 'Connection': 'close'})[
+ 'status'
+ ]
+ == 431
+ ), 'Host big'
+
+
+def test_http_header_host_port():
+ client.load('host')
+
+ resp = client.get(
+ headers={'Host': 'exmaple.com:7080', 'Connection': 'close'}
+ )
+
+ assert resp['status'] == 200, 'Host port status'
+ assert (
+ resp['headers']['X-Server-Name'] == 'exmaple.com'
+ ), 'Host port SERVER_NAME'
+ assert (
+ resp['headers']['X-Http-Host'] == 'exmaple.com:7080'
+ ), 'Host port HTTP_HOST'
+
+
+def test_http_header_host_port_empty():
+ client.load('host')
+
+ resp = client.get(headers={'Host': 'exmaple.com:', 'Connection': 'close'})
+
+ assert resp['status'] == 200, 'Host port empty status'
+ assert (
+ resp['headers']['X-Server-Name'] == 'exmaple.com'
+ ), 'Host port empty SERVER_NAME'
+ assert (
+ resp['headers']['X-Http-Host'] == 'exmaple.com:'
+ ), 'Host port empty HTTP_HOST'
+
+
+def test_http_header_host_literal():
+ client.load('host')
+
+ resp = client.get(headers={'Host': '127.0.0.1', 'Connection': 'close'})
+
+ assert resp['status'] == 200, 'Host literal status'
+ assert (
+ resp['headers']['X-Server-Name'] == '127.0.0.1'
+ ), 'Host literal SERVER_NAME'
+
+
+def test_http_header_host_literal_ipv6():
+ client.load('host')
+
+ resp = client.get(headers={'Host': '[::1]:7080', 'Connection': 'close'})
+
+ assert resp['status'] == 200, 'Host literal ipv6 status'
+ assert (
+ resp['headers']['X-Server-Name'] == '[::1]'
+ ), 'Host literal ipv6 SERVER_NAME'
+ assert (
+ resp['headers']['X-Http-Host'] == '[::1]:7080'
+ ), 'Host literal ipv6 HTTP_HOST'
+
+
+def test_http_header_host_trailing_period():
+ client.load('host')
+
+ resp = client.get(headers={'Host': '127.0.0.1.', 'Connection': 'close'})
+
+ assert resp['status'] == 200, 'Host trailing period status'
+ assert (
+ resp['headers']['X-Server-Name'] == '127.0.0.1'
+ ), 'Host trailing period SERVER_NAME'
+ assert (
+ resp['headers']['X-Http-Host'] == '127.0.0.1.'
+ ), 'Host trailing period HTTP_HOST'
- resp = check_status("!#$%&'*+.^`|~Custom_Header")
- assert 'CUSTOM' in resp['headers']['All-Headers']
- assert 'success' in self.conf(
- {'http': {'discard_unsafe_fields': True}},
- 'settings',
+def test_http_header_host_trailing_period_2():
+ client.load('host')
+
+ resp = client.get(headers={'Host': 'EXAMPLE.COM.', 'Connection': 'close'})
+
+ assert resp['status'] == 200, 'Host trailing period 2 status'
+ assert (
+ resp['headers']['X-Server-Name'] == 'example.com'
+ ), 'Host trailing period 2 SERVER_NAME'
+ assert (
+ resp['headers']['X-Http-Host'] == 'EXAMPLE.COM.'
+ ), 'Host trailing period 2 HTTP_HOST'
+
+
+def test_http_header_host_case_insensitive():
+ client.load('host')
+
+ resp = client.get(headers={'Host': 'EXAMPLE.COM', 'Connection': 'close'})
+
+ assert resp['status'] == 200, 'Host case insensitive'
+ assert (
+ resp['headers']['X-Server-Name'] == 'example.com'
+ ), 'Host case insensitive SERVER_NAME'
+
+
+def test_http_header_host_double_dot():
+ client.load('empty')
+
+ assert (
+ client.get(headers={'Host': '127.0.0..1', 'Connection': 'close'})[
+ 'status'
+ ]
+ == 400
+ ), 'Host double dot'
+
+
+def test_http_header_host_slash():
+ client.load('empty')
+
+ assert (
+ client.get(headers={'Host': '/localhost', 'Connection': 'close'})[
+ 'status'
+ ]
+ == 400
+ ), 'Host slash'
+
+
+def test_http_header_host_multiple_fields():
+ client.load('empty')
+
+ assert (
+ client.get(
+ headers={
+ 'Host': ['localhost', 'example.com'],
+ 'Connection': 'close',
+ }
+ )['status']
+ == 400
+ ), 'Host multiple fields'
+
+
+def test_http_discard_unsafe_fields():
+ client.load('header_fields')
+
+ def check_status(header):
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ header: 'blah',
+ 'Connection': 'close',
+ }
)
- resp = check_status("!Custom-Header")
- assert 'CUSTOM' not in resp['headers']['All-Headers']
+ assert resp['status'] == 200
+ return resp
+
+ resp = check_status("!Custom-Header")
+ assert 'CUSTOM' not in resp['headers']['All-Headers']
+
+ resp = check_status("Custom_Header")
+ assert 'CUSTOM' not in resp['headers']['All-Headers']
+
+ assert 'success' in client.conf(
+ {'http': {'discard_unsafe_fields': False}},
+ 'settings',
+ )
+
+ resp = check_status("!#$%&'*+.^`|~Custom_Header")
+ assert 'CUSTOM' in resp['headers']['All-Headers']
+
+ assert 'success' in client.conf(
+ {'http': {'discard_unsafe_fields': True}},
+ 'settings',
+ )
+
+ resp = check_status("!Custom-Header")
+ assert 'CUSTOM' not in resp['headers']['All-Headers']
- resp = check_status("Custom_Header")
- assert 'CUSTOM' not in resp['headers']['All-Headers']
+ resp = check_status("Custom_Header")
+ assert 'CUSTOM' not in resp['headers']['All-Headers']
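
The header tests above use client.get() and client.post() for well-formed requests and fall back to client.http(..., raw=True) whenever the bytes under test (such as \x1F or \xFF) cannot be expressed through a Python headers dict. A minimal sketch of that raw-request pattern, reusing only the ApplicationPython helpers shown in this diff:

    from unit.applications.lang.python import ApplicationPython

    client = ApplicationPython()


    def test_http_header_raw_sketch():
        client.load('custom_header')

        # Hand-built request: \xff is allowed in a field value, so 200 is
        # expected; latin1 keeps the byte intact when the response is decoded.
        resp = client.http(
            b'GET / HTTP/1.1\r\n'
            b'Host: localhost\r\n'
            b'Custom-Header: \xff\r\n'
            b'Connection: close\r\n'
            b'\r\n',
            raw=True,
            encoding='latin1',
        )

        assert resp['status'] == 200
        assert resp['headers']['Custom-Header'] == '\xff'
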
diff --git a/test/test_java_application.py b/test/test_java_application.py
index 6ff556a8..a8814583 100644
--- a/test/test_java_application.py
+++ b/test/test_java_application.py
@@ -3,1033 +3,1014 @@ import os
import re
import time
-from unit.applications.lang.java import TestApplicationJava
+from unit.applications.lang.java import ApplicationJava
from unit.option import option
from unit.utils import public_dir
+prerequisites = {'modules': {'java': 'all'}}
+
+client = ApplicationJava()
+
+
+def test_java_conf_error(temp_dir, skip_alert):
+ skip_alert(
+ r'realpath.*failed',
+ r'failed to apply new conf',
+ r'application setup failed',
+ )
+ assert 'error' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "applications/app"}},
+ "applications": {
+ "app": {
+ "type": client.get_application_type(),
+ "processes": 1,
+ "working_directory": f"{option.test_dir}/java/empty",
+ "webapp": f"{temp_dir}/java",
+ "unit_jars": f"{temp_dir}/no_such_dir",
+ }
+ },
+ }
+ ), 'conf error'
-class TestJavaApplication(TestApplicationJava):
- prerequisites = {'modules': {'java': 'all'}}
- def test_java_conf_error(self, temp_dir, skip_alert):
- skip_alert(
- r'realpath.*failed',
- r'failed to apply new conf',
- r'application setup failed',
- )
- assert 'error' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "applications/app"}},
- "applications": {
- "app": {
- "type": self.get_application_type(),
- "processes": 1,
- "working_directory": f"{option.test_dir}/java/empty",
- "webapp": f"{temp_dir}/java",
- "unit_jars": f"{temp_dir}/no_such_dir",
- }
- },
- }
- ), 'conf error'
+def test_java_war(temp_dir):
+ client.load('empty_war')
- def test_java_war(self, temp_dir):
- self.load('empty_war')
+ assert 'success' in client.conf(
+ f'"{temp_dir}/java/empty.war"',
+ '/config/applications/empty_war/webapp',
+ ), 'configure war'
- assert 'success' in self.conf(
- f'"{temp_dir}/java/empty.war"',
- '/config/applications/empty_war/webapp',
- ), 'configure war'
+ assert client.get()['status'] == 200, 'war'
- assert self.get()['status'] == 200, 'war'
- def test_java_application_cookies(self):
- self.load('cookies')
+def test_java_application_cookies():
+ client.load('cookies')
- headers = self.get(
- headers={
- 'Cookie': 'var1=val1; var2=val2',
- 'Host': 'localhost',
- 'Connection': 'close',
- }
- )['headers']
+ headers = client.get(
+ headers={
+ 'Cookie': 'var1=val1; var2=val2',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ }
+ )['headers']
- assert headers['X-Cookie-1'] == 'val1', 'cookie 1'
- assert headers['X-Cookie-2'] == 'val2', 'cookie 2'
+ assert headers['X-Cookie-1'] == 'val1', 'cookie 1'
+ assert headers['X-Cookie-2'] == 'val2', 'cookie 2'
- def test_java_application_filter(self):
- self.load('filter')
- headers = self.get()['headers']
+def test_java_application_filter():
+ client.load('filter')
- assert headers['X-Filter-Before'] == '1', 'filter before'
- assert headers['X-Filter-After'] == '1', 'filter after'
+ headers = client.get()['headers']
- assert (
- self.get(url='/test')['headers']['X-Filter-After'] == '0'
- ), 'filter after 2'
+ assert headers['X-Filter-Before'] == '1', 'filter before'
+ assert headers['X-Filter-After'] == '1', 'filter after'
- def test_java_application_get_variables(self):
- self.load('get_params')
+ assert (
+ client.get(url='/test')['headers']['X-Filter-After'] == '0'
+ ), 'filter after 2'
- def check_header(header, expect):
- values = header.split(' ')[:-1]
- assert len(values) == len(expect)
- assert set(values) == set(expect)
- headers = self.get(url='/?var1=val1&var2=&var4=val4&var4=foo')[
- 'headers'
- ]
+def test_java_application_get_variables():
+ client.load('get_params')
- assert headers['X-Var-1'] == 'val1', 'GET variables'
- assert headers['X-Var-2'] == 'true', 'GET variables 2'
- assert headers['X-Var-3'] == 'false', 'GET variables 3'
+ def check_header(header, expect):
+ values = header.split(' ')[:-1]
+ assert len(values) == len(expect)
+ assert set(values) == set(expect)
- check_header(headers['X-Param-Names'], ['var4', 'var2', 'var1'])
- check_header(headers['X-Param-Values'], ['val4', 'foo'])
- check_header(
- headers['X-Param-Map'], ['var2=', 'var1=val1', 'var4=val4,foo']
- )
+ headers = client.get(url='/?var1=val1&var2=&var4=val4&var4=foo')['headers']
- def test_java_application_post_variables(self):
- self.load('post_params')
+ assert headers['X-Var-1'] == 'val1', 'GET variables'
+ assert headers['X-Var-2'] == 'true', 'GET variables 2'
+ assert headers['X-Var-3'] == 'false', 'GET variables 3'
- headers = self.post(
- headers={
- 'Content-Type': 'application/x-www-form-urlencoded',
- 'Host': 'localhost',
- 'Connection': 'close',
- },
- body='var1=val1&var2=',
- )['headers']
+ check_header(headers['X-Param-Names'], ['var4', 'var2', 'var1'])
+ check_header(headers['X-Param-Values'], ['val4', 'foo'])
+ check_header(
+ headers['X-Param-Map'], ['var2=', 'var1=val1', 'var4=val4,foo']
+ )
- assert headers['X-Var-1'] == 'val1', 'POST variables'
- assert headers['X-Var-2'] == 'true', 'POST variables 2'
- assert headers['X-Var-3'] == 'false', 'POST variables 3'
- def test_java_application_session(self):
- self.load('session')
+def test_java_application_post_variables():
+ client.load('post_params')
- headers = self.get(url='/?var1=val1')['headers']
- session_id = headers['X-Session-Id']
+ headers = client.post(
+ headers={
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ },
+ body='var1=val1&var2=',
+ )['headers']
- assert headers['X-Var-1'] == 'null', 'variable empty'
- assert headers['X-Session-New'] == 'true', 'session create'
+ assert headers['X-Var-1'] == 'val1', 'POST variables'
+ assert headers['X-Var-2'] == 'true', 'POST variables 2'
+ assert headers['X-Var-3'] == 'false', 'POST variables 3'
- headers = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': f'JSESSIONID={session_id}',
- 'Connection': 'close',
- },
- url='/?var1=val2',
- )['headers']
- assert headers['X-Var-1'] == 'val1', 'variable'
- assert headers['X-Session-New'] == 'false', 'session resume'
- assert session_id == headers['X-Session-Id'], 'session same id'
+def test_java_application_session():
+ client.load('session')
- def test_java_application_session_active(self):
- self.load('session_inactive')
+ headers = client.get(url='/?var1=val1')['headers']
+ session_id = headers['X-Session-Id']
- resp = self.get(
- headers={
- 'X-Interval': '4',
- 'Host': 'localhost',
- 'Connection': 'close',
- }
- )
- session_id = resp['headers']['X-Session-Id']
-
- assert resp['status'] == 200, 'session init'
- assert resp['headers']['X-Session-Interval'] == '4', 'session interval'
- assert (
- abs(
- self.date_to_sec_epoch(
- resp['headers']['X-Session-Last-Access-Time']
- )
- - self.sec_epoch()
- )
- < 5
- ), 'session last access time'
-
- time.sleep(1)
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': f'JSESSIONID={session_id}',
- 'Connection': 'close',
- }
- )
+ assert headers['X-Var-1'] == 'null', 'variable empty'
+ assert headers['X-Session-New'] == 'true', 'session create'
- assert resp['headers']['X-Session-Id'] == session_id, 'session active'
+ headers = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': f'JSESSIONID={session_id}',
+ 'Connection': 'close',
+ },
+ url='/?var1=val2',
+ )['headers']
- session_id = resp['headers']['X-Session-Id']
+ assert headers['X-Var-1'] == 'val1', 'variable'
+ assert headers['X-Session-New'] == 'false', 'session resume'
+ assert session_id == headers['X-Session-Id'], 'session same id'
- time.sleep(1)
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': f'JSESSIONID={session_id}',
- 'Connection': 'close',
- }
+def test_java_application_session_active(date_to_sec_epoch, sec_epoch):
+ client.load('session_inactive')
+
+ resp = client.get(
+ headers={
+ 'X-Interval': '4',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ }
+ )
+ session_id = resp['headers']['X-Session-Id']
+
+ assert resp['status'] == 200, 'session init'
+ assert resp['headers']['X-Session-Interval'] == '4', 'session interval'
+ assert (
+ abs(
+ date_to_sec_epoch(resp['headers']['X-Session-Last-Access-Time'])
+ - sec_epoch
)
+ < 5
+ ), 'session last access time'
- assert resp['headers']['X-Session-Id'] == session_id, 'session active 2'
+ time.sleep(1)
- time.sleep(2)
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': f'JSESSIONID={session_id}',
+ 'Connection': 'close',
+ }
+ )
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': f'JSESSIONID={session_id}',
- 'Connection': 'close',
- }
- )
+ assert resp['headers']['X-Session-Id'] == session_id, 'session active'
- assert resp['headers']['X-Session-Id'] == session_id, 'session active 3'
+ session_id = resp['headers']['X-Session-Id']
- def test_java_application_session_inactive(self):
- self.load('session_inactive')
+ time.sleep(1)
- resp = self.get(
- headers={
- 'X-Interval': '1',
- 'Host': 'localhost',
- 'Connection': 'close',
- }
- )
- session_id = resp['headers']['X-Session-Id']
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': f'JSESSIONID={session_id}',
+ 'Connection': 'close',
+ }
+ )
- time.sleep(3)
+ assert resp['headers']['X-Session-Id'] == session_id, 'session active 2'
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': f'JSESSIONID={session_id}',
- 'Connection': 'close',
- }
- )
+ time.sleep(2)
- assert resp['headers']['X-Session-Id'] != session_id, 'session inactive'
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': f'JSESSIONID={session_id}',
+ 'Connection': 'close',
+ }
+ )
- def test_java_application_session_invalidate(self):
- self.load('session_invalidate')
+ assert resp['headers']['X-Session-Id'] == session_id, 'session active 3'
- resp = self.get()
- session_id = resp['headers']['X-Session-Id']
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': f'JSESSIONID={session_id}',
- 'Connection': 'close',
- }
- )
+def test_java_application_session_inactive():
+ client.load('session_inactive')
- assert (
- resp['headers']['X-Session-Id'] != session_id
- ), 'session invalidate'
+ resp = client.get(
+ headers={
+ 'X-Interval': '1',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ }
+ )
+ session_id = resp['headers']['X-Session-Id']
- def test_java_application_session_listeners(self):
- self.load('session_listeners')
+ time.sleep(3)
- headers = self.get(url='/test?var1=val1')['headers']
- session_id = headers['X-Session-Id']
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': f'JSESSIONID={session_id}',
+ 'Connection': 'close',
+ }
+ )
- assert headers['X-Session-Created'] == session_id, 'session create'
- assert headers['X-Attr-Added'] == 'var1=val1', 'attribute add'
+ assert resp['headers']['X-Session-Id'] != session_id, 'session inactive'
- headers = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': f'JSESSIONID={session_id}',
- 'Connection': 'close',
- },
- url='/?var1=val2',
- )['headers']
- assert session_id == headers['X-Session-Id'], 'session same id'
- assert headers['X-Attr-Replaced'] == 'var1=val1', 'attribute replace'
+def test_java_application_session_invalidate():
+ client.load('session_invalidate')
- headers = self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': f'JSESSIONID={session_id}',
- 'Connection': 'close',
- },
- url='/',
- )['headers']
+ resp = client.get()
+ session_id = resp['headers']['X-Session-Id']
- assert session_id == headers['X-Session-Id'], 'session same id'
- assert headers['X-Attr-Removed'] == 'var1=val2', 'attribute remove'
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': f'JSESSIONID={session_id}',
+ 'Connection': 'close',
+ }
+ )
- def test_java_application_jsp(self):
- self.load('jsp')
+ assert resp['headers']['X-Session-Id'] != session_id, 'session invalidate'
- headers = self.get(url='/index.jsp')['headers']
- assert headers['X-Unit-JSP'] == 'ok', 'JSP Ok header'
+def test_java_application_session_listeners():
+ client.load('session_listeners')
- def test_java_application_url_pattern(self):
- self.load('url_pattern')
+ headers = client.get(url='/test?var1=val1')['headers']
+ session_id = headers['X-Session-Id']
- headers = self.get(url='/foo/bar/index.html')['headers']
+ assert headers['X-Session-Created'] == session_id, 'session create'
+ assert headers['X-Attr-Added'] == 'var1=val1', 'attribute add'
- assert headers['X-Id'] == 'servlet1', '#1 Servlet1 request'
- assert (
- headers['X-Request-URI'] == '/foo/bar/index.html'
- ), '#1 request URI'
- assert headers['X-Servlet-Path'] == '/foo/bar', '#1 servlet path'
- assert headers['X-Path-Info'] == '/index.html', '#1 path info'
+ headers = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': f'JSESSIONID={session_id}',
+ 'Connection': 'close',
+ },
+ url='/?var1=val2',
+ )['headers']
- headers = self.get(url='/foo/bar/index.bop')['headers']
+ assert session_id == headers['X-Session-Id'], 'session same id'
+ assert headers['X-Attr-Replaced'] == 'var1=val1', 'attribute replace'
- assert headers['X-Id'] == 'servlet1', '#2 Servlet1 request'
- assert (
- headers['X-Request-URI'] == '/foo/bar/index.bop'
- ), '#2 request URI'
- assert headers['X-Servlet-Path'] == '/foo/bar', '#2 servlet path'
- assert headers['X-Path-Info'] == '/index.bop', '#2 path info'
+ headers = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': f'JSESSIONID={session_id}',
+ 'Connection': 'close',
+ },
+ url='/',
+ )['headers']
- headers = self.get(url='/baz')['headers']
+ assert session_id == headers['X-Session-Id'], 'session same id'
+ assert headers['X-Attr-Removed'] == 'var1=val2', 'attribute remove'
- assert headers['X-Id'] == 'servlet2', '#3 Servlet2 request'
- assert headers['X-Request-URI'] == '/baz', '#3 request URI'
- assert headers['X-Servlet-Path'] == '/baz', '#3 servlet path'
- assert headers['X-Path-Info'] == 'null', '#3 path info'
- headers = self.get(url='/baz/index.html')['headers']
+def test_java_application_jsp():
+ client.load('jsp')
- assert headers['X-Id'] == 'servlet2', '#4 Servlet2 request'
- assert headers['X-Request-URI'] == '/baz/index.html', '#4 request URI'
- assert headers['X-Servlet-Path'] == '/baz', '#4 servlet path'
- assert headers['X-Path-Info'] == '/index.html', '#4 path info'
+ headers = client.get(url='/index.jsp')['headers']
- headers = self.get(url='/catalog')['headers']
+ assert headers['X-Unit-JSP'] == 'ok', 'JSP Ok header'
- assert headers['X-Id'] == 'servlet3', '#5 Servlet3 request'
- assert headers['X-Request-URI'] == '/catalog', '#5 request URI'
- assert headers['X-Servlet-Path'] == '/catalog', '#5 servlet path'
- assert headers['X-Path-Info'] == 'null', '#5 path info'
- headers = self.get(url='/catalog/index.html')['headers']
+def test_java_application_url_pattern():
+ client.load('url_pattern')
- assert headers['X-Id'] == 'default', '#6 default request'
- assert (
- headers['X-Request-URI'] == '/catalog/index.html'
- ), '#6 request URI'
- assert (
- headers['X-Servlet-Path'] == '/catalog/index.html'
- ), '#6 servlet path'
- assert headers['X-Path-Info'] == 'null', '#6 path info'
-
- headers = self.get(url='/catalog/racecar.bop')['headers']
-
- assert headers['X-Id'] == 'servlet4', '#7 servlet4 request'
- assert (
- headers['X-Request-URI'] == '/catalog/racecar.bop'
- ), '#7 request URI'
- assert (
- headers['X-Servlet-Path'] == '/catalog/racecar.bop'
- ), '#7 servlet path'
- assert headers['X-Path-Info'] == 'null', '#7 path info'
-
- headers = self.get(url='/index.bop')['headers']
-
- assert headers['X-Id'] == 'servlet4', '#8 servlet4 request'
- assert headers['X-Request-URI'] == '/index.bop', '#8 request URI'
- assert headers['X-Servlet-Path'] == '/index.bop', '#8 servlet path'
- assert headers['X-Path-Info'] == 'null', '#8 path info'
-
- headers = self.get(url='/foo/baz')['headers']
-
- assert headers['X-Id'] == 'servlet0', '#9 servlet0 request'
- assert headers['X-Request-URI'] == '/foo/baz', '#9 request URI'
- assert headers['X-Servlet-Path'] == '/foo', '#9 servlet path'
- assert headers['X-Path-Info'] == '/baz', '#9 path info'
-
- headers = self.get()['headers']
-
- assert headers['X-Id'] == 'default', '#10 default request'
- assert headers['X-Request-URI'] == '/', '#10 request URI'
- assert headers['X-Servlet-Path'] == '/', '#10 servlet path'
- assert headers['X-Path-Info'] == 'null', '#10 path info'
-
- headers = self.get(url='/index.bop/')['headers']
-
- assert headers['X-Id'] == 'default', '#11 default request'
- assert headers['X-Request-URI'] == '/index.bop/', '#11 request URI'
- assert headers['X-Servlet-Path'] == '/index.bop/', '#11 servlet path'
- assert headers['X-Path-Info'] == 'null', '#11 path info'
-
- def test_java_application_header(self):
- self.load('header')
-
- headers = self.get()['headers']
-
- assert headers['X-Set-Utf8-Value'] == '????', 'set Utf8 header value'
- assert headers['X-Set-Utf8-Name-???'] == 'x', 'set Utf8 header name'
- assert headers['X-Add-Utf8-Value'] == '????', 'add Utf8 header value'
- assert headers['X-Add-Utf8-Name-???'] == 'y', 'add Utf8 header name'
- assert headers['X-Add-Test'] == 'v1', 'add null header'
- assert ('X-Set-Test1' in headers) == False, 'set null header'
- assert headers['X-Set-Test2'] == '', 'set empty header'
-
- def test_java_application_content_type(self):
- self.load('content_type')
-
- headers = self.get(url='/1')['headers']
-
- assert (
- headers['Content-Type'] == 'text/plain;charset=utf-8'
- ), '#1 Content-Type header'
- assert (
- headers['X-Content-Type'] == 'text/plain;charset=utf-8'
- ), '#1 response Content-Type'
- assert headers['X-Character-Encoding'] == 'utf-8', '#1 response charset'
-
- headers = self.get(url='/2')['headers']
-
- assert (
- headers['Content-Type'] == 'text/plain;charset=iso-8859-1'
- ), '#2 Content-Type header'
- assert (
- headers['X-Content-Type'] == 'text/plain;charset=iso-8859-1'
- ), '#2 response Content-Type'
- assert (
- headers['X-Character-Encoding'] == 'iso-8859-1'
- ), '#2 response charset'
-
- headers = self.get(url='/3')['headers']
-
- assert (
- headers['Content-Type'] == 'text/plain;charset=windows-1251'
- ), '#3 Content-Type header'
- assert (
- headers['X-Content-Type'] == 'text/plain;charset=windows-1251'
- ), '#3 response Content-Type'
- assert (
- headers['X-Character-Encoding'] == 'windows-1251'
- ), '#3 response charset'
-
- headers = self.get(url='/4')['headers']
-
- assert (
- headers['Content-Type'] == 'text/plain;charset=windows-1251'
- ), '#4 Content-Type header'
- assert (
- headers['X-Content-Type'] == 'text/plain;charset=windows-1251'
- ), '#4 response Content-Type'
- assert (
- headers['X-Character-Encoding'] == 'windows-1251'
- ), '#4 response charset'
-
- headers = self.get(url='/5')['headers']
-
- assert (
- headers['Content-Type'] == 'text/plain;charset=iso-8859-1'
- ), '#5 Content-Type header'
- assert (
- headers['X-Content-Type'] == 'text/plain;charset=iso-8859-1'
- ), '#5 response Content-Type'
- assert (
- headers['X-Character-Encoding'] == 'iso-8859-1'
- ), '#5 response charset'
-
- headers = self.get(url='/6')['headers']
-
- assert ('Content-Type' in headers) == False, '#6 no Content-Type header'
- assert (
- 'X-Content-Type' in headers
- ) == False, '#6 no response Content-Type'
- assert headers['X-Character-Encoding'] == 'utf-8', '#6 response charset'
-
- headers = self.get(url='/7')['headers']
-
- assert (
- headers['Content-Type'] == 'text/plain;charset=utf-8'
- ), '#7 Content-Type header'
- assert (
- headers['X-Content-Type'] == 'text/plain;charset=utf-8'
- ), '#7 response Content-Type'
- assert headers['X-Character-Encoding'] == 'utf-8', '#7 response charset'
-
- headers = self.get(url='/8')['headers']
-
- assert (
- headers['Content-Type'] == 'text/html;charset=utf-8'
- ), '#8 Content-Type header'
- assert (
- headers['X-Content-Type'] == 'text/html;charset=utf-8'
- ), '#8 response Content-Type'
- assert headers['X-Character-Encoding'] == 'utf-8', '#8 response charset'
-
- def test_java_application_welcome_files(self):
- self.load('welcome_files')
-
- headers = self.get()['headers']
-
- resp = self.get(url='/dir1')
-
- assert resp['status'] == 302, 'dir redirect expected'
-
- resp = self.get(url='/dir1/')
-
- assert ('This is index.txt.' in resp['body']) == True, 'dir1 index body'
- assert resp['headers']['X-TXT-Filter'] == '1', 'TXT Filter header'
-
- headers = self.get(url='/dir2/')['headers']
-
- assert headers['X-Unit-JSP'] == 'ok', 'JSP Ok header'
- assert headers['X-JSP-Filter'] == '1', 'JSP Filter header'
-
- headers = self.get(url='/dir3/')['headers']
-
- assert (
- headers['X-App-Servlet'] == '1'
- ), 'URL pattern overrides welcome file'
-
- headers = self.get(url='/dir4/')['headers']
-
- assert (
- 'X-App-Servlet' in headers
- ) == False, 'Static welcome file served first'
-
- headers = self.get(url='/dir5/')['headers']
-
- assert (
- headers['X-App-Servlet'] == '1'
- ), 'Servlet for welcome file served when no static file found'
-
- def test_java_application_request_listeners(self):
- self.load('request_listeners')
-
- headers = self.get(url='/test1')['headers']
-
- assert (
- headers['X-Request-Initialized'] == '/test1'
- ), 'request initialized event'
- assert headers['X-Request-Destroyed'] == '', 'request destroyed event'
- assert headers['X-Attr-Added'] == '', 'attribute added event'
- assert headers['X-Attr-Removed'] == '', 'attribute removed event'
- assert headers['X-Attr-Replaced'] == '', 'attribute replaced event'
-
- headers = self.get(url='/test2?var1=1')['headers']
-
- assert (
- headers['X-Request-Initialized'] == '/test2'
- ), 'request initialized event'
- assert (
- headers['X-Request-Destroyed'] == '/test1'
- ), 'request destroyed event'
- assert headers['X-Attr-Added'] == 'var=1;', 'attribute added event'
- assert headers['X-Attr-Removed'] == 'var=1;', 'attribute removed event'
- assert headers['X-Attr-Replaced'] == '', 'attribute replaced event'
-
- headers = self.get(url='/test3?var1=1&var2=2')['headers']
-
- assert (
- headers['X-Request-Initialized'] == '/test3'
- ), 'request initialized event'
- assert (
- headers['X-Request-Destroyed'] == '/test2'
- ), 'request destroyed event'
- assert headers['X-Attr-Added'] == 'var=1;', 'attribute added event'
- assert headers['X-Attr-Removed'] == 'var=2;', 'attribute removed event'
- assert (
- headers['X-Attr-Replaced'] == 'var=1;'
- ), 'attribute replaced event'
-
- headers = self.get(url='/test4?var1=1&var2=2&var3=3')['headers']
+ headers = client.get(url='/foo/bar/index.html')['headers']
- assert (
- headers['X-Request-Initialized'] == '/test4'
- ), 'request initialized event'
- assert (
- headers['X-Request-Destroyed'] == '/test3'
- ), 'request destroyed event'
- assert headers['X-Attr-Added'] == 'var=1;', 'attribute added event'
- assert headers['X-Attr-Removed'] == '', 'attribute removed event'
- assert (
- headers['X-Attr-Replaced'] == 'var=1;var=2;'
- ), 'attribute replaced event'
+ assert headers['X-Id'] == 'servlet1', '#1 Servlet1 request'
+ assert headers['X-Request-URI'] == '/foo/bar/index.html', '#1 request URI'
+ assert headers['X-Servlet-Path'] == '/foo/bar', '#1 servlet path'
+ assert headers['X-Path-Info'] == '/index.html', '#1 path info'
- def test_java_application_request_uri_forward(self):
- self.load('forward')
+ headers = client.get(url='/foo/bar/index.bop')['headers']
- resp = self.get(
- url='/fwd?uri=%2Fdata%2Ftest%3Furi%3Dnew_uri%26a%3D2%26b%3D3&a=1&c=4'
- )
- headers = resp['headers']
-
- assert (
- headers['X-REQUEST-Id'] == 'fwd'
- ), 'initial request servlet mapping'
- assert (
- headers['X-Forward-To'] == '/data/test?uri=new_uri&a=2&b=3'
- ), 'forwarding triggered'
- assert (
- headers['X-REQUEST-Param-uri'] == '/data/test?uri=new_uri&a=2&b=3'
- ), 'original uri parameter'
- assert headers['X-REQUEST-Param-a'] == '1', 'original a parameter'
- assert headers['X-REQUEST-Param-c'] == '4', 'original c parameter'
-
- assert (
- headers['X-FORWARD-Id'] == 'data'
- ), 'forward request servlet mapping'
- assert (
- headers['X-FORWARD-Request-URI'] == '/data/test'
- ), 'forward request uri'
- assert (
- headers['X-FORWARD-Servlet-Path'] == '/data'
- ), 'forward request servlet path'
- assert (
- headers['X-FORWARD-Path-Info'] == '/test'
- ), 'forward request path info'
- assert (
- headers['X-FORWARD-Query-String'] == 'uri=new_uri&a=2&b=3'
- ), 'forward request query string'
- assert (
- headers['X-FORWARD-Param-uri']
- == 'new_uri,/data/test?uri=new_uri&a=2&b=3'
- ), 'forward uri parameter'
- assert headers['X-FORWARD-Param-a'] == '2,1', 'forward a parameter'
- assert headers['X-FORWARD-Param-b'] == '3', 'forward b parameter'
- assert headers['X-FORWARD-Param-c'] == '4', 'forward c parameter'
-
- assert (
- headers['X-javax.servlet.forward.request_uri'] == '/fwd'
- ), 'original request uri'
- assert (
- headers['X-javax.servlet.forward.context_path'] == ''
- ), 'original request context path'
- assert (
- headers['X-javax.servlet.forward.servlet_path'] == '/fwd'
- ), 'original request servlet path'
- assert (
- headers['X-javax.servlet.forward.path_info'] == 'null'
- ), 'original request path info'
- assert (
- headers['X-javax.servlet.forward.query_string']
- == 'uri=%2Fdata%2Ftest%3Furi%3Dnew_uri%26a%3D2%26b%3D3&a=1&c=4'
- ), 'original request query'
-
- assert (
- 'Before forwarding' in resp['body']
- ) == False, 'discarded data added before forward() call'
- assert (
- 'X-After-Forwarding' in headers
- ) == False, 'cannot add headers after forward() call'
- assert (
- 'After forwarding' in resp['body']
- ) == False, 'cannot add data after forward() call'
-
- def test_java_application_named_dispatcher_forward(self):
- self.load('forward')
-
- resp = self.get(url='/fwd?disp=name&uri=data')
- headers = resp['headers']
-
- assert (
- headers['X-REQUEST-Id'] == 'fwd'
- ), 'initial request servlet mapping'
- assert headers['X-Forward-To'] == 'data', 'forwarding triggered'
-
- assert (
- headers['X-FORWARD-Id'] == 'data'
- ), 'forward request servlet mapping'
- assert headers['X-FORWARD-Request-URI'] == '/fwd', 'forward request uri'
- assert (
- headers['X-FORWARD-Servlet-Path'] == '/fwd'
- ), 'forward request servlet path'
- assert (
- headers['X-FORWARD-Path-Info'] == 'null'
- ), 'forward request path info'
- assert (
- headers['X-FORWARD-Query-String'] == 'disp=name&uri=data'
- ), 'forward request query string'
-
- assert (
- headers['X-javax.servlet.forward.request_uri'] == 'null'
- ), 'original request uri'
- assert (
- headers['X-javax.servlet.forward.context_path'] == 'null'
- ), 'original request context path'
- assert (
- headers['X-javax.servlet.forward.servlet_path'] == 'null'
- ), 'original request servlet path'
- assert (
- headers['X-javax.servlet.forward.path_info'] == 'null'
- ), 'original request path info'
- assert (
- headers['X-javax.servlet.forward.query_string'] == 'null'
- ), 'original request query'
-
- assert (
- 'Before forwarding' in resp['body']
- ) == False, 'discarded data added before forward() call'
- assert (
- 'X-After-Forwarding' in headers
- ) == False, 'cannot add headers after forward() call'
- assert (
- 'After forwarding' in resp['body']
- ) == False, 'cannot add data after forward() call'
-
- def test_java_application_request_uri_include(self):
- self.load('include')
-
- resp = self.get(url='/inc?uri=/data/test')
- headers = resp['headers']
- body = resp['body']
-
- assert (
- headers['X-REQUEST-Id'] == 'inc'
- ), 'initial request servlet mapping'
- assert headers['X-Include'] == '/data/test', 'including triggered'
-
- assert (
- 'X-INCLUDE-Id' in headers
- ) == False, 'unable to add headers in include request'
-
- assert (
- 'javax.servlet.include.request_uri: /data/test' in body
- ) == True, 'include request uri'
- # assert (
- # 'javax.servlet.include.context_path: ' in body
- # ) == True, 'include request context path'
- assert (
- 'javax.servlet.include.servlet_path: /data' in body
- ) == True, 'include request servlet path'
- assert (
- 'javax.servlet.include.path_info: /test' in body
- ) == True, 'include request path info'
- assert (
- 'javax.servlet.include.query_string: null' in body
- ) == True, 'include request query'
-
- assert (
- 'Before include' in body
- ) == True, 'preserve data added before include() call'
- assert (
- headers['X-After-Include'] == 'you-should-see-this'
- ), 'add headers after include() call'
- assert (
- 'After include' in body
- ) == True, 'add data after include() call'
-
- def test_java_application_named_dispatcher_include(self):
- self.load('include')
-
- resp = self.get(url='/inc?disp=name&uri=data')
- headers = resp['headers']
- body = resp['body']
-
- assert (
- headers['X-REQUEST-Id'] == 'inc'
- ), 'initial request servlet mapping'
- assert headers['X-Include'] == 'data', 'including triggered'
-
- assert (
- 'X-INCLUDE-Id' in headers
- ) == False, 'unable to add headers in include request'
-
- assert (
- 'javax.servlet.include.request_uri: null' in body
- ) == True, 'include request uri'
- # assert (
- # 'javax.servlet.include.context_path: null' in body
- # ) == True, 'include request context path'
- assert (
- 'javax.servlet.include.servlet_path: null' in body
- ) == True, 'include request servlet path'
- assert (
- 'javax.servlet.include.path_info: null' in body
- ) == True, 'include request path info'
- assert (
- 'javax.servlet.include.query_string: null' in body
- ) == True, 'include request query'
-
- assert (
- 'Before include' in body
- ) == True, 'preserve data added before include() call'
- assert (
- headers['X-After-Include'] == 'you-should-see-this'
- ), 'add headers after include() call'
- assert (
- 'After include' in body
- ) == True, 'add data after include() call'
-
- def test_java_application_path_translation(self):
- self.load('path_translation')
-
- headers = self.get(url='/pt/test?path=/')['headers']
-
- assert headers['X-Servlet-Path'] == '/pt', 'matched servlet path'
- assert headers['X-Path-Info'] == '/test', 'the rest of the path'
- assert (
- headers['X-Path-Translated']
- == f"{headers['X-Real-Path']}{headers['X-Path-Info']}"
- ), 'translated path is the app root + path info'
- assert (
- headers['X-Resource-Paths'].endswith('/WEB-INF/, /index.html]')
- == True
- ), 'app root directory content'
- assert (
- headers['X-Resource-As-Stream'] == 'null'
- ), 'no resource stream for root path'
-
- headers = self.get(url='/test?path=/none')['headers']
-
- assert headers['X-Servlet-Path'] == '/test', 'matched whole path'
- assert (
- headers['X-Path-Info'] == 'null'
- ), 'the rest of the path is null, whole path matched'
- assert (
- headers['X-Path-Translated'] == 'null'
- ), 'translated path is null because path info is null'
- assert (
- headers['X-Real-Path'].endswith('/none') == True
- ), 'read path is not null'
- assert headers['X-Resource-Paths'] == 'null', 'no resource found'
- assert headers['X-Resource-As-Stream'] == 'null', 'no resource stream'
-
- def test_java_application_query_string(self):
- self.load('query_string')
-
- assert (
- self.get(url='/?a=b')['headers']['X-Query-String'] == 'a=b'
- ), 'query string'
-
- def test_java_application_query_empty(self):
- self.load('query_string')
-
- assert (
- self.get(url='/?')['headers']['X-Query-String'] == ''
- ), 'query string empty'
-
- def test_java_application_query_absent(self):
- self.load('query_string')
-
- assert (
- self.get()['headers']['X-Query-String'] == 'null'
- ), 'query string absent'
-
- def test_java_application_empty(self):
- self.load('empty')
-
- assert self.get()['status'] == 200, 'empty'
-
- def test_java_application_keepalive_body(self):
- self.load('mirror')
-
- assert self.post()['status'] == 200, 'init'
-
- body = '0123456789' * 500
- (resp, sock) = self.post(
- headers={
- 'Connection': 'keep-alive',
- 'Content-Type': 'text/html',
- 'Host': 'localhost',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
+ assert headers['X-Id'] == 'servlet1', '#2 Servlet1 request'
+ assert headers['X-Request-URI'] == '/foo/bar/index.bop', '#2 request URI'
+ assert headers['X-Servlet-Path'] == '/foo/bar', '#2 servlet path'
+ assert headers['X-Path-Info'] == '/index.bop', '#2 path info'
- assert resp['body'] == body, 'keep-alive 1'
+ headers = client.get(url='/baz')['headers']
- body = '0123456789'
- resp = self.post(
- headers={
- 'Connection': 'close',
- 'Content-Type': 'text/html',
- 'Host': 'localhost',
- },
- sock=sock,
- body=body,
- )
+ assert headers['X-Id'] == 'servlet2', '#3 Servlet2 request'
+ assert headers['X-Request-URI'] == '/baz', '#3 request URI'
+ assert headers['X-Servlet-Path'] == '/baz', '#3 servlet path'
+ assert headers['X-Path-Info'] == 'null', '#3 path info'
- assert resp['body'] == body, 'keep-alive 2'
+ headers = client.get(url='/baz/index.html')['headers']
- def test_java_application_http_10(self):
- self.load('empty')
+ assert headers['X-Id'] == 'servlet2', '#4 Servlet2 request'
+ assert headers['X-Request-URI'] == '/baz/index.html', '#4 request URI'
+ assert headers['X-Servlet-Path'] == '/baz', '#4 servlet path'
+ assert headers['X-Path-Info'] == '/index.html', '#4 path info'
- assert self.get(http_10=True)['status'] == 200, 'HTTP 1.0'
+ headers = client.get(url='/catalog')['headers']
- def test_java_application_no_method(self):
- self.load('empty')
+ assert headers['X-Id'] == 'servlet3', '#5 Servlet3 request'
+ assert headers['X-Request-URI'] == '/catalog', '#5 request URI'
+ assert headers['X-Servlet-Path'] == '/catalog', '#5 servlet path'
+ assert headers['X-Path-Info'] == 'null', '#5 path info'
- assert self.post()['status'] == 405, 'no method'
+ headers = client.get(url='/catalog/index.html')['headers']
- def test_java_application_get_header(self):
- self.load('get_header')
+ assert headers['X-Id'] == 'default', '#6 default request'
+ assert headers['X-Request-URI'] == '/catalog/index.html', '#6 request URI'
+ assert headers['X-Servlet-Path'] == '/catalog/index.html', '#6 servlet path'
+ assert headers['X-Path-Info'] == 'null', '#6 path info'
- assert (
- self.get(
- headers={
- 'X-Header': 'blah',
- 'Content-Type': 'text/html',
- 'Host': 'localhost',
- 'Connection': 'close',
- }
- )['headers']['X-Reply']
- == 'blah'
- ), 'get header'
+ headers = client.get(url='/catalog/racecar.bop')['headers']
- def test_java_application_get_header_empty(self):
- self.load('get_header')
+ assert headers['X-Id'] == 'servlet4', '#7 servlet4 request'
+ assert headers['X-Request-URI'] == '/catalog/racecar.bop', '#7 request URI'
+ assert (
+ headers['X-Servlet-Path'] == '/catalog/racecar.bop'
+ ), '#7 servlet path'
+ assert headers['X-Path-Info'] == 'null', '#7 path info'
- assert 'X-Reply' not in self.get()['headers'], 'get header empty'
+ headers = client.get(url='/index.bop')['headers']
- def test_java_application_get_headers(self):
- self.load('get_headers')
+ assert headers['X-Id'] == 'servlet4', '#8 servlet4 request'
+ assert headers['X-Request-URI'] == '/index.bop', '#8 request URI'
+ assert headers['X-Servlet-Path'] == '/index.bop', '#8 servlet path'
+ assert headers['X-Path-Info'] == 'null', '#8 path info'
- headers = self.get(
- headers={
- 'X-Header': ['blah', 'blah'],
- 'Content-Type': 'text/html',
- 'Host': 'localhost',
- 'Connection': 'close',
- }
- )['headers']
+ headers = client.get(url='/foo/baz')['headers']
- assert headers['X-Reply-0'] == 'blah', 'get headers'
- assert headers['X-Reply-1'] == 'blah', 'get headers 2'
+ assert headers['X-Id'] == 'servlet0', '#9 servlet0 request'
+ assert headers['X-Request-URI'] == '/foo/baz', '#9 request URI'
+ assert headers['X-Servlet-Path'] == '/foo', '#9 servlet path'
+ assert headers['X-Path-Info'] == '/baz', '#9 path info'
- def test_java_application_get_headers_empty(self):
- self.load('get_headers')
+ headers = client.get()['headers']
- assert 'X-Reply-0' not in self.get()['headers'], 'get headers empty'
+ assert headers['X-Id'] == 'default', '#10 default request'
+ assert headers['X-Request-URI'] == '/', '#10 request URI'
+ assert headers['X-Servlet-Path'] == '/', '#10 servlet path'
+ assert headers['X-Path-Info'] == 'null', '#10 path info'
- def test_java_application_get_header_names(self):
- self.load('get_header_names')
+ headers = client.get(url='/index.bop/')['headers']
- headers = self.get()['headers']
+ assert headers['X-Id'] == 'default', '#11 default request'
+ assert headers['X-Request-URI'] == '/index.bop/', '#11 request URI'
+ assert headers['X-Servlet-Path'] == '/index.bop/', '#11 servlet path'
+ assert headers['X-Path-Info'] == 'null', '#11 path info'
- assert re.search(
- r'(?:Host|Connection)', headers['X-Reply-0']
- ), 'get header names'
- assert re.search(
- r'(?:Host|Connection)', headers['X-Reply-1']
- ), 'get header names 2'
- assert (
- headers['X-Reply-0'] != headers['X-Reply-1']
- ), 'get header names not equal'
- def test_java_application_header_int(self):
- self.load('header_int')
+def test_java_application_header():
+ client.load('header')
+
+ headers = client.get()['headers']
+
+ assert headers['X-Set-Utf8-Value'] == '????', 'set Utf8 header value'
+ assert headers['X-Set-Utf8-Name-???'] == 'x', 'set Utf8 header name'
+ assert headers['X-Add-Utf8-Value'] == '????', 'add Utf8 header value'
+ assert headers['X-Add-Utf8-Name-???'] == 'y', 'add Utf8 header name'
+ assert headers['X-Add-Test'] == 'v1', 'add null header'
+ assert 'X-Set-Test1' not in headers, 'set null header'
+ assert headers['X-Set-Test2'] == '', 'set empty header'
+
+
+def test_java_application_content_type():
+ client.load('content_type')
+
+ headers = client.get(url='/1')['headers']
+
+ assert (
+ headers['Content-Type'] == 'text/plain;charset=utf-8'
+ ), '#1 Content-Type header'
+ assert (
+ headers['X-Content-Type'] == 'text/plain;charset=utf-8'
+ ), '#1 response Content-Type'
+ assert headers['X-Character-Encoding'] == 'utf-8', '#1 response charset'
+
+ headers = client.get(url='/2')['headers']
+
+ assert (
+ headers['Content-Type'] == 'text/plain;charset=iso-8859-1'
+ ), '#2 Content-Type header'
+ assert (
+ headers['X-Content-Type'] == 'text/plain;charset=iso-8859-1'
+ ), '#2 response Content-Type'
+ assert (
+ headers['X-Character-Encoding'] == 'iso-8859-1'
+ ), '#2 response charset'
+
+ headers = client.get(url='/3')['headers']
+
+ assert (
+ headers['Content-Type'] == 'text/plain;charset=windows-1251'
+ ), '#3 Content-Type header'
+ assert (
+ headers['X-Content-Type'] == 'text/plain;charset=windows-1251'
+ ), '#3 response Content-Type'
+ assert (
+ headers['X-Character-Encoding'] == 'windows-1251'
+ ), '#3 response charset'
+
+ headers = client.get(url='/4')['headers']
+
+ assert (
+ headers['Content-Type'] == 'text/plain;charset=windows-1251'
+ ), '#4 Content-Type header'
+ assert (
+ headers['X-Content-Type'] == 'text/plain;charset=windows-1251'
+ ), '#4 response Content-Type'
+ assert (
+ headers['X-Character-Encoding'] == 'windows-1251'
+ ), '#4 response charset'
+
+ headers = client.get(url='/5')['headers']
+
+ assert (
+ headers['Content-Type'] == 'text/plain;charset=iso-8859-1'
+ ), '#5 Content-Type header'
+ assert (
+ headers['X-Content-Type'] == 'text/plain;charset=iso-8859-1'
+ ), '#5 response Content-Type'
+ assert (
+ headers['X-Character-Encoding'] == 'iso-8859-1'
+ ), '#5 response charset'
+
+ headers = client.get(url='/6')['headers']
+
+ assert 'Content-Type' not in headers, '#6 no Content-Type header'
+ assert 'X-Content-Type' not in headers, '#6 no response Content-Type'
+ assert headers['X-Character-Encoding'] == 'utf-8', '#6 response charset'
+
+ headers = client.get(url='/7')['headers']
+
+ assert (
+ headers['Content-Type'] == 'text/plain;charset=utf-8'
+ ), '#7 Content-Type header'
+ assert (
+ headers['X-Content-Type'] == 'text/plain;charset=utf-8'
+ ), '#7 response Content-Type'
+ assert headers['X-Character-Encoding'] == 'utf-8', '#7 response charset'
+
+ headers = client.get(url='/8')['headers']
+
+ assert (
+ headers['Content-Type'] == 'text/html;charset=utf-8'
+ ), '#8 Content-Type header'
+ assert (
+ headers['X-Content-Type'] == 'text/html;charset=utf-8'
+ ), '#8 response Content-Type'
+ assert headers['X-Character-Encoding'] == 'utf-8', '#8 response charset'
+
+
+def test_java_application_welcome_files():
+ client.load('welcome_files')
+
+ headers = client.get()['headers']
+
+ resp = client.get(url='/dir1')
+
+ assert resp['status'] == 302, 'dir redirect expected'
+
+ resp = client.get(url='/dir1/')
+
+ assert 'This is index.txt.' in resp['body'], 'dir1 index body'
+ assert resp['headers']['X-TXT-Filter'] == '1', 'TXT Filter header'
+
+ headers = client.get(url='/dir2/')['headers']
+
+ assert headers['X-Unit-JSP'] == 'ok', 'JSP Ok header'
+ assert headers['X-JSP-Filter'] == '1', 'JSP Filter header'
+
+ headers = client.get(url='/dir3/')['headers']
+
+ assert headers['X-App-Servlet'] == '1', 'URL pattern overrides welcome file'
+
+ headers = client.get(url='/dir4/')['headers']
+
+ assert 'X-App-Servlet' not in headers, 'Static welcome file served first'
+
+ headers = client.get(url='/dir5/')['headers']
+
+ assert (
+ headers['X-App-Servlet'] == '1'
+ ), 'Servlet for welcome file served when no static file found'
+
+
+def test_java_application_request_listeners():
+ client.load('request_listeners')
+
+ headers = client.get(url='/test1')['headers']
+
+ assert (
+ headers['X-Request-Initialized'] == '/test1'
+ ), 'request initialized event'
+ assert headers['X-Request-Destroyed'] == '', 'request destroyed event'
+ assert headers['X-Attr-Added'] == '', 'attribute added event'
+ assert headers['X-Attr-Removed'] == '', 'attribute removed event'
+ assert headers['X-Attr-Replaced'] == '', 'attribute replaced event'
+
+ headers = client.get(url='/test2?var1=1')['headers']
+
+ assert (
+ headers['X-Request-Initialized'] == '/test2'
+ ), 'request initialized event'
+ assert headers['X-Request-Destroyed'] == '/test1', 'request destroyed event'
+ assert headers['X-Attr-Added'] == 'var=1;', 'attribute added event'
+ assert headers['X-Attr-Removed'] == 'var=1;', 'attribute removed event'
+ assert headers['X-Attr-Replaced'] == '', 'attribute replaced event'
+
+ headers = client.get(url='/test3?var1=1&var2=2')['headers']
+
+ assert (
+ headers['X-Request-Initialized'] == '/test3'
+ ), 'request initialized event'
+ assert headers['X-Request-Destroyed'] == '/test2', 'request destroyed event'
+ assert headers['X-Attr-Added'] == 'var=1;', 'attribute added event'
+ assert headers['X-Attr-Removed'] == 'var=2;', 'attribute removed event'
+ assert headers['X-Attr-Replaced'] == 'var=1;', 'attribute replaced event'
+
+ headers = client.get(url='/test4?var1=1&var2=2&var3=3')['headers']
+
+ assert (
+ headers['X-Request-Initialized'] == '/test4'
+ ), 'request initialized event'
+ assert headers['X-Request-Destroyed'] == '/test3', 'request destroyed event'
+ assert headers['X-Attr-Added'] == 'var=1;', 'attribute added event'
+ assert headers['X-Attr-Removed'] == '', 'attribute removed event'
+ assert (
+ headers['X-Attr-Replaced'] == 'var=1;var=2;'
+ ), 'attribute replaced event'
+
+
+def test_java_application_request_uri_forward():
+ client.load('forward')
+
+ resp = client.get(
+ url='/fwd?uri=%2Fdata%2Ftest%3Furi%3Dnew_uri%26a%3D2%26b%3D3&a=1&c=4'
+ )
+ headers = resp['headers']
+
+ assert headers['X-REQUEST-Id'] == 'fwd', 'initial request servlet mapping'
+ assert (
+ headers['X-Forward-To'] == '/data/test?uri=new_uri&a=2&b=3'
+ ), 'forwarding triggered'
+ assert (
+ headers['X-REQUEST-Param-uri'] == '/data/test?uri=new_uri&a=2&b=3'
+ ), 'original uri parameter'
+ assert headers['X-REQUEST-Param-a'] == '1', 'original a parameter'
+ assert headers['X-REQUEST-Param-c'] == '4', 'original c parameter'
+
+ assert headers['X-FORWARD-Id'] == 'data', 'forward request servlet mapping'
+ assert (
+ headers['X-FORWARD-Request-URI'] == '/data/test'
+ ), 'forward request uri'
+ assert (
+ headers['X-FORWARD-Servlet-Path'] == '/data'
+ ), 'forward request servlet path'
+ assert (
+ headers['X-FORWARD-Path-Info'] == '/test'
+ ), 'forward request path info'
+ assert (
+ headers['X-FORWARD-Query-String'] == 'uri=new_uri&a=2&b=3'
+ ), 'forward request query string'
+ assert (
+ headers['X-FORWARD-Param-uri']
+ == 'new_uri,/data/test?uri=new_uri&a=2&b=3'
+ ), 'forward uri parameter'
+ assert headers['X-FORWARD-Param-a'] == '2,1', 'forward a parameter'
+ assert headers['X-FORWARD-Param-b'] == '3', 'forward b parameter'
+ assert headers['X-FORWARD-Param-c'] == '4', 'forward c parameter'
+
+ assert (
+ headers['X-javax.servlet.forward.request_uri'] == '/fwd'
+ ), 'original request uri'
+ assert (
+ headers['X-javax.servlet.forward.context_path'] == ''
+ ), 'original request context path'
+ assert (
+ headers['X-javax.servlet.forward.servlet_path'] == '/fwd'
+ ), 'original request servlet path'
+ assert (
+ headers['X-javax.servlet.forward.path_info'] == 'null'
+ ), 'original request path info'
+ assert (
+ headers['X-javax.servlet.forward.query_string']
+ == 'uri=%2Fdata%2Ftest%3Furi%3Dnew_uri%26a%3D2%26b%3D3&a=1&c=4'
+ ), 'original request query'
+
+ assert (
+ 'Before forwarding' not in resp['body']
+ ), 'discarded data added before forward() call'
+ assert (
+ 'X-After-Forwarding' not in headers
+ ), 'cannot add headers after forward() call'
+ assert (
+ 'After forwarding' not in resp['body']
+ ), 'cannot add data after forward() call'
+
+
+def test_java_application_named_dispatcher_forward():
+ client.load('forward')
+
+ resp = client.get(url='/fwd?disp=name&uri=data')
+ headers = resp['headers']
+
+ assert headers['X-REQUEST-Id'] == 'fwd', 'initial request servlet mapping'
+ assert headers['X-Forward-To'] == 'data', 'forwarding triggered'
+
+ assert headers['X-FORWARD-Id'] == 'data', 'forward request servlet mapping'
+ assert headers['X-FORWARD-Request-URI'] == '/fwd', 'forward request uri'
+ assert (
+ headers['X-FORWARD-Servlet-Path'] == '/fwd'
+ ), 'forward request servlet path'
+ assert headers['X-FORWARD-Path-Info'] == 'null', 'forward request path info'
+ assert (
+ headers['X-FORWARD-Query-String'] == 'disp=name&uri=data'
+ ), 'forward request query string'
+
+ assert (
+ headers['X-javax.servlet.forward.request_uri'] == 'null'
+ ), 'original request uri'
+ assert (
+ headers['X-javax.servlet.forward.context_path'] == 'null'
+ ), 'original request context path'
+ assert (
+ headers['X-javax.servlet.forward.servlet_path'] == 'null'
+ ), 'original request servlet path'
+ assert (
+ headers['X-javax.servlet.forward.path_info'] == 'null'
+ ), 'original request path info'
+ assert (
+ headers['X-javax.servlet.forward.query_string'] == 'null'
+ ), 'original request query'
+
+ assert (
+ 'Before forwarding' not in resp['body']
+ ), 'discarded data added before forward() call'
+ assert (
+ 'X-After-Forwarding' not in headers
+ ), 'cannot add headers after forward() call'
+ assert (
+ 'After forwarding' not in resp['body']
+ ), 'cannot add data after forward() call'
+
+
+def test_java_application_request_uri_include():
+ client.load('include')
+
+ resp = client.get(url='/inc?uri=/data/test')
+ headers = resp['headers']
+ body = resp['body']
+
+ assert headers['X-REQUEST-Id'] == 'inc', 'initial request servlet mapping'
+ assert headers['X-Include'] == '/data/test', 'including triggered'
+
+ assert (
+ 'X-INCLUDE-Id' not in headers
+ ), 'unable to add headers in include request'
+
+ assert (
+ 'javax.servlet.include.request_uri: /data/test' in body
+ ), 'include request uri'
+ # assert (
+ # 'javax.servlet.include.context_path: ' in body
+ # ) == True, 'include request context path'
+ assert (
+ 'javax.servlet.include.servlet_path: /data' in body
+ ), 'include request servlet path'
+ assert (
+ 'javax.servlet.include.path_info: /test' in body
+ ), 'include request path info'
+ assert (
+ 'javax.servlet.include.query_string: null' in body
+ ), 'include request query'
+
+ assert 'Before include' in body, 'preserve data added before include() call'
+ assert (
+ headers['X-After-Include'] == 'you-should-see-this'
+ ), 'add headers after include() call'
+ assert 'After include' in body, 'add data after include() call'
+
+
+def test_java_application_named_dispatcher_include():
+ client.load('include')
+
+ resp = client.get(url='/inc?disp=name&uri=data')
+ headers = resp['headers']
+ body = resp['body']
+
+ assert headers['X-REQUEST-Id'] == 'inc', 'initial request servlet mapping'
+ assert headers['X-Include'] == 'data', 'including triggered'
+
+ assert (
+ 'X-INCLUDE-Id' not in headers
+ ), 'unable to add headers in include request'
+
+ assert (
+ 'javax.servlet.include.request_uri: null' in body
+ ), 'include request uri'
+ # assert (
+ # 'javax.servlet.include.context_path: null' in body
+ # ) == True, 'include request context path'
+ assert (
+ 'javax.servlet.include.servlet_path: null' in body
+ ), 'include request servlet path'
+ assert (
+ 'javax.servlet.include.path_info: null' in body
+ ), 'include request path info'
+ assert (
+ 'javax.servlet.include.query_string: null' in body
+ ), 'include request query'
+
+ assert 'Before include' in body, 'preserve data added before include() call'
+ assert (
+ headers['X-After-Include'] == 'you-should-see-this'
+ ), 'add headers after include() call'
+ assert 'After include' in body, 'add data after include() call'
+
+
+def test_java_application_path_translation():
+ client.load('path_translation')
+
+ headers = client.get(url='/pt/test?path=/')['headers']
+
+ assert headers['X-Servlet-Path'] == '/pt', 'matched servlet path'
+ assert headers['X-Path-Info'] == '/test', 'the rest of the path'
+ assert (
+ headers['X-Path-Translated']
+ == f"{headers['X-Real-Path']}{headers['X-Path-Info']}"
+ ), 'translated path is the app root + path info'
+ assert headers['X-Resource-Paths'].endswith(
+ '/WEB-INF/, /index.html]'
+ ), 'app root directory content'
+ assert (
+ headers['X-Resource-As-Stream'] == 'null'
+ ), 'no resource stream for root path'
+
+ headers = client.get(url='/test?path=/none')['headers']
+
+ assert headers['X-Servlet-Path'] == '/test', 'matched whole path'
+ assert (
+ headers['X-Path-Info'] == 'null'
+ ), 'the rest of the path is null, whole path matched'
+ assert (
+ headers['X-Path-Translated'] == 'null'
+ ), 'translated path is null because path info is null'
+ assert headers['X-Real-Path'].endswith('/none'), 'real path is not null'
+ assert headers['X-Resource-Paths'] == 'null', 'no resource found'
+ assert headers['X-Resource-As-Stream'] == 'null', 'no resource stream'
+
+
+def test_java_application_query_string():
+ client.load('query_string')
+
+ assert (
+ client.get(url='/?a=b')['headers']['X-Query-String'] == 'a=b'
+ ), 'query string'
+
+
+def test_java_application_query_empty():
+ client.load('query_string')
+
+ assert (
+ client.get(url='/?')['headers']['X-Query-String'] == ''
+ ), 'query string empty'
+
+
+def test_java_application_query_absent():
+ client.load('query_string')
+
+ assert (
+ client.get()['headers']['X-Query-String'] == 'null'
+ ), 'query string absent'
- headers = self.get(
- headers={
- 'X-Header': '2',
- 'Content-Type': 'text/html',
- 'Host': 'localhost',
- 'Connection': 'close',
- }
- )['headers']
- assert headers['X-Set-Int'] == '1', 'set int header'
- assert headers['X-Get-Int'] == '2', 'get int header'
+def test_java_application_empty():
+ client.load('empty')
- def test_java_application_header_date(self):
- self.load('header_date')
+ assert client.get()['status'] == 200, 'empty'
- date = 'Fri, 15 Mar 2019 14:45:34 GMT'
- headers = self.get(
+def test_java_application_keepalive_body():
+ client.load('mirror')
+
+ assert client.post()['status'] == 200, 'init'
+
+ body = '0123456789' * 500
+ (resp, sock) = client.post(
+ headers={
+ 'Connection': 'keep-alive',
+ 'Content-Type': 'text/html',
+ 'Host': 'localhost',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
+
+ assert resp['body'] == body, 'keep-alive 1'
+
+ body = '0123456789'
+ resp = client.post(
+ headers={
+ 'Connection': 'close',
+ 'Content-Type': 'text/html',
+ 'Host': 'localhost',
+ },
+ sock=sock,
+ body=body,
+ )
+
+ assert resp['body'] == body, 'keep-alive 2'
+
+
+def test_java_application_http_10():
+ client.load('empty')
+
+ assert client.get(http_10=True)['status'] == 200, 'HTTP 1.0'
+
+
+def test_java_application_no_method():
+ client.load('empty')
+
+ assert client.post()['status'] == 405, 'no method'
+
+
+def test_java_application_get_header():
+ client.load('get_header')
+
+ assert (
+ client.get(
headers={
- 'X-Header': date,
+ 'X-Header': 'blah',
'Content-Type': 'text/html',
'Host': 'localhost',
'Connection': 'close',
}
- )['headers']
-
- assert (
- headers['X-Set-Date'] == 'Thu, 01 Jan 1970 00:00:01 GMT'
- ), 'set date header'
- assert headers['X-Get-Date'] == date, 'get date header'
-
- def test_java_application_multipart(self, temp_dir):
- self.load('multipart')
-
- reldst = '/uploads'
- fulldst = f'{temp_dir}{reldst}'
- os.mkdir(fulldst)
- public_dir(fulldst)
-
- fields = {
- 'file': {
- 'filename': 'sample.txt',
- 'type': 'text/plain',
- 'data': io.StringIO('Data from sample file'),
- },
- 'destination': fulldst,
- 'upload': 'Upload',
+ )['headers']['X-Reply']
+ == 'blah'
+ ), 'get header'
+
+
+def test_java_application_get_header_empty():
+ client.load('get_header')
+
+ assert 'X-Reply' not in client.get()['headers'], 'get header empty'
+
+
+def test_java_application_get_headers():
+ client.load('get_headers')
+
+ headers = client.get(
+ headers={
+ 'X-Header': ['blah', 'blah'],
+ 'Content-Type': 'text/html',
+ 'Host': 'localhost',
+ 'Connection': 'close',
}
+ )['headers']
- encoded, content_type = self.multipart_encode(fields)
+ assert headers['X-Reply-0'] == 'blah', 'get headers'
+ assert headers['X-Reply-1'] == 'blah', 'get headers 2'
- preamble = 'Preamble. Should be ignored.'
- epilogue = 'Epilogue. Should be ignored.'
- body = "%s\r\n%s\r\n%s" % (preamble, encoded.decode(), epilogue)
- resp = self.post(
- headers={
- 'Content-Type': content_type,
- 'Host': 'localhost',
- 'Connection': 'close',
- },
- body=body,
- )
+def test_java_application_get_headers_empty():
+ client.load('get_headers')
+
+ assert 'X-Reply-0' not in client.get()['headers'], 'get headers empty'
+
+
+def test_java_application_get_header_names():
+ client.load('get_header_names')
+
+ headers = client.get()['headers']
+
+ assert re.search(
+ r'(?:Host|Connection)', headers['X-Reply-0']
+ ), 'get header names'
+ assert re.search(
+ r'(?:Host|Connection)', headers['X-Reply-1']
+ ), 'get header names 2'
+ assert (
+ headers['X-Reply-0'] != headers['X-Reply-1']
+ ), 'get header names not equal'
- assert resp['status'] == 200, 'multipart status'
- assert re.search(r'sample\.txt created', resp['body']), 'multipart body'
- assert (
- self.search_in_log(
- r'^Data from sample file$', name=f'{reldst}/sample.txt'
- )
- is not None
- ), 'file created'
- def test_java_application_threads(self):
- self.load('threads')
+def test_java_application_header_int():
+ client.load('header_int')
+
+ headers = client.get(
+ headers={
+ 'X-Header': '2',
+ 'Content-Type': 'text/html',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ }
+ )['headers']
+
+ assert headers['X-Set-Int'] == '1', 'set int header'
+ assert headers['X-Get-Int'] == '2', 'get int header'
- assert 'success' in self.conf(
- '4', 'applications/threads/threads'
- ), 'configure 4 threads'
- socks = []
+def test_java_application_header_date():
+ client.load('header_date')
- for i in range(4):
- sock = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Delay': '2',
- 'Connection': 'close',
- },
- no_recv=True,
- )
+ date = 'Fri, 15 Mar 2019 14:45:34 GMT'
+
+ headers = client.get(
+ headers={
+ 'X-Header': date,
+ 'Content-Type': 'text/html',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ }
+ )['headers']
+
+ assert (
+ headers['X-Set-Date'] == 'Thu, 01 Jan 1970 00:00:01 GMT'
+ ), 'set date header'
+ assert headers['X-Get-Date'] == date, 'get date header'
+
+
+def test_java_application_multipart(search_in_file, temp_dir):
+ client.load('multipart')
+
+ reldst = '/uploads'
+ fulldst = f'{temp_dir}{reldst}'
+ os.mkdir(fulldst)
+ public_dir(fulldst)
+
+ fields = {
+ 'file': {
+ 'filename': 'sample.txt',
+ 'type': 'text/plain',
+ 'data': io.StringIO('Data from sample file'),
+ },
+ 'destination': fulldst,
+ 'upload': 'Upload',
+ }
+
+ encoded, content_type = client.multipart_encode(fields)
+
+ preamble = 'Preamble. Should be ignored.'
+ epilogue = 'Epilogue. Should be ignored.'
+ body = f'{preamble}\r\n{encoded.decode()}\r\n{epilogue}'
+
+ resp = client.post(
+ headers={
+ 'Content-Type': content_type,
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ },
+ body=body,
+ )
+
+ assert resp['status'] == 200, 'multipart status'
+ assert re.search(r'sample\.txt created', resp['body']), 'multipart body'
+ assert (
+ search_in_file(r'^Data from sample file$', name=f'{reldst}/sample.txt')
+ is not None
+ ), 'file created'
+
+
+def test_java_application_threads():
+ client.load('threads')
+
+ assert 'success' in client.conf(
+ '4', 'applications/threads/threads'
+ ), 'configure 4 threads'
+
+ socks = []
+
+ for _ in range(4):
+ sock = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Delay': '2',
+ 'Connection': 'close',
+ },
+ no_recv=True,
+ )
- socks.append(sock)
+ socks.append(sock)
- time.sleep(0.25) # required to avoid greedy request reading
+ time.sleep(0.25) # required to avoid greedy request reading
- threads = set()
+ threads = set()
- for sock in socks:
- resp = self.recvall(sock).decode('utf-8')
+ for sock in socks:
+ resp = client.recvall(sock).decode('utf-8')
- self.log_in(resp)
+ client.log_in(resp)
- resp = self._resp_to_dict(resp)
+ resp = client._resp_to_dict(resp)
- assert resp['status'] == 200, 'status'
+ assert resp['status'] == 200, 'status'
- threads.add(resp['headers']['X-Thread'])
+ threads.add(resp['headers']['X-Thread'])
- sock.close()
+ sock.close()
- assert len(socks) == len(threads), 'threads differs'
+ assert len(socks) == len(threads), 'threads differ'
diff --git a/test/test_java_isolation_rootfs.py b/test/test_java_isolation_rootfs.py
index 28668997..66b2a81e 100644
--- a/test/test_java_isolation_rootfs.py
+++ b/test/test_java_isolation_rootfs.py
@@ -2,74 +2,65 @@ import os
import subprocess
import pytest
-from unit.applications.lang.java import TestApplicationJava
+from unit.applications.lang.java import ApplicationJava
from unit.option import option
+prerequisites = {'modules': {'java': 'all'}, 'privileged_user': True}
-class TestJavaIsolationRootfs(TestApplicationJava):
- prerequisites = {'modules': {'java': 'all'}}
+client = ApplicationJava()
- def setup_method(self, is_su):
- if not is_su:
- pytest.skip('require root')
- os.makedirs(f'{option.temp_dir}/jars')
- os.makedirs(f'{option.temp_dir}/tmp')
- os.chmod(f'{option.temp_dir}/tmp', 0o777)
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ os.makedirs(f'{temp_dir}/jars')
+ os.makedirs(f'{temp_dir}/tmp')
+ os.chmod(f'{temp_dir}/tmp', 0o777)
- try:
- subprocess.run(
- [
- "mount",
- "--bind",
- f'{option.current_dir}/build',
- f'{option.temp_dir}/jars',
- ],
- stderr=subprocess.STDOUT,
- )
-
- except KeyboardInterrupt:
- raise
+ try:
+ subprocess.run(
+ [
+ "mount",
+ "--bind",
+ f'{option.current_dir}/build',
+ f'{temp_dir}/jars',
+ ],
+ stderr=subprocess.STDOUT,
+ )
- except subprocess.CalledProcessError:
- pytest.fail("Can't run mount process.")
+ except KeyboardInterrupt:
+ raise
- def teardown_method(self, is_su):
- if not is_su:
- return
+ except subprocess.CalledProcessError:
+ pytest.fail("Can't run mount process.")
- try:
- subprocess.run(
- ["umount", "--lazy", f"{option.temp_dir}/jars"],
- stderr=subprocess.STDOUT,
- )
+ yield
- except KeyboardInterrupt:
- raise
+ try:
+ subprocess.run(
+ ["umount", "--lazy", f"{option.temp_dir}/jars"],
+ stderr=subprocess.STDOUT,
+ )
- except subprocess.CalledProcessError:
- pytest.fail("Can't run umount process.")
+ except KeyboardInterrupt:
+ raise
- def test_java_isolation_rootfs_chroot_war(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('require root')
+ except subprocess.CalledProcessError:
+ pytest.fail("Can't run umount process.")
- isolation = {
- 'rootfs': temp_dir,
- }
- self.load('empty_war', isolation=isolation)
+def test_java_isolation_rootfs_chroot_war(temp_dir):
+ client.load('empty_war', isolation={'rootfs': temp_dir})
- assert 'success' in self.conf(
- '"/"',
- '/config/applications/empty_war/working_directory',
- )
+ assert 'success' in client.conf(
+ '"/"',
+ '/config/applications/empty_war/working_directory',
+ )
- assert 'success' in self.conf(
- '"/jars"', 'applications/empty_war/unit_jars'
- )
- assert 'success' in self.conf(
- '"/java/empty.war"', 'applications/empty_war/webapp'
- )
+ assert 'success' in client.conf(
+ '"/jars"', 'applications/empty_war/unit_jars'
+ )
+ assert 'success' in client.conf(
+ '"/java/empty.war"', 'applications/empty_war/webapp'
+ )
- assert self.get()['status'] == 200, 'war'
+ assert client.get()['status'] == 200, 'war'
diff --git a/test/test_java_websockets.py b/test/test_java_websockets.py
index 8de45a06..c323830b 100644
--- a/test/test_java_websockets.py
+++ b/test/test_java_websockets.py
@@ -2,1408 +2,1413 @@ import struct
import time
import pytest
-from unit.applications.lang.java import TestApplicationJava
-from unit.applications.websockets import TestApplicationWebsocket
-from unit.option import option
+from unit.applications.lang.java import ApplicationJava
+from unit.applications.websockets import ApplicationWebsocket
+prerequisites = {'modules': {'java': 'any'}}
-class TestJavaWebsockets(TestApplicationJava):
- prerequisites = {'modules': {'java': 'any'}}
+client = ApplicationJava()
+ws = ApplicationWebsocket()
- ws = TestApplicationWebsocket()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request, skip_alert):
- assert 'success' in self.conf(
- {'http': {'websocket': {'keepalive_interval': 0}}}, 'settings'
- ), 'clear keepalive_interval'
+@pytest.fixture(autouse=True)
+def setup_method_fixture(skip_alert):
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'keepalive_interval': 0}}}, 'settings'
+ ), 'clear keepalive_interval'
- skip_alert(r'socket close\(\d+\) failed')
+ skip_alert(r'socket close\(\d+\) failed')
- def close_connection(self, sock):
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+def close_connection(sock):
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty sock'
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
- def check_close(self, sock, code=1000, no_close=False, frame=None):
- if frame == None:
- frame = self.ws.frame_read(sock)
+ check_close(sock)
- assert frame['fin'] == True, 'close fin'
- assert frame['opcode'] == self.ws.OP_CLOSE, 'close opcode'
- assert frame['code'] == code, 'close code'
- if not no_close:
- sock.close()
+def check_close(sock, code=1000, no_close=False, frame=None):
+ if frame is None:
+ frame = ws.frame_read(sock)
- def check_frame(self, frame, fin, opcode, payload, decode=True):
- if opcode == self.ws.OP_BINARY or not decode:
- data = frame['data']
- else:
- data = frame['data'].decode('utf-8')
+ assert frame['fin'], 'close fin'
+ assert frame['opcode'] == ws.OP_CLOSE, 'close opcode'
+ assert frame['code'] == code, 'close code'
- assert frame['fin'] == fin, 'fin'
- assert frame['opcode'] == opcode, 'opcode'
- assert data == payload, 'payload'
+ if not no_close:
+ sock.close()
- def test_java_websockets_handshake(self):
- self.load('websockets_mirror')
- resp, sock, key = self.ws.upgrade()
- sock.close()
+def check_frame(frame, fin, opcode, payload, decode=True):
+ if opcode == ws.OP_BINARY or not decode:
+ data = frame['data']
+ else:
+ data = frame['data'].decode('utf-8')
- assert resp['status'] == 101, 'status'
- assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
- assert resp['headers']['Connection'] == 'Upgrade', 'connection'
- assert resp['headers']['Sec-WebSocket-Accept'] == self.ws.accept(
- key
- ), 'key'
+ assert frame['fin'] == fin, 'fin'
+ assert frame['opcode'] == opcode, 'opcode'
+ assert data == payload, 'payload'
- def test_java_websockets_mirror(self):
- self.load('websockets_mirror')
- message = 'blah'
+def test_java_websockets_handshake():
+ client.load('websockets_mirror')
- _, sock, _ = self.ws.upgrade()
+ resp, sock, key = ws.upgrade()
+ sock.close()
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
+ assert resp['status'] == 101, 'status'
+ assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
+ assert resp['headers']['Connection'] == 'Upgrade', 'connection'
+ assert resp['headers']['Sec-WebSocket-Accept'] == ws.accept(key), 'key'
- assert message == frame['data'].decode('utf-8'), 'mirror'
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
+def test_java_websockets_mirror():
+ client.load('websockets_mirror')
- assert message == frame['data'].decode('utf-8'), 'mirror 2'
+ message = 'blah'
- sock.close()
+ _, sock, _ = ws.upgrade()
- def test_java_websockets_no_mask(self):
- self.load('websockets_mirror')
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- message = 'blah'
+ assert message == frame['data'].decode('utf-8'), 'mirror'
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- self.ws.frame_write(sock, self.ws.OP_TEXT, message, mask=False)
+ assert message == frame['data'].decode('utf-8'), 'mirror 2'
- frame = self.ws.frame_read(sock)
+ sock.close()
- assert frame['opcode'] == self.ws.OP_CLOSE, 'no mask opcode'
- assert frame['code'] == 1002, 'no mask close code'
- sock.close()
+def test_java_websockets_no_mask():
+ client.load('websockets_mirror')
- def test_java_websockets_fragmentation(self):
- self.load('websockets_mirror')
+ message = 'blah'
- message = 'blah'
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, message, mask=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, message, fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, ' ', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, message)
+ frame = ws.frame_read(sock)
- frame = self.ws.frame_read(sock)
+ assert frame['opcode'] == ws.OP_CLOSE, 'no mask opcode'
+ assert frame['code'] == 1002, 'no mask close code'
- assert f'{message} {message}' == frame['data'].decode(
- 'utf-8'
- ), 'mirror framing'
+ sock.close()
- sock.close()
- def test_java_websockets_frame_fragmentation_invalid(self):
- self.load('websockets_mirror')
+def test_java_websockets_fragmentation():
+ client.load('websockets_mirror')
- message = 'blah'
+ message = 'blah'
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_PING, message, fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, message, fin=False)
+ ws.frame_write(sock, ws.OP_CONT, ' ', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, message)
- frame = self.ws.frame_read(sock)
+ frame = ws.frame_read(sock)
- frame.pop('data')
- assert frame == {
- 'fin': True,
- 'rsv1': False,
- 'rsv2': False,
- 'rsv3': False,
- 'opcode': self.ws.OP_CLOSE,
- 'mask': 0,
- 'code': 1002,
- 'reason': 'Fragmented control frame',
- }, 'close frame'
+ assert f'{message} {message}' == frame['data'].decode(
+ 'utf-8'
+ ), 'mirror framing'
- sock.close()
+ sock.close()
- def test_java_websockets_two_clients(self):
- self.load('websockets_mirror')
- message1 = 'blah1'
- message2 = 'blah2'
+def test_java_websockets_frame_fragmentation_invalid():
+ client.load('websockets_mirror')
- _, sock1, _ = self.ws.upgrade()
- _, sock2, _ = self.ws.upgrade()
+ message = 'blah'
- self.ws.frame_write(sock1, self.ws.OP_TEXT, message1)
- self.ws.frame_write(sock2, self.ws.OP_TEXT, message2)
+ _, sock, _ = ws.upgrade()
- frame1 = self.ws.frame_read(sock1)
- frame2 = self.ws.frame_read(sock2)
+ ws.frame_write(sock, ws.OP_PING, message, fin=False)
- assert message1 == frame1['data'].decode('utf-8'), 'client 1'
- assert message2 == frame2['data'].decode('utf-8'), 'client 2'
+ frame = ws.frame_read(sock)
- sock1.close()
- sock2.close()
+ frame.pop('data')
+ assert frame == {
+ 'fin': True,
+ 'rsv1': False,
+ 'rsv2': False,
+ 'rsv3': False,
+ 'opcode': ws.OP_CLOSE,
+ 'mask': 0,
+ 'code': 1002,
+ 'reason': 'Fragmented control frame',
+ }, 'close frame'
- @pytest.mark.skip('not yet')
- def test_java_websockets_handshake_upgrade_absent(
- self,
- ): # FAIL https://tools.ietf.org/html/rfc6455#section-4.2.1
- self.load('websockets_mirror')
+ sock.close()
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
- assert resp['status'] == 400, 'upgrade absent'
+def test_java_websockets_two_clients():
+ client.load('websockets_mirror')
- def test_java_websockets_handshake_case_insensitive(self):
- self.load('websockets_mirror')
+ message1 = 'blah1'
+ message2 = 'blah2'
- resp, sock, _ = self.ws.upgrade(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'WEBSOCKET',
- 'Connection': 'UPGRADE',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- }
- )
- sock.close()
+ _, sock1, _ = ws.upgrade()
+ _, sock2, _ = ws.upgrade()
- assert resp['status'] == 101, 'status'
-
- @pytest.mark.skip('not yet')
- def test_java_websockets_handshake_connection_absent(self): # FAIL
- self.load('websockets_mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_java_websockets_handshake_version_absent(self):
- self.load('websockets_mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- },
- )
-
- assert resp['status'] == 426, 'status'
-
- @pytest.mark.skip('not yet')
- def test_java_websockets_handshake_key_invalid(self):
- self.load('websockets_mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': '!',
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'key length'
-
- key = self.ws.key()
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': [key, key],
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert (
- resp['status'] == 400
- ), 'key double' # FAIL https://tools.ietf.org/html/rfc6455#section-11.3.1
-
- def test_java_websockets_handshake_method_invalid(self):
- self.load('websockets_mirror')
-
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_java_websockets_handshake_http_10(self):
- self.load('websockets_mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- http_10=True,
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_java_websockets_handshake_uri_invalid(self):
- self.load('websockets_mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- url='!',
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_java_websockets_protocol_absent(self):
- self.load('websockets_mirror')
-
- key = self.ws.key()
- resp, sock, _ = self.ws.upgrade(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': key,
- 'Sec-WebSocket-Version': 13,
- }
- )
- sock.close()
+ ws.frame_write(sock1, ws.OP_TEXT, message1)
+ ws.frame_write(sock2, ws.OP_TEXT, message2)
- assert resp['status'] == 101, 'status'
- assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
- assert resp['headers']['Connection'] == 'Upgrade', 'connection'
- assert resp['headers']['Sec-WebSocket-Accept'] == self.ws.accept(
- key
- ), 'key'
+ frame1 = ws.frame_read(sock1)
+ frame2 = ws.frame_read(sock2)
- # autobahn-testsuite
- #
- # Some following tests fail because of Unit does not support UTF-8
- # validation for websocket frames. It should be implemented
- # by application, if necessary.
+ assert message1 == frame1['data'].decode('utf-8'), 'client 1'
+ assert message2 == frame2['data'].decode('utf-8'), 'client 2'
- def test_java_websockets_1_1_1__1_1_8(self):
- self.load('websockets_mirror')
+ sock1.close()
+ sock2.close()
- opcode = self.ws.OP_TEXT
- _, sock, _ = self.ws.upgrade()
+# FAIL https://tools.ietf.org/html/rfc6455#section-4.2.1
+@pytest.mark.skip('not yet')
+def test_java_websockets_handshake_upgrade_absent():
+ client.load('websockets_mirror')
- def check_length(length, chopsize=None):
- payload = '*' * length
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
+ assert resp['status'] == 400, 'upgrade absent'
- frame = self.ws.message_read(sock)
- self.check_frame(frame, True, opcode, payload)
- check_length(0) # 1_1_1
- check_length(125) # 1_1_2
- check_length(126) # 1_1_3
- check_length(127) # 1_1_4
- check_length(128) # 1_1_5
- check_length(65535) # 1_1_6
- check_length(65536) # 1_1_7
- check_length(65536, chopsize=997) # 1_1_8
+def test_java_websockets_handshake_case_insensitive():
+ client.load('websockets_mirror')
- self.close_connection(sock)
+ resp, sock, _ = ws.upgrade(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'WEBSOCKET',
+ 'Connection': 'UPGRADE',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ }
+ )
+ sock.close()
- def test_java_websockets_1_2_1__1_2_8(self):
- self.load('websockets_mirror')
+ assert resp['status'] == 101, 'status'
- opcode = self.ws.OP_BINARY
- _, sock, _ = self.ws.upgrade()
+@pytest.mark.skip('not yet')
+def test_java_websockets_handshake_connection_absent(): # FAIL
+ client.load('websockets_mirror')
- def check_length(length, chopsize=None):
- payload = b'\xfe' * length
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert resp['status'] == 400, 'status'
+
+
+def test_java_websockets_handshake_version_absent():
+ client.load('websockets_mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ },
+ )
+
+ assert resp['status'] == 426, 'status'
+
+
+@pytest.mark.skip('not yet')
+def test_java_websockets_handshake_key_invalid():
+ client.load('websockets_mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': '!',
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert resp['status'] == 400, 'key length'
+
+ key = ws.key()
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': [key, key],
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert (
+ resp['status'] == 400
+ ), 'key double' # FAIL https://tools.ietf.org/html/rfc6455#section-11.3.1
+
+
+def test_java_websockets_handshake_method_invalid():
+ client.load('websockets_mirror')
+
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert resp['status'] == 400, 'status'
+
+
+def test_java_websockets_handshake_http_10():
+ client.load('websockets_mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ http_10=True,
+ )
+
+ assert resp['status'] == 400, 'status'
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- frame = self.ws.message_read(sock)
- self.check_frame(frame, True, opcode, payload)
+def test_java_websockets_handshake_uri_invalid():
+ client.load('websockets_mirror')
- check_length(0) # 1_2_1
- check_length(125) # 1_2_2
- check_length(126) # 1_2_3
- check_length(127) # 1_2_4
- check_length(128) # 1_2_5
- check_length(65535) # 1_2_6
- check_length(65536) # 1_2_7
- check_length(65536, chopsize=997) # 1_2_8
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ url='!',
+ )
- self.close_connection(sock)
+ assert resp['status'] == 400, 'status'
- def test_java_websockets_2_1__2_6(self):
- self.load('websockets_mirror')
- op_ping = self.ws.OP_PING
- op_pong = self.ws.OP_PONG
+def test_java_websockets_protocol_absent():
+ client.load('websockets_mirror')
- _, sock, _ = self.ws.upgrade()
+ key = ws.key()
+ resp, sock, _ = ws.upgrade(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': key,
+ 'Sec-WebSocket-Version': 13,
+ }
+ )
+ sock.close()
- def check_ping(payload, chopsize=None, decode=True):
- self.ws.frame_write(sock, op_ping, payload, chopsize=chopsize)
- frame = self.ws.frame_read(sock)
+ assert resp['status'] == 101, 'status'
+ assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
+ assert resp['headers']['Connection'] == 'Upgrade', 'connection'
+ assert resp['headers']['Sec-WebSocket-Accept'] == ws.accept(key), 'key'
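+    # ws.accept(key) is expected to follow RFC 6455 section 4.2.2: the
+    # base64-encoded SHA-1 digest of the key concatenated with the fixed
+    # GUID. A rough equivalent (sketch, not the framework's code):
+    #
+    #   import base64, hashlib
+    #   guid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
+    #   accept = base64.b64encode(
+    #       hashlib.sha1((key + guid).encode()).digest()
+    #   ).decode()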
- self.check_frame(frame, True, op_pong, payload, decode=decode)
- check_ping('') # 2_1
- check_ping('Hello, world!') # 2_2
- check_ping(b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff', decode=False) # 2_3
- check_ping(b'\xfe' * 125, decode=False) # 2_4
- check_ping(b'\xfe' * 125, chopsize=1, decode=False) # 2_6
+# autobahn-testsuite
+#
+# Some of the following tests fail because Unit does not support UTF-8
+# validation for websocket frames. It should be implemented by the
+# application, if necessary.
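+# A minimal sketch of what such application-side validation could look like
+# (hypothetical helper, not part of this suite): decode text payloads
+# strictly and close with 1007 on failure.
+#
+#   def validate_text_frame(sock, frame):
+#       try:
+#           frame['data'].decode('utf-8')
+#       except UnicodeDecodeError:
+#           ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close(code=1007))
+#           return False
+#       return True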
- self.close_connection(sock)
- # 2_5
+def test_java_websockets_1_1_1__1_1_8():
+ client.load('websockets_mirror')
- _, sock, _ = self.ws.upgrade()
+ opcode = ws.OP_TEXT
- self.ws.frame_write(sock, self.ws.OP_PING, b'\xfe' * 126)
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- def test_java_websockets_2_7__2_9(self):
- self.load('websockets_mirror')
+ def check_length(length, chopsize=None):
+ payload = '*' * length
- # 2_7
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.message_read(sock)
+ check_frame(frame, True, opcode, payload)
- self.ws.frame_write(sock, self.ws.OP_PONG, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', '2_7'
+ check_length(0) # 1_1_1
+ check_length(125) # 1_1_2
+ check_length(126) # 1_1_3
+ check_length(127) # 1_1_4
+ check_length(128) # 1_1_5
+ check_length(65535) # 1_1_6
+ check_length(65536) # 1_1_7
+ check_length(65536, chopsize=997) # 1_1_8
- # 2_8
+ close_connection(sock)
- self.ws.frame_write(sock, self.ws.OP_PONG, 'unsolicited pong payload')
- assert self.recvall(sock, read_timeout=0.1) == b'', '2_8'
- # 2_9
+def test_java_websockets_1_2_1__1_2_8():
+ client.load('websockets_mirror')
- payload = 'ping payload'
+ opcode = ws.OP_BINARY
- self.ws.frame_write(sock, self.ws.OP_PONG, 'unsolicited pong payload')
- self.ws.frame_write(sock, self.ws.OP_PING, payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, payload)
+ def check_length(length, chopsize=None):
+ payload = b'\xfe' * length
- self.close_connection(sock)
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- def test_java_websockets_2_10__2_11(self):
- self.load('websockets_mirror')
+ frame = ws.message_read(sock)
+ check_frame(frame, True, opcode, payload)
- # 2_10
+ check_length(0) # 1_2_1
+ check_length(125) # 1_2_2
+ check_length(126) # 1_2_3
+ check_length(127) # 1_2_4
+ check_length(128) # 1_2_5
+ check_length(65535) # 1_2_6
+ check_length(65536) # 1_2_7
+ check_length(65536, chopsize=997) # 1_2_8
- _, sock, _ = self.ws.upgrade()
+ close_connection(sock)
- for i in range(0, 10):
- self.ws.frame_write(sock, self.ws.OP_PING, f'payload-{i}')
- for i in range(0, 10):
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, f'payload-{i}')
+def test_java_websockets_2_1__2_6():
+ client.load('websockets_mirror')
- # 2_11
+ op_ping = ws.OP_PING
+ op_pong = ws.OP_PONG
- for i in range(0, 10):
- opcode = self.ws.OP_PING
- self.ws.frame_write(sock, opcode, f'payload-{i}', chopsize=1)
+ _, sock, _ = ws.upgrade()
- for i in range(0, 10):
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, f'payload-{i}')
+ def check_ping(payload, chopsize=None, decode=True):
+ ws.frame_write(sock, op_ping, payload, chopsize=chopsize)
+ frame = ws.frame_read(sock)
- self.close_connection(sock)
+ check_frame(frame, True, op_pong, payload, decode=decode)
- @pytest.mark.skip('not yet')
- def test_java_websockets_3_1__3_7(self):
- self.load('websockets_mirror')
+ check_ping('') # 2_1
+ check_ping('Hello, world!') # 2_2
+ check_ping(b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff', decode=False) # 2_3
+ check_ping(b'\xfe' * 125, decode=False) # 2_4
+ check_ping(b'\xfe' * 125, chopsize=1, decode=False) # 2_6
- payload = 'Hello, world!'
+ close_connection(sock)
- # 3_1
+ # 2_5
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, rsv1=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_PING, b'\xfe' * 126)
+ check_close(sock, 1002)
- # 3_2
- _, sock, _ = self.ws.upgrade()
+def test_java_websockets_2_7__2_9():
+ client.load('websockets_mirror')
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, rsv2=True)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 2_7
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- self.check_close(sock, 1002, no_close=True)
+ ws.frame_write(sock, ws.OP_PONG, '')
+ assert client.recvall(sock, read_timeout=0.1) == b'', '2_7'
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_2'
- sock.close()
+ # 2_8
- # 3_3
+ ws.frame_write(sock, ws.OP_PONG, 'unsolicited pong payload')
+ assert client.recvall(sock, read_timeout=0.1) == b'', '2_8'
- _, sock, _ = self.ws.upgrade()
+ # 2_9
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ payload = 'ping payload'
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_PONG, 'unsolicited pong payload')
+ ws.frame_write(sock, ws.OP_PING, payload)
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, payload, rsv1=True, rsv2=True
- )
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, payload)
- self.check_close(sock, 1002, no_close=True)
+ close_connection(sock)
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_3'
- sock.close()
- # 3_4
+def test_java_websockets_2_10__2_11():
+ client.load('websockets_mirror')
- _, sock, _ = self.ws.upgrade()
+ # 2_10
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, payload, rsv3=True, chopsize=1
- )
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ for i in range(0, 10):
+ ws.frame_write(sock, ws.OP_PING, f'payload-{i}')
- self.check_close(sock, 1002, no_close=True)
+ for i in range(0, 10):
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, f'payload-{i}')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_4'
- sock.close()
+ # 2_11
- # 3_5
+ for i in range(0, 10):
+ opcode = ws.OP_PING
+ ws.frame_write(sock, opcode, f'payload-{i}', chopsize=1)
- _, sock, _ = self.ws.upgrade()
+ for i in range(0, 10):
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, f'payload-{i}')
- self.ws.frame_write(
- sock,
- self.ws.OP_BINARY,
- b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff',
- rsv1=True,
- rsv3=True,
- )
+ close_connection(sock)
- self.check_close(sock, 1002)
- # 3_6
+@pytest.mark.skip('not yet')
+def test_java_websockets_3_1__3_7():
+ client.load('websockets_mirror')
- _, sock, _ = self.ws.upgrade()
+ payload = 'Hello, world!'
- self.ws.frame_write(
- sock, self.ws.OP_PING, payload, rsv2=True, rsv3=True
- )
+ # 3_1
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 3_7
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv1=True)
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
+ # 3_2
- self.ws.frame_write(
- sock, self.ws.OP_CLOSE, payload, rsv1=True, rsv2=True, rsv3=True
- )
+ _, sock, _ = ws.upgrade()
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv2=True)
+ ws.frame_write(sock, ws.OP_PING, '')
- def test_java_websockets_4_1_1__4_2_5(self):
- self.load('websockets_mirror')
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- payload = 'Hello, world!'
+ check_close(sock, 1002, no_close=True)
- # 4_1_1
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_2'
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ # 3_3
- self.ws.frame_write(sock, 0x03, '')
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_1_2
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, 0x04, 'reserved opcode payload')
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv1=True, rsv2=True)
- # 4_1_3
+ check_close(sock, 1002, no_close=True)
- _, sock, _ = self.ws.upgrade()
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_3'
+ sock.close()
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ # 3_4
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, 0x05, '')
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv3=True, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- self.check_close(sock, 1002)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- # 4_1_4
+ check_close(sock, 1002, no_close=True)
- _, sock, _ = self.ws.upgrade()
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_4'
+ sock.close()
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ # 3_5
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, 0x06, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ ws.frame_write(
+ sock,
+ ws.OP_BINARY,
+ b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff',
+ rsv1=True,
+ rsv3=True,
+ )
- self.check_close(sock, 1002)
+ check_close(sock, 1002)
- # 4_1_5
+ # 3_6
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, payload, rsv2=True, rsv3=True)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, 0x07, payload, chopsize=1)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 3_7
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_2_1
+ ws.frame_write(sock, ws.OP_CLOSE, payload, rsv1=True, rsv2=True, rsv3=True)
- _, sock, _ = self.ws.upgrade()
+ check_close(sock, 1002)
- self.ws.frame_write(sock, 0x0B, '')
- self.check_close(sock, 1002)
- # 4_2_2
+def test_java_websockets_4_1_1__4_2_5():
+ client.load('websockets_mirror')
- _, sock, _ = self.ws.upgrade()
+ payload = 'Hello, world!'
- self.ws.frame_write(sock, 0x0C, 'reserved opcode payload')
- self.check_close(sock, 1002)
+ # 4_1_1
- # 4_2_3
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, 0x03, '')
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ # 4_1_2
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, 0x0D, '')
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ ws.frame_write(sock, 0x04, 'reserved opcode payload')
+ check_close(sock, 1002)
- self.check_close(sock, 1002)
+ # 4_1_3
- # 4_2_4
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, 0x05, '')
+ ws.frame_write(sock, ws.OP_PING, '')
- self.ws.frame_write(sock, 0x0E, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ check_close(sock, 1002)
- self.check_close(sock, 1002)
+ # 4_1_4
- # 4_2_5
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, 0x06, payload)
+ ws.frame_write(sock, ws.OP_PING, '')
- self.ws.frame_write(sock, 0x0F, payload, chopsize=1)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ check_close(sock, 1002)
- self.check_close(sock, 1002)
+ # 4_1_5
- def test_java_websockets_5_1__5_20(self):
- self.load('websockets_mirror')
+ _, sock, _ = ws.upgrade()
- # 5_1
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, 0x07, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- # 5_2
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
+ # 4_2_1
- self.ws.frame_write(sock, self.ws.OP_PONG, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 5_3
+ ws.frame_write(sock, 0x0B, '')
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
+ # 4_2_2
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ ws.frame_write(sock, 0x0C, 'reserved opcode payload')
+ check_close(sock, 1002)
- # 5_4
+ # 4_2_3
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_4'
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- # 5_5
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'fragment1', fin=False, chopsize=1
- )
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'fragment2', fin=True, chopsize=1
- )
+ ws.frame_write(sock, 0x0D, '')
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ check_close(sock, 1002)
- # 5_6
+ # 4_2_4
- ping_payload = 'ping payload'
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ ws.frame_write(sock, 0x0E, payload)
+ ws.frame_write(sock, ws.OP_PING, '')
- # 5_7
+ check_close(sock, 1002)
- ping_payload = 'ping payload'
+ # 4_2_5
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_7'
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload)
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ ws.frame_write(sock, 0x0F, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ check_close(sock, 1002)
- # 5_8
- ping_payload = 'ping payload'
+def test_java_websockets_5_1__5_20():
+ client.load('websockets_mirror')
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'fragment1', fin=False, chopsize=1
- )
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload, chopsize=1)
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'fragment2', fin=True, chopsize=1
- )
+ # 5_1
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ ws.frame_write(sock, ws.OP_PING, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- # 5_9
+ # 5_2
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=True
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 5_10
+ ws.frame_write(sock, ws.OP_PONG, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
+ # 5_3
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=True
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 5_11
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- self.ws.frame_write(
- sock,
- self.ws.OP_CONT,
- 'non-continuation payload',
- fin=True,
- chopsize=1,
- )
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1
- )
- self.check_close(sock, 1002)
+ # 5_4
- # 5_12
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_4'
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=False
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ # 5_5
- # 5_13
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False, chopsize=1)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True, chopsize=1)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=False
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ # 5_6
- # 5_14
+ ping_payload = 'ping payload'
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_PING, ping_payload)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- self.ws.frame_write(
- sock,
- self.ws.OP_CONT,
- 'non-continuation payload',
- fin=False,
- chopsize=1,
- )
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1
- )
- self.check_close(sock, 1002)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- # 5_15
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_7
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment4', fin=True)
+ ping_payload = 'ping payload'
- frame = self.ws.frame_read(sock)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_7'
- if frame['opcode'] == self.ws.OP_TEXT:
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
- frame = None
+ ws.frame_write(sock, ws.OP_PING, ping_payload)
- self.check_close(sock, 1002, frame=frame)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- # 5_16
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- for i in range(0, 2):
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=True)
- self.check_close(sock, 1002)
+ # 5_8
- # 5_17
+ ping_payload = 'ping payload'
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, ping_payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True, chopsize=1)
- for i in range(0, 2):
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment1', fin=True)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=True)
- self.check_close(sock, 1002)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- # 5_18
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_9
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2')
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- # 5_19
+ # 5_10
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 1!')
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- time.sleep(1)
+ # 5_11
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment4', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 2!')
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment5')
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 1!')
+ ws.frame_write(
+ sock,
+ ws.OP_CONT,
+ 'non-continuation payload',
+ fin=True,
+ chopsize=1,
+ )
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1)
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 2!')
+ # 5_12
- self.check_frame(
- self.ws.frame_read(sock),
- True,
- self.ws.OP_TEXT,
- 'fragment1fragment2fragment3fragment4fragment5',
- )
+ _, sock, _ = ws.upgrade()
- # 5_20
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 1!')
+ # 5_13
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 1!')
+ _, sock, _ = ws.upgrade()
- time.sleep(1)
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment4', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 2!')
+ # 5_14
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 2!')
+ _, sock, _ = ws.upgrade()
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_20'
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment5')
+ ws.frame_write(
+ sock,
+ ws.OP_CONT,
+ 'non-continuation payload',
+ fin=False,
+ chopsize=1,
+ )
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1)
+ check_close(sock, 1002)
- self.check_frame(
- self.ws.frame_read(sock),
- True,
- self.ws.OP_TEXT,
- 'fragment1fragment2fragment3fragment4fragment5',
- )
+ # 5_15
- self.close_connection(sock)
+ _, sock, _ = ws.upgrade()
- def test_java_websockets_6_1_1__6_4_4(self):
- self.load('websockets_mirror')
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment4', fin=True)
- # 6_1_1
+ frame = ws.frame_read(sock)
- _, sock, _ = self.ws.upgrade()
+ if frame['opcode'] == ws.OP_TEXT:
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
+ frame = None
- self.ws.frame_write(sock, self.ws.OP_TEXT, '')
- frame = self.ws.frame_read(sock, read_timeout=3)
- self.check_frame(frame, True, self.ws.OP_TEXT, '')
+ check_close(sock, 1002, frame=frame)
- # 6_1_2
+ # 5_16
- self.ws.frame_write(sock, self.ws.OP_TEXT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '')
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock, read_timeout=3)
- self.check_frame(frame, True, self.ws.OP_TEXT, '')
+ for _ in range(0, 2):
+ ws.frame_write(sock, ws.OP_CONT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=True)
+ check_close(sock, 1002)
- # 6_1_3
+ # 5_17
- payload = 'middle frame payload'
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, payload, fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '')
+ for _ in range(0, 2):
+ ws.frame_write(sock, ws.OP_CONT, 'fragment1', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=True)
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ # 5_18
- # 6_2_1
+ _, sock, _ = ws.upgrade()
- payload = 'Hello-µ@ßöäüàá-UTF-8!!'
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2')
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ # 5_19
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- # 6_2_2
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 1!')
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload[:12], fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, payload[12:])
+ time.sleep(1)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment4', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 2!')
+ ws.frame_write(sock, ws.OP_CONT, 'fragment5')
- # 6_2_3
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 1!')
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 2!')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ check_frame(
+ ws.frame_read(sock),
+ True,
+ ws.OP_TEXT,
+ 'fragment1fragment2fragment3fragment4fragment5',
+ )
- # 6_2_4
+ # 5_20
- payload = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 1!')
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 1!')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ time.sleep(1)
- self.close_connection(sock)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment4', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 2!')
- # Unit does not support UTF-8 validation
- #
- # # 6_3_1 FAIL
- #
- # payload_1 = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
- # payload_2 = '\xed\xa0\x80'
- # payload_3 = '\x65\x64\x69\x74\x65\x64'
- #
- # payload = payload_1 + payload_2 + payload_3
- #
- # self.ws.message(sock, self.ws.OP_TEXT, payload)
- # self.check_close(sock, 1007)
- #
- # # 6_3_2 FAIL
- #
- # _, sock, _ = self.ws.upgrade()
- #
- # self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
- # self.check_close(sock, 1007)
- #
- # # 6_4_1 ... 6_4_4 FAIL
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 2!')
- def test_java_websockets_7_1_1__7_5_1(self):
- self.load('websockets_mirror')
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_20'
+ ws.frame_write(sock, ws.OP_CONT, 'fragment5')
- # 7_1_1
+ check_frame(
+ ws.frame_read(sock),
+ True,
+ ws.OP_TEXT,
+ 'fragment1fragment2fragment3fragment4fragment5',
+ )
- _, sock, _ = self.ws.upgrade()
+ close_connection(sock)
- payload = "Hello World!"
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+def test_java_websockets_6_1_1__6_4_4():
+ client.load('websockets_mirror')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ # 6_1_1
- self.close_connection(sock)
+ _, sock, _ = ws.upgrade()
- # 7_1_2
+ ws.frame_write(sock, ws.OP_TEXT, '')
+ frame = ws.frame_read(sock, read_timeout=3)
+ check_frame(frame, True, ws.OP_TEXT, '')
- _, sock, _ = self.ws.upgrade()
+ # 6_1_2
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+ ws.frame_write(sock, ws.OP_TEXT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '')
- self.check_close(sock)
+ frame = ws.frame_read(sock, read_timeout=3)
+ check_frame(frame, True, ws.OP_TEXT, '')
- # 7_1_3
+ # 6_1_3
- _, sock, _ = self.ws.upgrade()
+ payload = 'middle frame payload'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
+ ws.frame_write(sock, ws.OP_TEXT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, payload, fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '')
- self.ws.frame_write(sock, self.ws.OP_PING, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- sock.close()
+ # 6_2_1
- # 7_1_4
+ payload = 'Hello-µ@ßöäüàá-UTF-8!!'
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ # 6_2_2
- sock.close()
+ ws.frame_write(sock, ws.OP_TEXT, payload[:12], fin=False)
+ ws.frame_write(sock, ws.OP_CONT, payload[12:])
- # 7_1_5
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ # 6_2_3
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- sock.close()
+ # 6_2_4
- # 7_1_6
+ payload = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
- _, sock, _ = self.ws.upgrade()
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'BAsd7&jh23' * 26 * 2**10)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.recvall(sock, read_timeout=1)
+ close_connection(sock)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- sock.close()
+# Unit does not support UTF-8 validation
+#
+# # 6_3_1 FAIL
+#
+# payload_1 = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
+# payload_2 = '\xed\xa0\x80'
+# payload_3 = '\x65\x64\x69\x74\x65\x64'
+#
+# payload = payload_1 + payload_2 + payload_3
+#
+# ws.message(sock, ws.OP_TEXT, payload)
+# check_close(sock, 1007)
+#
+# # 6_3_2 FAIL
+#
+# _, sock, _ = ws.upgrade()
+#
+# ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
+# check_close(sock, 1007)
+#
+# # 6_4_1 ... 6_4_4 FAIL
+
+
+def test_java_websockets_7_1_1__7_5_1():
+ client.load('websockets_mirror')
+
+ # 7_1_1
+
+ _, sock, _ = ws.upgrade()
+
+ payload = "Hello World!"
+
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
+
+ close_connection(sock)
+
+ # 7_1_2
+
+ _, sock, _ = ws.upgrade()
+
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+
+ check_close(sock)
+
+ # 7_1_3
+
+ _, sock, _ = ws.upgrade()
+
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
+
+ ws.frame_write(sock, ws.OP_PING, '')
+    assert client.recvall(sock, read_timeout=0.1) == b'', 'empty socket'
- # 7_3_1
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ # 7_1_4
- self.ws.frame_write(sock, self.ws.OP_CLOSE, '')
- self.check_close(sock)
+ _, sock, _ = ws.upgrade()
- # 7_3_2
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+    assert client.recvall(sock, read_timeout=0.1) == b'', 'empty socket'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, 'a')
- self.check_close(sock, 1002)
+ sock.close()
- # 7_3_3
+ # 7_1_5
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
- # 7_3_4
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2')
+    assert client.recvall(sock, read_timeout=0.1) == b'', 'empty socket'
- _, sock, _ = self.ws.upgrade()
+ sock.close()
- payload = self.ws.serialize_close(reason='Hello World!')
+ # 7_1_6
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock)
+ _, sock, _ = ws.upgrade()
- # 7_3_5
+ ws.frame_write(sock, ws.OP_TEXT, 'BAsd7&jh23' * 26 * 2**10)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
- _, sock, _ = self.ws.upgrade()
+ client.recvall(sock, read_timeout=1)
- payload = self.ws.serialize_close(reason='*' * 123)
+ ws.frame_write(sock, ws.OP_PING, '')
+    assert client.recvall(sock, read_timeout=0.1) == b'', 'empty socket'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock)
+ sock.close()
- # 7_3_6
+ # 7_3_1
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- payload = self.ws.serialize_close(reason='*' * 124)
+ ws.frame_write(sock, ws.OP_CLOSE, '')
+ check_close(sock)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ # 7_3_2
- # # 7_5_1 FAIL Unit does not support UTF-8 validation
- #
- # _, sock, _ = self.ws.upgrade()
- #
- # payload = self.ws.serialize_close(reason = '\xce\xba\xe1\xbd\xb9\xcf' \
- # '\x83\xce\xbc\xce\xb5\xed\xa0\x80\x65\x64\x69\x74\x65\x64')
- #
- # self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- # self.check_close(sock, 1007)
+ _, sock, _ = ws.upgrade()
- def test_java_websockets_7_7_X__7_9_X(self):
- self.load('websockets_mirror')
+ ws.frame_write(sock, ws.OP_CLOSE, 'a')
+ check_close(sock, 1002)
- valid_codes = [
- 1000,
- 1001,
- 1002,
- 1003,
- 1007,
- 1008,
- 1009,
- 1010,
- 1011,
- 3000,
- 3999,
- 4000,
- 4999,
- ]
+ # 7_3_3
- invalid_codes = [0, 999, 1004, 1005, 1006, 1016, 1100, 2000, 2999]
+ _, sock, _ = ws.upgrade()
- for code in valid_codes:
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock)
- payload = self.ws.serialize_close(code=code)
+ # 7_3_4
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, code=code)
+ _, sock, _ = ws.upgrade()
- for code in invalid_codes:
- _, sock, _ = self.ws.upgrade()
+ payload = ws.serialize_close(reason='Hello World!')
- payload = self.ws.serialize_close(code=code)
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ # 7_3_5
- def test_java_websockets_7_13_1__7_13_2(self):
- self.load('websockets_mirror')
+ _, sock, _ = ws.upgrade()
- # 7_13_1
+ payload = ws.serialize_close(reason='*' * 123)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock)
- payload = self.ws.serialize_close(code=5000)
+ # 7_3_6
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 7_13_2
+ payload = ws.serialize_close(reason='*' * 124)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
- payload = struct.pack('!I', 65536) + ''.encode('utf-8')
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+# # 7_5_1 FAIL Unit does not support UTF-8 validation
+#
+# _, sock, _ = ws.upgrade()
+#
+# payload = ws.serialize_close(reason = '\xce\xba\xe1\xbd\xb9\xcf' \
+# '\x83\xce\xbc\xce\xb5\xed\xa0\x80\x65\x64\x69\x74\x65\x64')
+#
+# ws.frame_write(sock, ws.OP_CLOSE, payload)
+# check_close(sock, 1007)
- def test_java_websockets_9_1_1__9_6_6(self, is_unsafe):
- if not is_unsafe:
- pytest.skip('unsafe, long run')
- self.load('websockets_mirror')
+def test_java_websockets_7_7_X__7_9_X():
+ client.load('websockets_mirror')
- assert 'success' in self.conf(
- {
- 'http': {
- 'websocket': {
- 'max_frame_size': 33554432,
- 'keepalive_interval': 0,
- }
+ valid_codes = [
+ 1000,
+ 1001,
+ 1002,
+ 1003,
+ 1007,
+ 1008,
+ 1009,
+ 1010,
+ 1011,
+ 3000,
+ 3999,
+ 4000,
+ 4999,
+ ]
+
+ invalid_codes = [0, 999, 1004, 1005, 1006, 1016, 1100, 2000, 2999]
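+    # Per RFC 6455 section 7.4: codes below 1000 are not used, 1004 is
+    # reserved, 1005/1006 must never appear on the wire, and undefined
+    # codes in the 1000-2999 range are reserved for the protocol itself;
+    # 3000-3999 (registered) and 4000-4999 (private use) are valid, so the
+    # invalid list above is expected to be rejected with 1002.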
+
+ for code in valid_codes:
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=code)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, code=code)
+
+ for code in invalid_codes:
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=code)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+
+def test_java_websockets_7_13_1__7_13_2():
+ client.load('websockets_mirror')
+
+ # 7_13_1
+
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=5000)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+ # 7_13_2
+
+ _, sock, _ = ws.upgrade()
+
+ payload = struct.pack('!I', 65536) + ''.encode('utf-8')
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+
+def test_java_websockets_9_1_1__9_6_6(is_unsafe, system):
+ if not is_unsafe:
+ pytest.skip('unsafe, long run')
+
+ client.load('websockets_mirror')
+
+ assert 'success' in client.conf(
+ {
+ 'http': {
+ 'websocket': {
+ 'max_frame_size': 33554432,
+ 'keepalive_interval': 0,
}
- },
- 'settings',
- ), 'increase max_frame_size and keepalive_interval'
-
- _, sock, _ = self.ws.upgrade()
-
- op_text = self.ws.OP_TEXT
- op_binary = self.ws.OP_BINARY
-
- def check_payload(opcode, length, chopsize=None):
- if opcode == self.ws.OP_TEXT:
- payload = '*' * length
- else:
- payload = b'*' * length
+ }
+ },
+ 'settings',
+ ), 'increase max_frame_size and keepalive_interval'
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, opcode, payload)
+ _, sock, _ = ws.upgrade()
- def check_message(opcode, f_size):
- if opcode == self.ws.OP_TEXT:
- payload = '*' * 4 * 2**20
- else:
- payload = b'*' * 4 * 2**20
+ op_text = ws.OP_TEXT
+ op_binary = ws.OP_BINARY
- self.ws.message(sock, opcode, payload, fragmention_size=f_size)
- frame = self.ws.frame_read(sock, read_timeout=5)
- self.check_frame(frame, True, opcode, payload)
+ def check_payload(opcode, length, chopsize=None):
+ if opcode == ws.OP_TEXT:
+ payload = '*' * length
+ else:
+ payload = b'*' * length
- check_payload(op_text, 64 * 2**10) # 9_1_1
- check_payload(op_text, 256 * 2**10) # 9_1_2
- check_payload(op_text, 2**20) # 9_1_3
- check_payload(op_text, 4 * 2**20) # 9_1_4
- check_payload(op_text, 8 * 2**20) # 9_1_5
- check_payload(op_text, 16 * 2**20) # 9_1_6
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, opcode, payload)
- check_payload(op_binary, 64 * 2**10) # 9_2_1
- check_payload(op_binary, 256 * 2**10) # 9_2_2
- check_payload(op_binary, 2**20) # 9_2_3
- check_payload(op_binary, 4 * 2**20) # 9_2_4
- check_payload(op_binary, 8 * 2**20) # 9_2_5
- check_payload(op_binary, 16 * 2**20) # 9_2_6
+ def check_message(opcode, f_size):
+ if opcode == ws.OP_TEXT:
+ payload = '*' * 4 * 2**20
+ else:
+ payload = b'*' * 4 * 2**20
- if option.system != 'Darwin' and option.system != 'FreeBSD':
- check_message(op_text, 64) # 9_3_1
- check_message(op_text, 256) # 9_3_2
- check_message(op_text, 2**10) # 9_3_3
- check_message(op_text, 4 * 2**10) # 9_3_4
- check_message(op_text, 16 * 2**10) # 9_3_5
- check_message(op_text, 64 * 2**10) # 9_3_6
- check_message(op_text, 256 * 2**10) # 9_3_7
- check_message(op_text, 2**20) # 9_3_8
- check_message(op_text, 4 * 2**20) # 9_3_9
+ ws.message(sock, opcode, payload, fragmention_size=f_size)
+ frame = ws.frame_read(sock, read_timeout=5)
+ check_frame(frame, True, opcode, payload)
- check_message(op_binary, 64) # 9_4_1
- check_message(op_binary, 256) # 9_4_2
- check_message(op_binary, 2**10) # 9_4_3
- check_message(op_binary, 4 * 2**10) # 9_4_4
- check_message(op_binary, 16 * 2**10) # 9_4_5
- check_message(op_binary, 64 * 2**10) # 9_4_6
- check_message(op_binary, 256 * 2**10) # 9_4_7
- check_message(op_binary, 2**20) # 9_4_8
- check_message(op_binary, 4 * 2**20) # 9_4_9
+ check_payload(op_text, 64 * 2**10) # 9_1_1
+ check_payload(op_text, 256 * 2**10) # 9_1_2
+ check_payload(op_text, 2**20) # 9_1_3
+ check_payload(op_text, 4 * 2**20) # 9_1_4
+ check_payload(op_text, 8 * 2**20) # 9_1_5
+ check_payload(op_text, 16 * 2**20) # 9_1_6
- check_payload(op_text, 2**20, chopsize=64) # 9_5_1
- check_payload(op_text, 2**20, chopsize=128) # 9_5_2
- check_payload(op_text, 2**20, chopsize=256) # 9_5_3
- check_payload(op_text, 2**20, chopsize=512) # 9_5_4
- check_payload(op_text, 2**20, chopsize=1024) # 9_5_5
- check_payload(op_text, 2**20, chopsize=2048) # 9_5_6
+ check_payload(op_binary, 64 * 2**10) # 9_2_1
+ check_payload(op_binary, 256 * 2**10) # 9_2_2
+ check_payload(op_binary, 2**20) # 9_2_3
+ check_payload(op_binary, 4 * 2**20) # 9_2_4
+ check_payload(op_binary, 8 * 2**20) # 9_2_5
+ check_payload(op_binary, 16 * 2**20) # 9_2_6
- check_payload(op_binary, 2**20, chopsize=64) # 9_6_1
- check_payload(op_binary, 2**20, chopsize=128) # 9_6_2
- check_payload(op_binary, 2**20, chopsize=256) # 9_6_3
- check_payload(op_binary, 2**20, chopsize=512) # 9_6_4
- check_payload(op_binary, 2**20, chopsize=1024) # 9_6_5
- check_payload(op_binary, 2**20, chopsize=2048) # 9_6_6
+ if system not in ['Darwin', 'FreeBSD']:
+ check_message(op_text, 64) # 9_3_1
+ check_message(op_text, 256) # 9_3_2
+ check_message(op_text, 2**10) # 9_3_3
+ check_message(op_text, 4 * 2**10) # 9_3_4
+ check_message(op_text, 16 * 2**10) # 9_3_5
+ check_message(op_text, 64 * 2**10) # 9_3_6
+ check_message(op_text, 256 * 2**10) # 9_3_7
+ check_message(op_text, 2**20) # 9_3_8
+ check_message(op_text, 4 * 2**20) # 9_3_9
- self.close_connection(sock)
+ check_message(op_binary, 64) # 9_4_1
+ check_message(op_binary, 256) # 9_4_2
+ check_message(op_binary, 2**10) # 9_4_3
+ check_message(op_binary, 4 * 2**10) # 9_4_4
+ check_message(op_binary, 16 * 2**10) # 9_4_5
+ check_message(op_binary, 64 * 2**10) # 9_4_6
+ check_message(op_binary, 256 * 2**10) # 9_4_7
+ check_message(op_binary, 2**20) # 9_4_8
+ check_message(op_binary, 4 * 2**20) # 9_4_9
- def test_java_websockets_10_1_1(self):
- self.load('websockets_mirror')
+ check_payload(op_text, 2**20, chopsize=64) # 9_5_1
+ check_payload(op_text, 2**20, chopsize=128) # 9_5_2
+ check_payload(op_text, 2**20, chopsize=256) # 9_5_3
+ check_payload(op_text, 2**20, chopsize=512) # 9_5_4
+ check_payload(op_text, 2**20, chopsize=1024) # 9_5_5
+ check_payload(op_text, 2**20, chopsize=2048) # 9_5_6
- _, sock, _ = self.ws.upgrade()
+ check_payload(op_binary, 2**20, chopsize=64) # 9_6_1
+ check_payload(op_binary, 2**20, chopsize=128) # 9_6_2
+ check_payload(op_binary, 2**20, chopsize=256) # 9_6_3
+ check_payload(op_binary, 2**20, chopsize=512) # 9_6_4
+ check_payload(op_binary, 2**20, chopsize=1024) # 9_6_5
+ check_payload(op_binary, 2**20, chopsize=2048) # 9_6_6
- payload = '*' * 65536
+ close_connection(sock)
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1300)
- frame = self.ws.message_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+def test_java_websockets_10_1_1():
+ client.load('websockets_mirror')
- self.close_connection(sock)
+ _, sock, _ = ws.upgrade()
- # settings
+ payload = '*' * 65536
- def test_java_websockets_max_frame_size(self):
- self.load('websockets_mirror')
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1300)
- assert 'success' in self.conf(
- {'http': {'websocket': {'max_frame_size': 100}}}, 'settings'
- ), 'configure max_frame_size'
+ frame = ws.message_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ close_connection(sock)
- payload = '*' * 94
- opcode = self.ws.OP_TEXT
- self.ws.frame_write(sock, opcode, payload) # frame length is 100
+# settings
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, opcode, payload)
- payload = '*' * 95
+def test_java_websockets_max_frame_size():
+ client.load('websockets_mirror')
- self.ws.frame_write(sock, opcode, payload) # frame length is 101
- self.check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'max_frame_size': 100}}}, 'settings'
+ ), 'configure max_frame_size'
- def test_java_websockets_read_timeout(self):
- self.load('websockets_mirror')
+ _, sock, _ = ws.upgrade()
- assert 'success' in self.conf(
- {'http': {'websocket': {'read_timeout': 5}}}, 'settings'
- ), 'configure read_timeout'
+ payload = '*' * 94
+ opcode = ws.OP_TEXT
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, opcode, payload) # frame length is 100
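+    # (94 payload bytes + 2-byte base header + 4-byte masking key, assuming
+    # the framework masks client frames as RFC 6455 requires)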
- frame = self.ws.frame_to_send(self.ws.OP_TEXT, 'blah')
- sock.sendall(frame[:2])
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, opcode, payload)
- time.sleep(2)
+ payload = '*' * 95
- self.check_close(sock, 1001) # 1001 - CLOSE_GOING_AWAY
+ ws.frame_write(sock, opcode, payload) # frame length is 101
+ check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
- def test_java_websockets_keepalive_interval(self):
- self.load('websockets_mirror')
- assert 'success' in self.conf(
- {'http': {'websocket': {'keepalive_interval': 5}}}, 'settings'
- ), 'configure keepalive_interval'
+def test_java_websockets_read_timeout():
+ client.load('websockets_mirror')
- _, sock, _ = self.ws.upgrade()
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'read_timeout': 5}}}, 'settings'
+ ), 'configure read_timeout'
- frame = self.ws.frame_to_send(self.ws.OP_TEXT, 'blah')
- sock.sendall(frame[:2])
+ _, sock, _ = ws.upgrade()
- time.sleep(2)
+ frame = ws.frame_to_send(ws.OP_TEXT, 'blah')
+ sock.sendall(frame[:2])
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PING, '') # PING frame
+ time.sleep(2)
- sock.close()
+ check_close(sock, 1001) # 1001 - CLOSE_GOING_AWAY
+
+
+def test_java_websockets_keepalive_interval():
+ client.load('websockets_mirror')
+
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'keepalive_interval': 5}}}, 'settings'
+ ), 'configure keepalive_interval'
+
+ _, sock, _ = ws.upgrade()
+
+ frame = ws.frame_to_send(ws.OP_TEXT, 'blah')
+ sock.sendall(frame[:2])
+
+ time.sleep(2)
+
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PING, '') # PING frame
+
+ sock.close()
diff --git a/test/test_njs.py b/test/test_njs.py
index a7261290..162cc0bd 100644
--- a/test/test_njs.py
+++ b/test/test_njs.py
@@ -1,92 +1,101 @@
import os
-from unit.applications.proto import TestApplicationProto
+import pytest
+from unit.applications.proto import ApplicationProto
from unit.option import option
from unit.utils import waitforfiles
+prerequisites = {'modules': {'njs': 'any'}}
-class TestNJS(TestApplicationProto):
- prerequisites = {'modules': {'njs': 'any'}}
+client = ApplicationProto()
- def setup_method(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {"action": {"share": f"{option.temp_dir}/assets$uri"}}
- ],
- }
- )
- def create_files(self, *files):
- assets_dir = f'{option.temp_dir}/assets/'
- os.makedirs(assets_dir)
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"share": f"{temp_dir}/assets$uri"}}],
+ }
+ )
- [open(assets_dir + f, 'a') for f in files]
- waitforfiles(*[assets_dir + f for f in files])
- def set_share(self, share):
- assert 'success' in self.conf(share, 'routes/0/action/share')
+def create_files(*files):
+ assets_dir = f'{option.temp_dir}/assets/'
+ os.makedirs(assets_dir)
- def check_expression(self, expression, url='/'):
- self.set_share(f'"`{option.temp_dir}/assets{expression}`"')
- assert self.get(url=url)['status'] == 200
+ [open(assets_dir + f, 'a') for f in files]
+ waitforfiles(*[assets_dir + f for f in files])
- def test_njs_template_string(self, temp_dir):
- self.create_files('str', '`string`', '`backtick', 'l1\nl2')
- self.check_expression('/str')
- self.check_expression('/\\\\`backtick')
- self.check_expression('/l1\\nl2')
+def set_share(share):
+ assert 'success' in client.conf(share, 'routes/0/action/share')
- self.set_share(f'"{temp_dir}/assets/`string`"')
- assert self.get()['status'] == 200
- def test_njs_template_expression(self, temp_dir):
- self.create_files('str', 'localhost')
+def check_expression(expression, url='/'):
+ set_share(f'"`{option.temp_dir}/assets{expression}`"')
+ assert client.get(url=url)['status'] == 200
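+    # e.g. check_expression('${uri}', '/str') sets the share to the njs
+    # template `{option.temp_dir}/assets${uri}` and expects GET /str to
+    # resolve to the 'str' file created by create_files().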
- self.check_expression('${uri}', '/str')
- self.check_expression('${uri}${host}')
- self.check_expression('${uri + host}')
- self.check_expression('${uri + `${host}`}')
- def test_njs_iteration(self, temp_dir):
- self.create_files('Connection,Host', 'close,localhost')
+def test_njs_template_string(temp_dir):
+ create_files('str', '`string`', '`backtick', 'l1\nl2')
- self.check_expression('/${Object.keys(headers).sort().join()}')
- self.check_expression('/${Object.values(headers).sort().join()}')
+ check_expression('/str')
+ check_expression('/\\\\`backtick')
+ check_expression('/l1\\nl2')
- def test_njs_variables(self, temp_dir):
- self.create_files('str', 'localhost', '127.0.0.1')
+ set_share(f'"{temp_dir}/assets/`string`"')
+ assert client.get()['status'] == 200
- self.check_expression('/${host}')
- self.check_expression('/${remoteAddr}')
- self.check_expression('/${headers.Host}')
- self.set_share(f'"`{temp_dir}/assets/${{cookies.foo}}`"')
- assert (
- self.get(headers={'Cookie': 'foo=str', 'Connection': 'close'})[
- 'status'
- ]
- == 200
- ), 'cookies'
+def test_njs_template_expression():
+ create_files('str', 'localhost')
- self.set_share(f'"`{temp_dir}/assets/${{args.foo}}`"')
- assert self.get(url='/?foo=str')['status'] == 200, 'args'
+ check_expression('${uri}', '/str')
+ check_expression('${uri}${host}')
+ check_expression('${uri + host}')
+ check_expression('${uri + `${host}`}')
- def test_njs_invalid(self, temp_dir, skip_alert):
- skip_alert(r'js exception:')
- def check_invalid(template):
- assert 'error' in self.conf(template, 'routes/0/action/share')
+def test_njs_iteration():
+ create_files('Connection,Host', 'close,localhost')
- check_invalid('"`a"')
- check_invalid('"`a``"')
- check_invalid('"`a`/"')
+ check_expression('/${Object.keys(headers).sort().join()}')
+ check_expression('/${Object.values(headers).sort().join()}')
- def check_invalid_resolve(template):
- assert 'success' in self.conf(template, 'routes/0/action/share')
- assert self.get()['status'] == 500
- check_invalid_resolve('"`${a}`"')
- check_invalid_resolve('"`${uri.a.a}`"')
+def test_njs_variables(temp_dir):
+ create_files('str', 'localhost', '127.0.0.1')
+
+ check_expression('/${host}')
+ check_expression('/${remoteAddr}')
+ check_expression('/${headers.Host}')
+
+ set_share(f'"`{temp_dir}/assets/${{cookies.foo}}`"')
+ assert (
+ client.get(headers={'Cookie': 'foo=str', 'Connection': 'close'})[
+ 'status'
+ ]
+ == 200
+ ), 'cookies'
+
+ set_share(f'"`{temp_dir}/assets/${{args.foo}}`"')
+ assert client.get(url='/?foo=str')['status'] == 200, 'args'
+
+
+def test_njs_invalid(skip_alert):
+ skip_alert(r'js exception:')
+
+ def check_invalid(template):
+ assert 'error' in client.conf(template, 'routes/0/action/share')
+
+ check_invalid('"`a"')
+ check_invalid('"`a``"')
+ check_invalid('"`a`/"')
+
+ def check_invalid_resolve(template):
+ assert 'success' in client.conf(template, 'routes/0/action/share')
+ assert client.get()['status'] == 500
+
+ check_invalid_resolve('"`${a}`"')
+ check_invalid_resolve('"`${uri.a.a}`"')
diff --git a/test/test_njs_modules.py b/test/test_njs_modules.py
index ce592fe4..d821d455 100644
--- a/test/test_njs_modules.py
+++ b/test/test_njs_modules.py
@@ -1,99 +1,104 @@
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
+prerequisites = {'modules': {'njs': 'any'}}
-class TestNJSModules(TestApplicationProto):
- prerequisites = {'modules': {'njs': 'any'}}
+client = ApplicationProto()
- def njs_script_load(self, module, name=None, expect='success'):
- if name is None:
- name = module
- with open(f'{option.test_dir}/njs/{module}/script.js', 'rb') as s:
- assert expect in self.conf(s.read(), f'/js_modules/{name}')
+def njs_script_load(module, name=None, expect='success'):
+ if name is None:
+ name = module
- def test_njs_modules(self):
- self.njs_script_load('next')
+ with open(f'{option.test_dir}/njs/{module}/script.js', 'rb') as script:
+ assert expect in client.conf(script.read(), f'/js_modules/{name}')
- assert 'export' in self.conf_get('/js_modules/next')
- assert 'error' in self.conf_post('"blah"', '/js_modules/next')
- assert 'success' in self.conf(
- {
- "settings": {"js_module": "next"},
- "listeners": {"*:7080": {"pass": "routes/first"}},
- "routes": {
- "first": [{"action": {"pass": "`routes/${next.route()}`"}}],
- "next": [{"action": {"return": 200}}],
- },
- }
- )
- assert self.get()['status'] == 200, 'string'
+def test_njs_modules():
+ njs_script_load('next')
- assert 'success' in self.conf({"js_module": ["next"]}, 'settings')
- assert self.get()['status'] == 200, 'array'
+ assert 'export' in client.conf_get('/js_modules/next')
+ assert 'error' in client.conf_post('"blah"', '/js_modules/next')
- # add one more value to array
+ assert 'success' in client.conf(
+ {
+ "settings": {"js_module": "next"},
+ "listeners": {"*:7080": {"pass": "routes/first"}},
+ "routes": {
+ "first": [{"action": {"pass": "`routes/${next.route()}`"}}],
+ "next": [{"action": {"return": 200}}],
+ },
+ }
+ )
+ assert client.get()['status'] == 200, 'string'
- assert len(self.conf_get('/js_modules').keys()) == 1
+ assert 'success' in client.conf({"js_module": ["next"]}, 'settings')
+ assert client.get()['status'] == 200, 'array'
- self.njs_script_load('next', 'next_2')
+ # add one more value to array
- assert len(self.conf_get('/js_modules').keys()) == 2
+ assert len(client.conf_get('/js_modules').keys()) == 1
- assert 'success' in self.conf_post('"next_2"', 'settings/js_module')
- assert self.get()['status'] == 200, 'array len 2'
+ njs_script_load('next', 'next_2')
- assert 'success' in self.conf(
- '"`routes/${next_2.route()}`"', 'routes/first/0/action/pass'
- )
- assert self.get()['status'] == 200, 'array new'
+ assert len(client.conf_get('/js_modules').keys()) == 2
- # can't update existing script
+ assert 'success' in client.conf_post('"next_2"', 'settings/js_module')
+ assert client.get()['status'] == 200, 'array len 2'
- self.njs_script_load('global_this', 'next', expect='error')
+ assert 'success' in client.conf(
+ '"`routes/${next_2.route()}`"', 'routes/first/0/action/pass'
+ )
+ assert client.get()['status'] == 200, 'array new'
- # delete modules
+ # can't update existing script
- assert 'error' in self.conf_delete('/js_modules/next_2')
- assert 'success' in self.conf_delete('settings/js_module')
- assert 'success' in self.conf_delete('/js_modules/next_2')
+ njs_script_load('global_this', 'next', expect='error')
- def test_njs_modules_import(self):
- self.njs_script_load('import_from')
+ # delete modules
- assert 'success' in self.conf(
- {
- "settings": {"js_module": "import_from"},
- "listeners": {"*:7080": {"pass": "routes/first"}},
- "routes": {
- "first": [
- {"action": {"pass": "`routes/${import_from.num()}`"}}
- ],
- "number": [{"action": {"return": 200}}],
- },
- }
- )
- assert self.get()['status'] == 200
+ assert 'error' in client.conf_delete('/js_modules/next_2')
+ assert 'success' in client.conf_delete('settings/js_module')
+ assert 'success' in client.conf_delete('/js_modules/next_2')
- def test_njs_modules_this(self):
- self.njs_script_load('global_this')
- assert 'success' in self.conf(
- {
- "settings": {"js_module": "global_this"},
- "listeners": {"*:7080": {"pass": "routes/first"}},
- "routes": {
- "first": [
- {"action": {"pass": "`routes/${global_this.str()}`"}}
- ],
- "string": [{"action": {"return": 200}}],
- },
- }
- )
- assert self.get()['status'] == 200
+def test_njs_modules_import():
+ njs_script_load('import_from')
- def test_njs_modules_invalid(self, skip_alert):
- skip_alert(r'.*JS compile module.*failed.*')
+ assert 'success' in client.conf(
+ {
+ "settings": {"js_module": "import_from"},
+ "listeners": {"*:7080": {"pass": "routes/first"}},
+ "routes": {
+ "first": [
+ {"action": {"pass": "`routes/${import_from.num()}`"}}
+ ],
+ "number": [{"action": {"return": 200}}],
+ },
+ }
+ )
+ assert client.get()['status'] == 200
- self.njs_script_load('invalid', expect='error')
+
+def test_njs_modules_this():
+ njs_script_load('global_this')
+
+ assert 'success' in client.conf(
+ {
+ "settings": {"js_module": "global_this"},
+ "listeners": {"*:7080": {"pass": "routes/first"}},
+ "routes": {
+ "first": [
+ {"action": {"pass": "`routes/${global_this.str()}`"}}
+ ],
+ "string": [{"action": {"return": 200}}],
+ },
+ }
+ )
+ assert client.get()['status'] == 200
+
+
+def test_njs_modules_invalid(skip_alert):
+ skip_alert(r'.*JS compile module.*failed.*')
+
+ njs_script_load('invalid', expect='error')
diff --git a/test/test_node_application.py b/test/test_node_application.py
index 719afae8..e4226535 100644
--- a/test/test_node_application.py
+++ b/test/test_node_application.py
@@ -1,307 +1,332 @@
import re
import pytest
-from unit.applications.lang.node import TestApplicationNode
+from unit.applications.lang.node import ApplicationNode
from unit.utils import waitforfiles
+prerequisites = {'modules': {'node': 'all'}}
-class TestNodeApplication(TestApplicationNode):
- prerequisites = {'modules': {'node': 'all'}}
+client = ApplicationNode()
- def assert_basic_application(self):
- resp = self.get()
- assert resp['headers']['Content-Type'] == 'text/plain', 'basic header'
- assert resp['body'] == 'Hello World\n', 'basic body'
- def test_node_application_basic(self):
- self.load('basic')
+def assert_basic_application():
+ resp = client.get()
+ assert resp['headers']['Content-Type'] == 'text/plain', 'basic header'
+ assert resp['body'] == 'Hello World\n', 'basic body'
- self.assert_basic_application()
- def test_node_application_loader_unit_http(self):
- self.load('loader/unit_http')
+def test_node_application_basic():
+ client.load('basic')
- self.assert_basic_application()
+ assert_basic_application()
- def test_node_application_loader_transitive_dependency(self):
- self.load('loader/transitive_dependency')
- self.assert_basic_application()
+def test_node_application_loader_unit_http():
+ client.load('loader/unit_http')
- def test_node_application_seq(self):
- self.load('basic')
+ assert_basic_application()
- assert self.get()['status'] == 200, 'seq'
- assert self.get()['status'] == 200, 'seq 2'
- def test_node_application_variables(self):
- self.load('variables')
+def test_node_application_loader_transitive_dependency():
+ client.load('loader/transitive_dependency')
- body = 'Test body string.'
+ assert_basic_application()
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'Custom-Header': 'blah',
- 'Connection': 'close',
- },
- body=body,
- )
-
- assert resp['status'] == 200, 'status'
- headers = resp['headers']
- header_server = headers.pop('Server')
- assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
-
- date = headers.pop('Date')
- assert date[-4:] == ' GMT', 'date header timezone'
- assert (
- abs(self.date_to_sec_epoch(date) - self.sec_epoch()) < 5
- ), 'date header'
-
- raw_headers = headers.pop('Request-Raw-Headers')
- assert re.search(
- r'^(?:Host|localhost|Content-Type|'
- r'text\/html|Custom-Header|blah|Content-Length|17|Connection|'
- r'close|,)+$',
- raw_headers,
- ), 'raw headers'
-
- assert headers == {
- 'Connection': 'close',
- 'Content-Length': str(len(body)),
+
+def test_node_application_seq():
+ client.load('basic')
+
+ assert client.get()['status'] == 200, 'seq'
+ assert client.get()['status'] == 200, 'seq 2'
+
+
+def test_node_application_variables(date_to_sec_epoch, sec_epoch):
+ client.load('variables')
+
+ body = 'Test body string.'
+
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
'Content-Type': 'text/html',
- 'Request-Method': 'POST',
- 'Request-Uri': '/',
- 'Http-Host': 'localhost',
- 'Server-Protocol': 'HTTP/1.1',
'Custom-Header': 'blah',
- }, 'headers'
- assert resp['body'] == body, 'body'
+ 'Connection': 'close',
+ },
+ body=body,
+ )
+
+ assert resp['status'] == 200, 'status'
+ headers = resp['headers']
+ header_server = headers.pop('Server')
+ assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
+
+ date = headers.pop('Date')
+ assert date[-4:] == ' GMT', 'date header timezone'
+ assert abs(date_to_sec_epoch(date) - sec_epoch) < 5, 'date header'
+
+ raw_headers = headers.pop('Request-Raw-Headers')
+ assert re.search(
+ r'^(?:Host|localhost|Content-Type|'
+ r'text\/html|Custom-Header|blah|Content-Length|17|Connection|'
+ r'close|,)+$',
+ raw_headers,
+ ), 'raw headers'
+
+ assert headers == {
+ 'Connection': 'close',
+ 'Content-Length': str(len(body)),
+ 'Content-Type': 'text/html',
+ 'Request-Method': 'POST',
+ 'Request-Uri': '/',
+ 'Http-Host': 'localhost',
+ 'Server-Protocol': 'HTTP/1.1',
+ 'Custom-Header': 'blah',
+ }, 'headers'
+ assert resp['body'] == body, 'body'
+
+
+def test_node_application_get_variables():
+ client.load('get_variables')
+
+ resp = client.get(url='/?var1=val1&var2=&var3')
+ assert resp['headers']['X-Var-1'] == 'val1', 'GET variables'
+ assert resp['headers']['X-Var-2'] == '', 'GET variables 2'
+ assert resp['headers']['X-Var-3'] == '', 'GET variables 3'
+
+
+def test_node_application_post_variables():
+ client.load('post_variables')
+
+ resp = client.post(
+ headers={
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ },
+ body='var1=val1&var2=&var3',
+ )
+
+ assert resp['headers']['X-Var-1'] == 'val1', 'POST variables'
+ assert resp['headers']['X-Var-2'] == '', 'POST variables 2'
+ assert resp['headers']['X-Var-3'] == '', 'POST variables 3'
+
+
+def test_node_application_404():
+ client.load('404')
+
+ resp = client.get()
+
+ assert resp['status'] == 404, '404 status'
+ assert re.search(r'<title>404 Not Found</title>', resp['body']), '404 body'
+
+
+def test_node_keepalive_body():
+ client.load('mirror')
+
+ assert client.get()['status'] == 200, 'init'
+
+ body = '0123456789' * 500
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
+
+ assert resp['body'] == '0123456789' * 500, 'keep-alive 1'
+
+ body = '0123456789'
+ resp = client.post(sock=sock, body=body)
+
+ assert resp['body'] == body, 'keep-alive 2'
+
+
+def test_node_application_write_buffer():
+ client.load('write_buffer')
+
+ assert client.get()['body'] == 'buffer', 'write buffer'
+
+
+def test_node_application_write_callback(temp_dir):
+ client.load('write_callback')
+
+ assert client.get()['body'] == 'helloworld', 'write callback order'
+ assert waitforfiles(f'{temp_dir}/node/callback'), 'write callback'
+
+
+def test_node_application_write_before_write_head():
+ client.load('write_before_write_head')
+
+ assert client.get()['status'] == 200, 'write before writeHead'
+
+
+def test_node_application_double_end():
+ client.load('double_end')
- def test_node_application_get_variables(self):
- self.load('get_variables')
+ assert client.get()['status'] == 200, 'double end'
+ assert client.get()['status'] == 200, 'double end 2'
- resp = self.get(url='/?var1=val1&var2=&var3')
- assert resp['headers']['X-Var-1'] == 'val1', 'GET variables'
- assert resp['headers']['X-Var-2'] == '', 'GET variables 2'
- assert resp['headers']['X-Var-3'] == '', 'GET variables 3'
- def test_node_application_post_variables(self):
- self.load('post_variables')
+def test_node_application_write_return():
+ client.load('write_return')
- resp = self.post(
+ assert client.get()['body'] == 'bodytrue', 'write return'
+
+
+def test_node_application_remove_header():
+ client.load('remove_header')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Remove': 'X-Header',
+ 'Connection': 'close',
+ }
+ )
+ assert resp['headers']['Was-Header'] == 'true', 'was header'
+ assert resp['headers']['Has-Header'] == 'false', 'has header'
+ assert not ('X-Header' in resp['headers']), 'remove header'
+
+
+def test_node_application_remove_header_nonexisting():
+ client.load('remove_header')
+
+ assert (
+ client.get(
headers={
- 'Content-Type': 'application/x-www-form-urlencoded',
'Host': 'localhost',
+ 'X-Remove': 'blah',
'Connection': 'close',
- },
- body='var1=val1&var2=&var3',
- )
+ }
+ )['headers']['Has-Header']
+ == 'true'
+ ), 'remove header nonexisting'
+
+
+def test_node_application_update_header():
+ client.load('update_header')
+
+ assert client.get()['headers']['X-Header'] == 'new', 'update header'
+
+
+def test_node_application_set_header_array():
+ client.load('set_header_array')
+
+ assert client.get()['headers']['Set-Cookie'] == [
+ 'tc=one,two,three',
+ 'tc=four,five,six',
+ ], 'set header array'
+
+
+@pytest.mark.skip('not yet')
+def test_node_application_status_message():
+ client.load('status_message')
+
+ assert re.search(r'200 blah', client.get(raw_resp=True)), 'status message'
- assert resp['headers']['X-Var-1'] == 'val1', 'POST variables'
- assert resp['headers']['X-Var-2'] == '', 'POST variables 2'
- assert resp['headers']['X-Var-3'] == '', 'POST variables 3'
- def test_node_application_404(self):
- self.load('404')
+def test_node_application_get_header_type():
+ client.load('get_header_type')
- resp = self.get()
+ assert client.get()['headers']['X-Type'] == 'number', 'get header type'
- assert resp['status'] == 404, '404 status'
- assert re.search(
- r'<title>404 Not Found</title>', resp['body']
- ), '404 body'
- def test_node_keepalive_body(self):
- self.load('mirror')
+def test_node_application_header_name_case():
+ client.load('header_name_case')
- assert self.get()['status'] == 200, 'init'
+ headers = client.get()['headers']
- body = '0123456789' * 500
- (resp, sock) = self.post(
+ assert headers['X-HEADER'] == '3', 'header value'
+ assert 'X-Header' not in headers, 'insensitive'
+ assert 'X-header' not in headers, 'insensitive 2'
+
+
+def test_node_application_promise_handler_write_after_end():
+ client.load('promise_handler')
+
+ assert (
+ client.post(
headers={
'Host': 'localhost',
- 'Connection': 'keep-alive',
+ 'Content-Type': 'text/html',
+ 'X-Write-Call': '1',
+ 'Connection': 'close',
},
- start=True,
- body=body,
- read_timeout=1,
- )
+ body='callback',
+ )['status']
+ == 200
+ ), 'promise handler request write after end'
- assert resp['body'] == '0123456789' * 500, 'keep-alive 1'
- body = '0123456789'
- resp = self.post(sock=sock, body=body)
+def test_node_application_promise_end(temp_dir):
+ client.load('promise_end')
+
+ assert (
+ client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Type': 'text/html',
+ 'Connection': 'close',
+ },
+ body='end',
+ )['status']
+ == 200
+ ), 'promise end request'
+ assert waitforfiles(f'{temp_dir}/node/callback'), 'promise end'
- assert resp['body'] == body, 'keep-alive 2'
- def test_node_application_write_buffer(self):
- self.load('write_buffer')
+@pytest.mark.skip('not yet')
+def test_node_application_header_name_valid():
+ client.load('header_name_valid')
- assert self.get()['body'] == 'buffer', 'write buffer'
+ assert 'status' not in client.get(), 'header name valid'
- def test_node_application_write_callback(self, temp_dir):
- self.load('write_callback')
- assert self.get()['body'] == 'helloworld', 'write callback order'
- assert waitforfiles(f'{temp_dir}/node/callback'), 'write callback'
+def test_node_application_header_value_object():
+ client.load('header_value_object')
- def test_node_application_write_before_write_head(self):
- self.load('write_before_write_head')
+ assert 'X-Header' in client.get()['headers'], 'header value object'
- assert self.get()['status'] == 200, 'write before writeHead'
- def test_node_application_double_end(self):
- self.load('double_end')
+def test_node_application_get_header_names():
+ client.load('get_header_names')
- assert self.get()['status'] == 200, 'double end'
- assert self.get()['status'] == 200, 'double end 2'
+ assert client.get()['headers']['X-Names'] == [
+ 'date',
+ 'x-header',
+ ], 'get header names'
- def test_node_application_write_return(self):
- self.load('write_return')
- assert self.get()['body'] == 'bodytrue', 'write return'
+def test_node_application_has_header():
+ client.load('has_header')
- def test_node_application_remove_header(self):
- self.load('remove_header')
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Header': 'length',
+ 'Connection': 'close',
+ }
+ )['headers']['X-Has-Header']
+ == 'false'
+ ), 'has header length'
- resp = self.get(
+ assert (
+ client.get(
headers={
'Host': 'localhost',
- 'X-Remove': 'X-Header',
+ 'X-Header': 'Date',
'Connection': 'close',
}
- )
- assert resp['headers']['Was-Header'] == 'true', 'was header'
- assert resp['headers']['Has-Header'] == 'false', 'has header'
- assert not ('X-Header' in resp['headers']), 'remove header'
-
- def test_node_application_remove_header_nonexisting(self):
- self.load('remove_header')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Remove': 'blah',
- 'Connection': 'close',
- }
- )['headers']['Has-Header']
- == 'true'
- ), 'remove header nonexisting'
-
- def test_node_application_update_header(self):
- self.load('update_header')
-
- assert self.get()['headers']['X-Header'] == 'new', 'update header'
-
- def test_node_application_set_header_array(self):
- self.load('set_header_array')
-
- assert self.get()['headers']['Set-Cookie'] == [
- 'tc=one,two,three',
- 'tc=four,five,six',
- ], 'set header array'
-
- @pytest.mark.skip('not yet')
- def test_node_application_status_message(self):
- self.load('status_message')
-
- assert re.search(r'200 blah', self.get(raw_resp=True)), 'status message'
-
- def test_node_application_get_header_type(self):
- self.load('get_header_type')
-
- assert self.get()['headers']['X-Type'] == 'number', 'get header type'
-
- def test_node_application_header_name_case(self):
- self.load('header_name_case')
-
- headers = self.get()['headers']
-
- assert headers['X-HEADER'] == '3', 'header value'
- assert 'X-Header' not in headers, 'insensitive'
- assert 'X-header' not in headers, 'insensitive 2'
-
- def test_node_application_promise_handler_write_after_end(self):
- self.load('promise_handler')
-
- assert (
- self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'X-Write-Call': '1',
- 'Connection': 'close',
- },
- body='callback',
- )['status']
- == 200
- ), 'promise handler request write after end'
-
- def test_node_application_promise_end(self, temp_dir):
- self.load('promise_end')
-
- assert (
- self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'Connection': 'close',
- },
- body='end',
- )['status']
- == 200
- ), 'promise end request'
- assert waitforfiles(f'{temp_dir}/node/callback'), 'promise end'
-
- @pytest.mark.skip('not yet')
- def test_node_application_header_name_valid(self):
- self.load('header_name_valid')
-
- assert 'status' not in self.get(), 'header name valid'
-
- def test_node_application_header_value_object(self):
- self.load('header_value_object')
-
- assert 'X-Header' in self.get()['headers'], 'header value object'
-
- def test_node_application_get_header_names(self):
- self.load('get_header_names')
-
- assert self.get()['headers']['X-Names'] == [
- 'date',
- 'x-header',
- ], 'get header names'
-
- def test_node_application_has_header(self):
- self.load('has_header')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Header': 'length',
- 'Connection': 'close',
- }
- )['headers']['X-Has-Header']
- == 'false'
- ), 'has header length'
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Header': 'Date',
- 'Connection': 'close',
- }
- )['headers']['X-Has-Header']
- == 'false'
- ), 'has header date'
-
- def test_node_application_write_multiple(self):
- self.load('write_multiple')
-
- assert self.get()['body'] == 'writewrite2end', 'write multiple'
+ )['headers']['X-Has-Header']
+ == 'false'
+ ), 'has header date'
+
+
+def test_node_application_write_multiple():
+ client.load('write_multiple')
+
+ assert client.get()['body'] == 'writewrite2end', 'write multiple'
diff --git a/test/test_node_es_modules.py b/test/test_node_es_modules.py
index 8a9cb181..ac2c545f 100644
--- a/test/test_node_es_modules.py
+++ b/test/test_node_es_modules.py
@@ -1,48 +1,48 @@
from packaging import version
-from unit.applications.lang.node import TestApplicationNode
-from unit.applications.websockets import TestApplicationWebsocket
+from unit.applications.lang.node import ApplicationNode
+from unit.applications.websockets import ApplicationWebsocket
+prerequisites = {
+ 'modules': {'node': lambda v: version.parse(v) >= version.parse('14.16.0')}
+}
-class TestNodeESModules(TestApplicationNode):
- prerequisites = {
- 'modules': {
- 'node': lambda v: version.parse(v) >= version.parse('14.16.0')
- }
- }
+client = ApplicationNode(es_modules=True)
+ws = ApplicationWebsocket()
- es_modules = True
- ws = TestApplicationWebsocket()
- def assert_basic_application(self):
- resp = self.get()
- assert resp['headers']['Content-Type'] == 'text/plain', 'basic header'
- assert resp['body'] == 'Hello World\n', 'basic body'
+def assert_basic_application():
+ resp = client.get()
+ assert resp['headers']['Content-Type'] == 'text/plain', 'basic header'
+ assert resp['body'] == 'Hello World\n', 'basic body'
- def test_node_es_modules_loader_http(self):
- self.load('loader/es_modules_http', name="app.mjs")
- self.assert_basic_application()
+def test_node_es_modules_loader_http():
+ client.load('loader/es_modules_http', name="app.mjs")
- def test_node_es_modules_loader_http_indirect(self):
- self.load('loader/es_modules_http_indirect', name="app.js")
+ assert_basic_application()
- self.assert_basic_application()
- def test_node_es_modules_loader_websockets(self):
- self.load('loader/es_modules_websocket', name="app.mjs")
+def test_node_es_modules_loader_http_indirect():
+ client.load('loader/es_modules_http_indirect', name="app.js")
- message = 'blah'
+ assert_basic_application()
- _, sock, _ = self.ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
+def test_node_es_modules_loader_websockets():
+ client.load('loader/es_modules_websocket', name="app.mjs")
- assert message == frame['data'].decode('utf-8'), 'mirror'
+ message = 'blah'
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
+ _, sock, _ = ws.upgrade()
- assert message == frame['data'].decode('utf-8'), 'mirror 2'
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- sock.close()
+ assert message == frame['data'].decode('utf-8'), 'mirror'
+
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
+
+ assert message == frame['data'].decode('utf-8'), 'mirror 2'
+
+ sock.close()
diff --git a/test/test_node_websockets.py b/test/test_node_websockets.py
index f1767cac..d26452aa 100644
--- a/test/test_node_websockets.py
+++ b/test/test_node_websockets.py
@@ -2,1427 +2,1433 @@ import struct
import time
import pytest
-from unit.applications.lang.node import TestApplicationNode
-from unit.applications.websockets import TestApplicationWebsocket
-from unit.option import option
+from unit.applications.lang.node import ApplicationNode
+from unit.applications.websockets import ApplicationWebsocket
+prerequisites = {'modules': {'node': 'any'}}
-class TestNodeWebsockets(TestApplicationNode):
- prerequisites = {'modules': {'node': 'any'}}
+client = ApplicationNode()
+ws = ApplicationWebsocket()
- ws = TestApplicationWebsocket()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request, skip_alert):
- assert 'success' in self.conf(
- {'http': {'websocket': {'keepalive_interval': 0}}}, 'settings'
- ), 'clear keepalive_interval'
+@pytest.fixture(autouse=True)
+def setup_method_fixture(skip_alert):
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'keepalive_interval': 0}}}, 'settings'
+ ), 'clear keepalive_interval'
- skip_alert(r'socket close\(\d+\) failed')
+ skip_alert(r'socket close\(\d+\) failed')
- def close_connection(self, sock):
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty sock'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+def close_connection(sock):
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty sock'
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
- def check_close(self, sock, code=1000, no_close=False, frame=None):
- if frame == None:
- frame = self.ws.frame_read(sock)
+ check_close(sock)
- assert frame['fin'] == True, 'close fin'
- assert frame['opcode'] == self.ws.OP_CLOSE, 'close opcode'
- assert frame['code'] == code, 'close code'
- if not no_close:
- sock.close()
+def check_close(sock, code=1000, no_close=False, frame=None):
+ if frame is None:
+ frame = ws.frame_read(sock)
- def check_frame(self, frame, fin, opcode, payload, decode=True):
- if opcode == self.ws.OP_BINARY or not decode:
- data = frame['data']
- else:
- data = frame['data'].decode('utf-8')
+ assert frame['fin'], 'close fin'
+ assert frame['opcode'] == ws.OP_CLOSE, 'close opcode'
+ assert frame['code'] == code, 'close code'
- assert frame['fin'] == fin, 'fin'
- assert frame['opcode'] == opcode, 'opcode'
- assert data == payload, 'payload'
+ if not no_close:
+ sock.close()
- def test_node_websockets_handshake(self):
- self.load('websockets/mirror')
- resp, sock, key = self.ws.upgrade()
- sock.close()
+def check_frame(frame, fin, opcode, payload, decode=True):
+ if opcode == ws.OP_BINARY or not decode:
+ data = frame['data']
+ else:
+ data = frame['data'].decode('utf-8')
- assert resp['status'] == 101, 'status'
- assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
- assert resp['headers']['Connection'] == 'Upgrade', 'connection'
- assert resp['headers']['Sec-WebSocket-Accept'] == self.ws.accept(
- key
- ), 'key'
+ assert frame['fin'] == fin, 'fin'
+ assert frame['opcode'] == opcode, 'opcode'
+ assert data == payload, 'payload'
- def test_node_websockets_mirror(self):
- self.load('websockets/mirror')
- message = 'blah'
+def test_node_websockets_handshake():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ resp, sock, key = ws.upgrade()
+ sock.close()
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
+ assert resp['status'] == 101, 'status'
+ assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
+ assert resp['headers']['Connection'] == 'Upgrade', 'connection'
+ assert resp['headers']['Sec-WebSocket-Accept'] == ws.accept(key), 'key'
- assert message == frame['data'].decode('utf-8'), 'mirror'
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
- frame = self.ws.frame_read(sock)
+def test_node_websockets_mirror():
+ client.load('websockets/mirror')
- assert message == frame['data'].decode('utf-8'), 'mirror 2'
+ message = 'blah'
- sock.close()
+ _, sock, _ = ws.upgrade()
- def test_node_websockets_no_mask(self):
- self.load('websockets/mirror')
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- message = 'blah'
+ assert message == frame['data'].decode('utf-8'), 'mirror'
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, message)
+ frame = ws.frame_read(sock)
- self.ws.frame_write(sock, self.ws.OP_TEXT, message, mask=False)
+ assert message == frame['data'].decode('utf-8'), 'mirror 2'
- frame = self.ws.frame_read(sock)
+ sock.close()
- assert frame['opcode'] == self.ws.OP_CLOSE, 'no mask opcode'
- assert frame['code'] == 1002, 'no mask close code'
- sock.close()
+def test_node_websockets_no_mask():
+ client.load('websockets/mirror')
- def test_node_websockets_fragmentation(self):
- self.load('websockets/mirror')
+ message = 'blah'
- message = 'blah'
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, message, mask=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, message, fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, ' ', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, message)
+ frame = ws.frame_read(sock)
- frame = self.ws.frame_read(sock)
+ assert frame['opcode'] == ws.OP_CLOSE, 'no mask opcode'
+ assert frame['code'] == 1002, 'no mask close code'
- assert f'{message} {message}' == frame['data'].decode(
- 'utf-8'
- ), 'mirror framing'
+ sock.close()
- sock.close()
- def test_node_websockets_frame_fragmentation_invalid(self):
- self.load('websockets/mirror')
+def test_node_websockets_fragmentation():
+ client.load('websockets/mirror')
- message = 'blah'
+ message = 'blah'
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_PING, message, fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, message, fin=False)
+ ws.frame_write(sock, ws.OP_CONT, ' ', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, message)
- frame = self.ws.frame_read(sock)
+ frame = ws.frame_read(sock)
- frame.pop('data')
- assert frame == {
- 'fin': True,
- 'rsv1': False,
- 'rsv2': False,
- 'rsv3': False,
- 'opcode': self.ws.OP_CLOSE,
- 'mask': 0,
- 'code': 1002,
- 'reason': 'Fragmented control frame',
- }, 'close frame'
+ assert f'{message} {message}' == frame['data'].decode(
+ 'utf-8'
+ ), 'mirror framing'
- sock.close()
+ sock.close()
- def test_node_websockets_large(self):
- self.load('websockets/mirror_fragmentation')
- message = '0123456789' * 3000
+def test_node_websockets_frame_fragmentation_invalid():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ message = 'blah'
- self.ws.frame_write(sock, self.ws.OP_TEXT, message)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- data = frame['data'].decode('utf-8')
+ ws.frame_write(sock, ws.OP_PING, message, fin=False)
- frame = self.ws.frame_read(sock)
- data += frame['data'].decode('utf-8')
+ frame = ws.frame_read(sock)
- assert message == data, 'large'
+ frame.pop('data')
+ assert frame == {
+ 'fin': True,
+ 'rsv1': False,
+ 'rsv2': False,
+ 'rsv3': False,
+ 'opcode': ws.OP_CLOSE,
+ 'mask': 0,
+ 'code': 1002,
+ 'reason': 'Fragmented control frame',
+ }, 'close frame'
- sock.close()
+ sock.close()
- def test_node_websockets_two_clients(self):
- self.load('websockets/mirror')
- message1 = 'blah1'
- message2 = 'blah2'
+def test_node_websockets_large():
+ client.load('websockets/mirror_fragmentation')
- _, sock1, _ = self.ws.upgrade()
- _, sock2, _ = self.ws.upgrade()
+ message = '0123456789' * 3000
- self.ws.frame_write(sock1, self.ws.OP_TEXT, message1)
- self.ws.frame_write(sock2, self.ws.OP_TEXT, message2)
+ _, sock, _ = ws.upgrade()
- frame1 = self.ws.frame_read(sock1)
- frame2 = self.ws.frame_read(sock2)
+ ws.frame_write(sock, ws.OP_TEXT, message)
- assert message1 == frame1['data'].decode('utf-8'), 'client 1'
- assert message2 == frame2['data'].decode('utf-8'), 'client 2'
+ frame = ws.frame_read(sock)
+ data = frame['data'].decode('utf-8')
- sock1.close()
- sock2.close()
+ frame = ws.frame_read(sock)
+ data += frame['data'].decode('utf-8')
- @pytest.mark.skip('not yet')
- def test_node_websockets_handshake_upgrade_absent(
- self,
- ): # FAIL https://tools.ietf.org/html/rfc6455#section-4.2.1
- self.load('websockets/mirror')
+ assert message == data, 'large'
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
+ sock.close()
- assert resp['status'] == 400, 'upgrade absent'
- def test_node_websockets_handshake_case_insensitive(self):
- self.load('websockets/mirror')
+def test_node_websockets_two_clients():
+ client.load('websockets/mirror')
- resp, sock, _ = self.ws.upgrade(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'WEBSOCKET',
- 'Connection': 'UPGRADE',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- }
- )
- sock.close()
+ message1 = 'blah1'
+ message2 = 'blah2'
- assert resp['status'] == 101, 'status'
-
- @pytest.mark.skip('not yet')
- def test_node_websockets_handshake_connection_absent(self): # FAIL
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_node_websockets_handshake_version_absent(self):
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- },
- )
-
- assert resp['status'] == 426, 'status'
-
- @pytest.mark.skip('not yet')
- def test_node_websockets_handshake_key_invalid(self):
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': '!',
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'key length'
-
- key = self.ws.key()
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': [key, key],
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert (
- resp['status'] == 400
- ), 'key double' # FAIL https://tools.ietf.org/html/rfc6455#section-11.3.1
-
- def test_node_websockets_handshake_method_invalid(self):
- self.load('websockets/mirror')
-
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_node_websockets_handshake_http_10(self):
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- http_10=True,
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_node_websockets_handshake_uri_invalid(self):
- self.load('websockets/mirror')
-
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': self.ws.key(),
- 'Sec-WebSocket-Protocol': 'chat',
- 'Sec-WebSocket-Version': 13,
- },
- url='!',
- )
-
- assert resp['status'] == 400, 'status'
-
- def test_node_websockets_protocol_absent(self):
- self.load('websockets/mirror')
-
- key = self.ws.key()
- resp, sock, _ = self.ws.upgrade(
- headers={
- 'Host': 'localhost',
- 'Upgrade': 'websocket',
- 'Connection': 'Upgrade',
- 'Sec-WebSocket-Key': key,
- 'Sec-WebSocket-Version': 13,
- }
- )
- sock.close()
+ _, sock1, _ = ws.upgrade()
+ _, sock2, _ = ws.upgrade()
- assert resp['status'] == 101, 'status'
- assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
- assert resp['headers']['Connection'] == 'Upgrade', 'connection'
- assert resp['headers']['Sec-WebSocket-Accept'] == self.ws.accept(
- key
- ), 'key'
+ ws.frame_write(sock1, ws.OP_TEXT, message1)
+ ws.frame_write(sock2, ws.OP_TEXT, message2)
- # autobahn-testsuite
- #
- # Some of the following tests fail because Unit does not support UTF-8
- # validation for websocket frames. It should be implemented
- # by the application, if necessary.
+ frame1 = ws.frame_read(sock1)
+ frame2 = ws.frame_read(sock2)
- def test_node_websockets_1_1_1__1_1_8(self):
- self.load('websockets/mirror')
+ assert message1 == frame1['data'].decode('utf-8'), 'client 1'
+ assert message2 == frame2['data'].decode('utf-8'), 'client 2'
- opcode = self.ws.OP_TEXT
+ sock1.close()
+ sock2.close()
- _, sock, _ = self.ws.upgrade()
- def check_length(length, chopsize=None):
- payload = '*' * length
+# FAIL https://tools.ietf.org/html/rfc6455#section-4.2.1
+@pytest.mark.skip('not yet')
+def test_node_websockets_handshake_upgrade_absent():
+ client.load('websockets/mirror')
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, opcode, payload)
+ assert resp['status'] == 400, 'upgrade absent'
- check_length(0) # 1_1_1
- check_length(125) # 1_1_2
- check_length(126) # 1_1_3
- check_length(127) # 1_1_4
- check_length(128) # 1_1_5
- check_length(65535) # 1_1_6
- check_length(65536) # 1_1_7
- check_length(65536, chopsize=997) # 1_1_8
- self.close_connection(sock)
+def test_node_websockets_handshake_case_insensitive():
+ client.load('websockets/mirror')
- def test_node_websockets_1_2_1__1_2_8(self):
- self.load('websockets/mirror')
+ resp, sock, _ = ws.upgrade(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'WEBSOCKET',
+ 'Connection': 'UPGRADE',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ }
+ )
+ sock.close()
- opcode = self.ws.OP_BINARY
+ assert resp['status'] == 101, 'status'
- _, sock, _ = self.ws.upgrade()
- def check_length(length, chopsize=None):
- payload = b'\xfe' * length
+@pytest.mark.skip('not yet')
+def test_node_websockets_handshake_connection_absent(): # FAIL
+ client.load('websockets/mirror')
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- frame = self.ws.frame_read(sock)
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
- self.check_frame(frame, True, opcode, payload)
+ assert resp['status'] == 400, 'status'
- check_length(0) # 1_2_1
- check_length(125) # 1_2_2
- check_length(126) # 1_2_3
- check_length(127) # 1_2_4
- check_length(128) # 1_2_5
- check_length(65535) # 1_2_6
- check_length(65536) # 1_2_7
- check_length(65536, chopsize=997) # 1_2_8
- self.close_connection(sock)
+def test_node_websockets_handshake_version_absent():
+ client.load('websockets/mirror')
- def test_node_websockets_2_1__2_6(self):
- self.load('websockets/mirror')
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ },
+ )
+
+ assert resp['status'] == 426, 'status'
+
+
+@pytest.mark.skip('not yet')
+def test_node_websockets_handshake_key_invalid():
+ client.load('websockets/mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': '!',
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert resp['status'] == 400, 'key length'
+
+ key = ws.key()
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': [key, key],
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert (
+ resp['status'] == 400
+ ), 'key double' # FAIL https://tools.ietf.org/html/rfc6455#section-11.3.1
+
+
+def test_node_websockets_handshake_method_invalid():
+ client.load('websockets/mirror')
+
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ )
+
+ assert resp['status'] == 400, 'status'
+
+
+def test_node_websockets_handshake_http_10():
+ client.load('websockets/mirror')
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ http_10=True,
+ )
+
+ assert resp['status'] == 400, 'status'
- op_ping = self.ws.OP_PING
- op_pong = self.ws.OP_PONG
- _, sock, _ = self.ws.upgrade()
+def test_node_websockets_handshake_uri_invalid():
+ client.load('websockets/mirror')
- def check_ping(payload, chopsize=None, decode=True):
- self.ws.frame_write(sock, op_ping, payload, chopsize=chopsize)
- frame = self.ws.frame_read(sock)
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': ws.key(),
+ 'Sec-WebSocket-Protocol': 'chat',
+ 'Sec-WebSocket-Version': 13,
+ },
+ url='!',
+ )
- self.check_frame(frame, True, op_pong, payload, decode=decode)
+ assert resp['status'] == 400, 'status'
- check_ping('') # 2_1
- check_ping('Hello, world!') # 2_2
- check_ping(b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff', decode=False) # 2_3
- check_ping(b'\xfe' * 125, decode=False) # 2_4
- check_ping(b'\xfe' * 125, chopsize=1, decode=False) # 2_6
- self.close_connection(sock)
+def test_node_websockets_protocol_absent():
+ client.load('websockets/mirror')
- # 2_5
+ key = ws.key()
+ resp, sock, _ = ws.upgrade(
+ headers={
+ 'Host': 'localhost',
+ 'Upgrade': 'websocket',
+ 'Connection': 'Upgrade',
+ 'Sec-WebSocket-Key': key,
+ 'Sec-WebSocket-Version': 13,
+ }
+ )
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ assert resp['status'] == 101, 'status'
+ assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
+ assert resp['headers']['Connection'] == 'Upgrade', 'connection'
+ assert resp['headers']['Sec-WebSocket-Accept'] == ws.accept(key), 'key'
- self.ws.frame_write(sock, self.ws.OP_PING, b'\xfe' * 126)
- self.check_close(sock, 1002)
- def test_node_websockets_2_7__2_9(self):
- self.load('websockets/mirror')
+# autobahn-testsuite
+#
+# Some of the following tests fail because Unit does not support UTF-8
+# validation for websocket frames. It should be implemented
+# by the application, if necessary.
- # 2_7
- _, sock, _ = self.ws.upgrade()
+def test_node_websockets_1_1_1__1_1_8():
+ client.load('websockets/mirror')
- self.ws.frame_write(sock, self.ws.OP_PONG, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', '2_7'
+ opcode = ws.OP_TEXT
- # 2_8
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_PONG, 'unsolicited pong payload')
- assert self.recvall(sock, read_timeout=0.1) == b'', '2_8'
+ def check_length(length, chopsize=None):
+ payload = '*' * length
- # 2_9
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- payload = 'ping payload'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, opcode, payload)
- self.ws.frame_write(sock, self.ws.OP_PONG, 'unsolicited pong payload')
- self.ws.frame_write(sock, self.ws.OP_PING, payload)
+ check_length(0) # 1_1_1
+ check_length(125) # 1_1_2
+ check_length(126) # 1_1_3
+ check_length(127) # 1_1_4
+ check_length(128) # 1_1_5
+ check_length(65535) # 1_1_6
+ check_length(65536) # 1_1_7
+ check_length(65536, chopsize=997) # 1_1_8
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, payload)
+ close_connection(sock)
- self.close_connection(sock)
- def test_node_websockets_2_10__2_11(self):
- self.load('websockets/mirror')
+def test_node_websockets_1_2_1__1_2_8():
+ client.load('websockets/mirror')
- # 2_10
+ opcode = ws.OP_BINARY
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- for i in range(0, 10):
- self.ws.frame_write(sock, self.ws.OP_PING, f'payload-{i}')
+ def check_length(length, chopsize=None):
+ payload = b'\xfe' * length
- for i in range(0, 10):
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, f'payload-{i}')
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
+ frame = ws.frame_read(sock)
- # 2_11
+ check_frame(frame, True, opcode, payload)
- for i in range(0, 10):
- opcode = self.ws.OP_PING
- self.ws.frame_write(sock, opcode, f'payload-{i}', chopsize=1)
+ check_length(0) # 1_2_1
+ check_length(125) # 1_2_2
+ check_length(126) # 1_2_3
+ check_length(127) # 1_2_4
+ check_length(128) # 1_2_5
+ check_length(65535) # 1_2_6
+ check_length(65536) # 1_2_7
+ check_length(65536, chopsize=997) # 1_2_8
- for i in range(0, 10):
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, f'payload-{i}')
+ close_connection(sock)
- self.close_connection(sock)
- @pytest.mark.skip('not yet')
- def test_node_websockets_3_1__3_7(self):
- self.load('websockets/mirror')
+def test_node_websockets_2_1__2_6():
+ client.load('websockets/mirror')
- payload = 'Hello, world!'
+ op_ping = ws.OP_PING
+ op_pong = ws.OP_PONG
- # 3_1
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ def check_ping(payload, chopsize=None, decode=True):
+ ws.frame_write(sock, op_ping, payload, chopsize=chopsize)
+ frame = ws.frame_read(sock)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, rsv1=True)
- self.check_close(sock, 1002)
+ check_frame(frame, True, op_pong, payload, decode=decode)
- # 3_2
+ check_ping('') # 2_1
+ check_ping('Hello, world!') # 2_2
+ check_ping(b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff', decode=False) # 2_3
+ check_ping(b'\xfe' * 125, decode=False) # 2_4
+ check_ping(b'\xfe' * 125, chopsize=1, decode=False) # 2_6
- _, sock, _ = self.ws.upgrade()
+ close_connection(sock)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, rsv2=True)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 2_5
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- self.check_close(sock, 1002, no_close=True)
+ ws.frame_write(sock, ws.OP_PING, b'\xfe' * 126)
+ check_close(sock, 1002)
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_2'
- sock.close()
- # 3_3
+def test_node_websockets_2_7__2_9():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ # 2_7
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_PONG, '')
+ assert client.recvall(sock, read_timeout=0.1) == b'', '2_7'
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, payload, rsv1=True, rsv2=True
- )
+ # 2_8
- self.check_close(sock, 1002, no_close=True)
+ ws.frame_write(sock, ws.OP_PONG, 'unsolicited pong payload')
+ assert client.recvall(sock, read_timeout=0.1) == b'', '2_8'
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_3'
- sock.close()
+ # 2_9
- # 3_4
+ payload = 'ping payload'
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_PONG, 'unsolicited pong payload')
+ ws.frame_write(sock, ws.OP_PING, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, payload, rsv3=True, chopsize=1
- )
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ close_connection(sock)
- self.check_close(sock, 1002, no_close=True)
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_4'
- sock.close()
+def test_node_websockets_2_10__2_11():
+ client.load('websockets/mirror')
- # 3_5
+ # 2_10
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(
- sock,
- self.ws.OP_BINARY,
- b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff',
- rsv1=True,
- rsv3=True,
- )
+ for i in range(0, 10):
+ ws.frame_write(sock, ws.OP_PING, f'payload-{i}')
- self.check_close(sock, 1002)
+ for i in range(0, 10):
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, f'payload-{i}')
- # 3_6
+ # 2_11
- _, sock, _ = self.ws.upgrade()
+ for i in range(0, 10):
+ opcode = ws.OP_PING
+ ws.frame_write(sock, opcode, f'payload-{i}', chopsize=1)
- self.ws.frame_write(
- sock, self.ws.OP_PING, payload, rsv2=True, rsv3=True
- )
+ for i in range(0, 10):
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, f'payload-{i}')
- self.check_close(sock, 1002)
+ close_connection(sock)
- # 3_7
- _, sock, _ = self.ws.upgrade()
+@pytest.mark.skip('not yet')
+def test_node_websockets_3_1__3_7():
+ client.load('websockets/mirror')
- self.ws.frame_write(
- sock, self.ws.OP_CLOSE, payload, rsv1=True, rsv2=True, rsv3=True
- )
+ payload = 'Hello, world!'
- self.check_close(sock, 1002)
+ # 3_1
- def test_node_websockets_4_1_1__4_2_5(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- payload = 'Hello, world!'
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv1=True)
+ check_close(sock, 1002)
- # 4_1_1
+ # 3_2
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, 0x03, '')
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv2=True)
+ ws.frame_write(sock, ws.OP_PING, '')
- # 4_1_2
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ check_close(sock, 1002, no_close=True)
- self.ws.frame_write(sock, 0x04, 'reserved opcode payload')
- self.check_close(sock, 1002)
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_2'
+ sock.close()
- # 4_1_3
+ # 3_3
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, 0x05, '')
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv1=True, rsv2=True)
- self.check_close(sock, 1002)
+ check_close(sock, 1002, no_close=True)
- # 4_1_4
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_3'
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ # 3_4
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_TEXT, payload, rsv3=True, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- self.ws.frame_write(sock, 0x06, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.check_close(sock, 1002)
+ check_close(sock, 1002, no_close=True)
- # 4_1_5
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty 3_4'
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ # 3_5
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(
+ sock,
+ ws.OP_BINARY,
+ b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff',
+ rsv1=True,
+ rsv3=True,
+ )
- self.ws.frame_write(sock, 0x07, payload, chopsize=1)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ check_close(sock, 1002)
- self.check_close(sock, 1002)
+ # 3_6
- # 4_2_1
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_PING, payload, rsv2=True, rsv3=True)
- self.ws.frame_write(sock, 0x0B, '')
- self.check_close(sock, 1002)
+ check_close(sock, 1002)
- # 4_2_2
+ # 3_7
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, 0x0C, 'reserved opcode payload')
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_CLOSE, payload, rsv1=True, rsv2=True, rsv3=True)
- # 4_2_3
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+def test_node_websockets_4_1_1__4_2_5():
+ client.load('websockets/mirror')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ payload = 'Hello, world!'
- self.ws.frame_write(sock, 0x0D, '')
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 4_1_1
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_2_4
+ ws.frame_write(sock, 0x03, '')
+ check_close(sock, 1002)
- _, sock, _ = self.ws.upgrade()
+ # 4_1_2
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, 0x04, 'reserved opcode payload')
+ check_close(sock, 1002)
- self.ws.frame_write(sock, 0x0E, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 4_1_3
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 4_2_5
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
+ ws.frame_write(sock, 0x05, '')
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, 0x0F, payload, chopsize=1)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
+ # 4_1_4
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- def test_node_websockets_5_1__5_20(self):
- self.load('websockets/mirror')
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- # 5_1
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, 0x06, payload)
+ ws.frame_write(sock, ws.OP_PING, '')
- self.ws.frame_write(sock, self.ws.OP_PING, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.check_close(sock, 1002)
+ check_close(sock, 1002)
- # 5_2
+ # 4_1_5
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_PONG, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
- # 5_3
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, 0x07, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ # 4_2_1
- # 5_4
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_4'
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ ws.frame_write(sock, 0x0B, '')
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ # 4_2_2
- # 5_5
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'fragment1', fin=False, chopsize=1
- )
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'fragment2', fin=True, chopsize=1
- )
+ ws.frame_write(sock, 0x0C, 'reserved opcode payload')
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ # 4_2_3
- # 5_6
+ _, sock, _ = ws.upgrade()
- ping_payload = 'ping payload'
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ ws.frame_write(sock, 0x0D, '')
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ check_close(sock, 1002)
- # 5_7
+ # 4_2_4
- ping_payload = 'ping payload'
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_7'
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ ws.frame_write(sock, 0x0E, payload)
+ ws.frame_write(sock, ws.OP_PING, '')
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ # 4_2_5
- # 5_8
+ _, sock, _ = ws.upgrade()
- ping_payload = 'ping payload'
+ ws.frame_write(sock, ws.OP_TEXT, payload, chopsize=1)
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'fragment1', fin=False, chopsize=1
- )
- self.ws.frame_write(sock, self.ws.OP_PING, ping_payload, chopsize=1)
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'fragment2', fin=True, chopsize=1
- )
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)
+ ws.frame_write(sock, 0x0F, payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
+ check_close(sock, 1002)
- # 5_9
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=True
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+def test_node_websockets_5_1__5_20():
+ client.load('websockets/mirror')
- # 5_10
+ # 5_1
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=True
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_PING, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- # 5_11
+ # 5_2
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(
- sock,
- self.ws.OP_CONT,
- 'non-continuation payload',
- fin=True,
- chopsize=1,
- )
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1
- )
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_PONG, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ check_close(sock, 1002)
- # 5_12
+ # 5_3
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=False
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- # 5_13
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_4
- self.ws.frame_write(
- sock, self.ws.OP_CONT, 'non-continuation payload', fin=False
- )
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_4'
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- # 5_14
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_5
- self.ws.frame_write(
- sock,
- self.ws.OP_CONT,
- 'non-continuation payload',
- fin=False,
- chopsize=1,
- )
- self.ws.frame_write(
- sock, self.ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1
- )
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False, chopsize=1)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True, chopsize=1)
- # 5_15
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_6
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment4', fin=True)
+ ping_payload = 'ping payload'
- frame = self.ws.frame_read(sock)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_PING, ping_payload)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- if frame['opcode'] == self.ws.OP_TEXT:
- self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')
- frame = None
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- self.check_close(sock, 1002, frame=frame)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- # 5_16
+ # 5_7
- _, sock, _ = self.ws.upgrade()
+ ping_payload = 'ping payload'
- for i in range(0, 2):
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_7'
- # 5_17
+ ws.frame_write(sock, ws.OP_PING, ping_payload)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- for i in range(0, 2):
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment1', fin=True)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=True)
- self.check_close(sock, 1002)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
- # 5_18
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- _, sock, _ = self.ws.upgrade()
+ # 5_8
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2')
- self.check_close(sock, 1002)
+ ping_payload = 'ping payload'
- # 5_19
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False, chopsize=1)
+ ws.frame_write(sock, ws.OP_PING, ping_payload, chopsize=1)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True, chopsize=1)
- _, sock, _ = self.ws.upgrade()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, ping_payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 1!')
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
- time.sleep(1)
+ # 5_9
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment4', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 2!')
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment5')
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 1!')
+ # 5_10
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 2!')
+ _, sock, _ = ws.upgrade()
- self.check_frame(
- self.ws.frame_read(sock),
- True,
- self.ws.OP_TEXT,
- 'fragment1fragment2fragment3fragment4fragment5',
- )
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- # 5_20
+ # 5_11
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 1!')
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 1!')
+ ws.frame_write(
+ sock,
+ ws.OP_CONT,
+ 'non-continuation payload',
+ fin=True,
+ chopsize=1,
+ )
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1)
+ check_close(sock, 1002)
- time.sleep(1)
+ # 5_12
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment4', fin=False)
- self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 2!')
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 2!')
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- assert self.recvall(sock, read_timeout=0.1) == b'', '5_20'
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment5')
+ # 5_13
- self.check_frame(
- self.ws.frame_read(sock),
- True,
- self.ws.OP_TEXT,
- 'fragment1fragment2fragment3fragment4fragment5',
- )
+ _, sock, _ = ws.upgrade()
- self.close_connection(sock)
+ ws.frame_write(sock, ws.OP_CONT, 'non-continuation payload', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True)
+ check_close(sock, 1002)
- def test_node_websockets_6_1_1__6_4_4(self):
- self.load('websockets/mirror')
+ # 5_14
- # 6_1_1
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(
+ sock,
+ ws.OP_CONT,
+ 'non-continuation payload',
+ fin=False,
+ chopsize=1,
+ )
+ ws.frame_write(sock, ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1)
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_TEXT, '')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, '')
+ # 5_15
- # 6_1_2
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_TEXT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '')
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=True)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment4', fin=True)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, '')
+ frame = ws.frame_read(sock)
- # 6_1_3
+ if frame['opcode'] == ws.OP_TEXT:
+ check_frame(frame, True, ws.OP_TEXT, 'fragment1fragment2')
+ frame = None
- payload = 'middle frame payload'
+ check_close(sock, 1002, frame=frame)
- self.ws.frame_write(sock, self.ws.OP_TEXT, '', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, payload, fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, '')
+ # 5_16
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- # 6_2_1
+ for _ in range(0, 2):
+ ws.frame_write(sock, ws.OP_CONT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=True)
+ check_close(sock, 1002)
- payload = 'Hello-µ@ßöäüàá-UTF-8!!'
+ # 5_17
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ for _ in range(0, 2):
+ ws.frame_write(sock, ws.OP_CONT, 'fragment1', fin=True)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=True)
+ check_close(sock, 1002)
- # 6_2_2
+ # 5_18
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload[:12], fin=False)
- self.ws.frame_write(sock, self.ws.OP_CONT, payload[12:])
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment2')
+ check_close(sock, 1002)
- # 6_2_3
+ # 5_19
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
+ _, sock, _ = ws.upgrade()
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 1!')
- # 6_2_4
+ time.sleep(1)
- payload = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment4', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 2!')
+ ws.frame_write(sock, ws.OP_CONT, 'fragment5')
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 1!')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 2!')
- self.close_connection(sock)
+ check_frame(
+ ws.frame_read(sock),
+ True,
+ ws.OP_TEXT,
+ 'fragment1fragment2fragment3fragment4fragment5',
+ )
- # Unit does not support UTF-8 validation
- #
- # # 6_3_1 FAIL
- #
- # payload_1 = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
- # payload_2 = '\xed\xa0\x80'
- # payload_3 = '\x65\x64\x69\x74\x65\x64'
- #
- # payload = payload_1 + payload_2 + payload_3
- #
- # self.ws.message(sock, self.ws.OP_TEXT, payload)
- # self.check_close(sock, 1007)
- #
- # # 6_3_2 FAIL
- #
- # _, sock, _ = self.ws.upgrade()
- #
- # self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
- # self.check_close(sock, 1007)
- #
- # # 6_4_1 ... 6_4_4 FAIL
+ # 5_20
- def test_node_websockets_7_1_1__7_5_1(self):
- self.load('websockets/mirror')
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 1!')
- # 7_1_1
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 1!')
- _, sock, _ = self.ws.upgrade()
+ time.sleep(1)
- payload = "Hello World!"
+ ws.frame_write(sock, ws.OP_CONT, 'fragment3', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, 'fragment4', fin=False)
+ ws.frame_write(sock, ws.OP_PING, 'pongme 2!')
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PONG, 'pongme 2!')
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+ assert client.recvall(sock, read_timeout=0.1) == b'', '5_20'
+ ws.frame_write(sock, ws.OP_CONT, 'fragment5')
- self.close_connection(sock)
+ check_frame(
+ ws.frame_read(sock),
+ True,
+ ws.OP_TEXT,
+ 'fragment1fragment2fragment3fragment4fragment5',
+ )
- # 7_1_2
+ close_connection(sock)
- _, sock, _ = self.ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+def test_node_websockets_6_1_1__6_4_4():
+ client.load('websockets/mirror')
- self.check_close(sock)
+ # 6_1_1
- # 7_1_3
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, '')
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, '')
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
+ # 6_1_2
- self.ws.frame_write(sock, self.ws.OP_PING, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ ws.frame_write(sock, ws.OP_TEXT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '')
- sock.close()
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, '')
- # 7_1_4
+ # 6_1_3
- _, sock, _ = self.ws.upgrade()
+ payload = 'middle frame payload'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
+ ws.frame_write(sock, ws.OP_TEXT, '', fin=False)
+ ws.frame_write(sock, ws.OP_CONT, payload, fin=False)
+ ws.frame_write(sock, ws.OP_CONT, '')
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- sock.close()
+ # 6_2_1
- # 7_1_5
+ payload = 'Hello-µ@ßöäüàá-UTF-8!!'
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock, no_close=True)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ # 6_2_2
- sock.close()
+ ws.frame_write(sock, ws.OP_TEXT, payload[:12], fin=False)
+ ws.frame_write(sock, ws.OP_CONT, payload[12:])
- # 7_1_6
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ # 6_2_3
- self.ws.frame_write(sock, self.ws.OP_TEXT, 'BAsd7&jh23' * 26 * 2**10)
- self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
- self.recvall(sock, read_timeout=1)
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- self.ws.frame_write(sock, self.ws.OP_PING, '')
- assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
+ # 6_2_4
- sock.close()
+ payload = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
+
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
+
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
+
+ close_connection(sock)
+
+
+# Unit does not support UTF-8 validation
+#
+# # 6_3_1 FAIL
+#
+# payload_1 = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
+# payload_2 = '\xed\xa0\x80'
+# payload_3 = '\x65\x64\x69\x74\x65\x64'
+#
+# payload = payload_1 + payload_2 + payload_3
+#
+# ws.message(sock, ws.OP_TEXT, payload)
+# check_close(sock, 1007)
+#
+# # 6_3_2 FAIL
+#
+# _, sock, _ = ws.upgrade()
+#
+# ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1)
+# check_close(sock, 1007)
+#
+# # 6_4_1 ... 6_4_4 FAIL
+
+
+def test_node_websockets_7_1_1__7_5_1():
+ client.load('websockets/mirror')
+
+ # 7_1_1
+
+ _, sock, _ = ws.upgrade()
+
+ payload = "Hello World!"
+
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
+
+ close_connection(sock)
+
+ # 7_1_2
+
+ _, sock, _ = ws.upgrade()
+
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+
+ check_close(sock)
- # 7_3_1
+ # 7_1_3
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_CLOSE, '')
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
- # 7_3_2
+ ws.frame_write(sock, ws.OP_PING, '')
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- _, sock, _ = self.ws.upgrade()
+ sock.close()
- self.ws.frame_write(sock, self.ws.OP_CLOSE, 'a')
- self.check_close(sock, 1002)
+ # 7_1_4
- # 7_3_3
+ _, sock, _ = ws.upgrade()
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- # 7_3_4
+ sock.close()
- _, sock, _ = self.ws.upgrade()
+ # 7_1_5
- payload = self.ws.serialize_close(reason='Hello World!')
+ _, sock, _ = ws.upgrade()
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock)
+ ws.frame_write(sock, ws.OP_TEXT, 'fragment1', fin=False)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock, no_close=True)
- # 7_3_5
+ ws.frame_write(sock, ws.OP_CONT, 'fragment2')
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- _, sock, _ = self.ws.upgrade()
+ sock.close()
- payload = self.ws.serialize_close(reason='*' * 123)
+ # 7_1_6
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock)
+ _, sock, _ = ws.upgrade()
- # 7_3_6
+ ws.frame_write(sock, ws.OP_TEXT, 'BAsd7&jh23' * 26 * 2**10)
+ ws.frame_write(sock, ws.OP_TEXT, payload)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
- _, sock, _ = self.ws.upgrade()
+ client.recvall(sock, read_timeout=1)
- payload = self.ws.serialize_close(reason='*' * 124)
+ ws.frame_write(sock, ws.OP_PING, '')
+ assert client.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ sock.close()
- # # 7_5_1 FAIL Unit does not support UTF-8 validation
- #
- # _, sock, _ = self.ws.upgrade()
- #
- # payload = self.ws.serialize_close(reason = '\xce\xba\xe1\xbd\xb9\xcf' \
- # '\x83\xce\xbc\xce\xb5\xed\xa0\x80\x65\x64\x69\x74\x65\x64')
- #
- # self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- # self.check_close(sock, 1007)
+ # 7_3_1
- def test_node_websockets_7_7_X__7_9_X(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- valid_codes = [
- 1000,
- 1001,
- 1002,
- 1003,
- 1007,
- 1008,
- 1009,
- 1010,
- 1011,
- 3000,
- 3999,
- 4000,
- 4999,
- ]
+ ws.frame_write(sock, ws.OP_CLOSE, '')
+ check_close(sock)
- invalid_codes = [0, 999, 1004, 1005, 1006, 1016, 1100, 2000, 2999]
+ # 7_3_2
- for code in valid_codes:
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- payload = self.ws.serialize_close(code=code)
+ ws.frame_write(sock, ws.OP_CLOSE, 'a')
+ check_close(sock, 1002)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock)
+ # 7_3_3
- for code in invalid_codes:
- _, sock, _ = self.ws.upgrade()
+ _, sock, _ = ws.upgrade()
- payload = self.ws.serialize_close(code=code)
+ ws.frame_write(sock, ws.OP_CLOSE, ws.serialize_close())
+ check_close(sock)
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ # 7_3_4
- def test_node_websockets_7_13_1__7_13_2(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- # 7_13_1
+ payload = ws.serialize_close(reason='Hello World!')
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock)
- payload = self.ws.serialize_close(code=5000)
+ # 7_3_5
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- # 7_13_2
+ payload = ws.serialize_close(reason='*' * 123)
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock)
- payload = struct.pack('!I', 65536) + ''.encode('utf-8')
+ # 7_3_6
- self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
- self.check_close(sock, 1002)
+ _, sock, _ = ws.upgrade()
- def test_node_websockets_9_1_1__9_6_6(self, is_unsafe):
- if not is_unsafe:
- pytest.skip('unsafe, long run')
+ payload = ws.serialize_close(reason='*' * 124)
- self.load('websockets/mirror')
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
- assert 'success' in self.conf(
- {
- 'http': {
- 'websocket': {
- 'max_frame_size': 33554432,
- 'keepalive_interval': 0,
- }
+
+# # 7_5_1 FAIL Unit does not support UTF-8 validation
+#
+# _, sock, _ = ws.upgrade()
+#
+# payload = ws.serialize_close(reason = '\xce\xba\xe1\xbd\xb9\xcf' \
+# '\x83\xce\xbc\xce\xb5\xed\xa0\x80\x65\x64\x69\x74\x65\x64')
+#
+# ws.frame_write(sock, ws.OP_CLOSE, payload)
+# check_close(sock, 1007)
+
+
+def test_node_websockets_7_7_X__7_9_X():
+ client.load('websockets/mirror')
+
+ valid_codes = [
+ 1000,
+ 1001,
+ 1002,
+ 1003,
+ 1007,
+ 1008,
+ 1009,
+ 1010,
+ 1011,
+ 3000,
+ 3999,
+ 4000,
+ 4999,
+ ]
+
+ invalid_codes = [0, 999, 1004, 1005, 1006, 1016, 1100, 2000, 2999]
+
+ for code in valid_codes:
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=code)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock)
+
+ for code in invalid_codes:
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=code)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+
+def test_node_websockets_7_13_1__7_13_2():
+ client.load('websockets/mirror')
+
+ # 7_13_1
+
+ _, sock, _ = ws.upgrade()
+
+ payload = ws.serialize_close(code=5000)
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+ # 7_13_2
+
+ _, sock, _ = ws.upgrade()
+
+ payload = struct.pack('!I', 65536) + ''.encode('utf-8')
+
+ ws.frame_write(sock, ws.OP_CLOSE, payload)
+ check_close(sock, 1002)
+
+
+def test_node_websockets_9_1_1__9_6_6(is_unsafe, system):
+ if not is_unsafe:
+ pytest.skip('unsafe, long run')
+
+ client.load('websockets/mirror')
+
+ assert 'success' in client.conf(
+ {
+ 'http': {
+ 'websocket': {
+ 'max_frame_size': 33554432,
+ 'keepalive_interval': 0,
}
- },
- 'settings',
- ), 'increase max_frame_size and keepalive_interval'
-
- _, sock, _ = self.ws.upgrade()
-
- op_text = self.ws.OP_TEXT
- op_binary = self.ws.OP_BINARY
-
- def check_payload(opcode, length, chopsize=None):
- if opcode == self.ws.OP_TEXT:
- payload = '*' * length
- else:
- payload = b'*' * length
+ }
+ },
+ 'settings',
+ ), 'increase max_frame_size and keepalive_interval'
- self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
- frame = self.ws.frame_read(sock, read_timeout=5)
- self.check_frame(frame, True, opcode, payload)
+ _, sock, _ = ws.upgrade()
- def check_message(opcode, f_size):
- if opcode == self.ws.OP_TEXT:
- payload = '*' * 4 * 2**20
- else:
- payload = b'*' * 4 * 2**20
+ op_text = ws.OP_TEXT
+ op_binary = ws.OP_BINARY
- self.ws.message(sock, opcode, payload, fragmention_size=f_size)
- frame = self.ws.frame_read(sock, read_timeout=5)
- self.check_frame(frame, True, opcode, payload)
+ def check_payload(opcode, length, chopsize=None):
+ if opcode == ws.OP_TEXT:
+ payload = '*' * length
+ else:
+ payload = b'*' * length
- check_payload(op_text, 64 * 2**10) # 9_1_1
- check_payload(op_text, 256 * 2**10) # 9_1_2
- check_payload(op_text, 2**20) # 9_1_3
- check_payload(op_text, 4 * 2**20) # 9_1_4
- check_payload(op_text, 8 * 2**20) # 9_1_5
- check_payload(op_text, 16 * 2**20) # 9_1_6
+ ws.frame_write(sock, opcode, payload, chopsize=chopsize)
+ frame = ws.frame_read(sock, read_timeout=5)
+ check_frame(frame, True, opcode, payload)
- check_payload(op_binary, 64 * 2**10) # 9_2_1
- check_payload(op_binary, 256 * 2**10) # 9_2_2
- check_payload(op_binary, 2**20) # 9_2_3
- check_payload(op_binary, 4 * 2**20) # 9_2_4
- check_payload(op_binary, 8 * 2**20) # 9_2_5
- check_payload(op_binary, 16 * 2**20) # 9_2_6
+ def check_message(opcode, f_size):
+ if opcode == ws.OP_TEXT:
+ payload = '*' * 4 * 2**20
+ else:
+ payload = b'*' * 4 * 2**20
- if option.system != 'Darwin' and option.system != 'FreeBSD':
- check_message(op_text, 64) # 9_3_1
- check_message(op_text, 256) # 9_3_2
- check_message(op_text, 2**10) # 9_3_3
- check_message(op_text, 4 * 2**10) # 9_3_4
- check_message(op_text, 16 * 2**10) # 9_3_5
- check_message(op_text, 64 * 2**10) # 9_3_6
- check_message(op_text, 256 * 2**10) # 9_3_7
- check_message(op_text, 2**20) # 9_3_8
- check_message(op_text, 4 * 2**20) # 9_3_9
+ ws.message(sock, opcode, payload, fragmention_size=f_size)
+ frame = ws.frame_read(sock, read_timeout=5)
+ check_frame(frame, True, opcode, payload)
- check_message(op_binary, 64) # 9_4_1
- check_message(op_binary, 256) # 9_4_2
- check_message(op_binary, 2**10) # 9_4_3
- check_message(op_binary, 4 * 2**10) # 9_4_4
- check_message(op_binary, 16 * 2**10) # 9_4_5
- check_message(op_binary, 64 * 2**10) # 9_4_6
- check_message(op_binary, 256 * 2**10) # 9_4_7
- check_message(op_binary, 2**20) # 9_4_8
- check_message(op_binary, 4 * 2**20) # 9_4_9
+ check_payload(op_text, 64 * 2**10) # 9_1_1
+ check_payload(op_text, 256 * 2**10) # 9_1_2
+ check_payload(op_text, 2**20) # 9_1_3
+ check_payload(op_text, 4 * 2**20) # 9_1_4
+ check_payload(op_text, 8 * 2**20) # 9_1_5
+ check_payload(op_text, 16 * 2**20) # 9_1_6
- check_payload(op_text, 2**20, chopsize=64) # 9_5_1
- check_payload(op_text, 2**20, chopsize=128) # 9_5_2
- check_payload(op_text, 2**20, chopsize=256) # 9_5_3
- check_payload(op_text, 2**20, chopsize=512) # 9_5_4
- check_payload(op_text, 2**20, chopsize=1024) # 9_5_5
- check_payload(op_text, 2**20, chopsize=2048) # 9_5_6
+ check_payload(op_binary, 64 * 2**10) # 9_2_1
+ check_payload(op_binary, 256 * 2**10) # 9_2_2
+ check_payload(op_binary, 2**20) # 9_2_3
+ check_payload(op_binary, 4 * 2**20) # 9_2_4
+ check_payload(op_binary, 8 * 2**20) # 9_2_5
+ check_payload(op_binary, 16 * 2**20) # 9_2_6
- check_payload(op_binary, 2**20, chopsize=64) # 9_6_1
- check_payload(op_binary, 2**20, chopsize=128) # 9_6_2
- check_payload(op_binary, 2**20, chopsize=256) # 9_6_3
- check_payload(op_binary, 2**20, chopsize=512) # 9_6_4
- check_payload(op_binary, 2**20, chopsize=1024) # 9_6_5
- check_payload(op_binary, 2**20, chopsize=2048) # 9_6_6
+ if system not in ['Darwin', 'FreeBSD']:
+ check_message(op_text, 64) # 9_3_1
+ check_message(op_text, 256) # 9_3_2
+ check_message(op_text, 2**10) # 9_3_3
+ check_message(op_text, 4 * 2**10) # 9_3_4
+ check_message(op_text, 16 * 2**10) # 9_3_5
+ check_message(op_text, 64 * 2**10) # 9_3_6
+ check_message(op_text, 256 * 2**10) # 9_3_7
+ check_message(op_text, 2**20) # 9_3_8
+ check_message(op_text, 4 * 2**20) # 9_3_9
- self.close_connection(sock)
+ check_message(op_binary, 64) # 9_4_1
+ check_message(op_binary, 256) # 9_4_2
+ check_message(op_binary, 2**10) # 9_4_3
+ check_message(op_binary, 4 * 2**10) # 9_4_4
+ check_message(op_binary, 16 * 2**10) # 9_4_5
+ check_message(op_binary, 64 * 2**10) # 9_4_6
+ check_message(op_binary, 256 * 2**10) # 9_4_7
+ check_message(op_binary, 2**20) # 9_4_8
+ check_message(op_binary, 4 * 2**20) # 9_4_9
- def test_node_websockets_10_1_1(self):
- self.load('websockets/mirror')
+ check_payload(op_text, 2**20, chopsize=64) # 9_5_1
+ check_payload(op_text, 2**20, chopsize=128) # 9_5_2
+ check_payload(op_text, 2**20, chopsize=256) # 9_5_3
+ check_payload(op_text, 2**20, chopsize=512) # 9_5_4
+ check_payload(op_text, 2**20, chopsize=1024) # 9_5_5
+ check_payload(op_text, 2**20, chopsize=2048) # 9_5_6
- _, sock, _ = self.ws.upgrade()
+ check_payload(op_binary, 2**20, chopsize=64) # 9_6_1
+ check_payload(op_binary, 2**20, chopsize=128) # 9_6_2
+ check_payload(op_binary, 2**20, chopsize=256) # 9_6_3
+ check_payload(op_binary, 2**20, chopsize=512) # 9_6_4
+ check_payload(op_binary, 2**20, chopsize=1024) # 9_6_5
+ check_payload(op_binary, 2**20, chopsize=2048) # 9_6_6
- payload = '*' * 65536
+ close_connection(sock)
- self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1300)
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_TEXT, payload)
+def test_node_websockets_10_1_1():
+ client.load('websockets/mirror')
- self.close_connection(sock)
+ _, sock, _ = ws.upgrade()
- # settings
+ payload = '*' * 65536
- def test_node_websockets_max_frame_size(self):
- self.load('websockets/mirror')
+ ws.message(sock, ws.OP_TEXT, payload, fragmention_size=1300)
- assert 'success' in self.conf(
- {'http': {'websocket': {'max_frame_size': 100}}}, 'settings'
- ), 'configure max_frame_size'
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_TEXT, payload)
- _, sock, _ = self.ws.upgrade()
+ close_connection(sock)
- payload = '*' * 94
- opcode = self.ws.OP_TEXT
- self.ws.frame_write(sock, opcode, payload) # frame length is 100
+# settings
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, opcode, payload)
- payload = '*' * 95
+def test_node_websockets_max_frame_size():
+ client.load('websockets/mirror')
- self.ws.frame_write(sock, opcode, payload) # frame length is 101
- self.check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'max_frame_size': 100}}}, 'settings'
+ ), 'configure max_frame_size'
- def test_node_websockets_read_timeout(self):
- self.load('websockets/mirror')
+ _, sock, _ = ws.upgrade()
- assert 'success' in self.conf(
- {'http': {'websocket': {'read_timeout': 5}}}, 'settings'
- ), 'configure read_timeout'
+ payload = '*' * 94
+ opcode = ws.OP_TEXT
- _, sock, _ = self.ws.upgrade()
+ ws.frame_write(sock, opcode, payload) # frame length is 100
- frame = self.ws.frame_to_send(self.ws.OP_TEXT, 'blah')
- sock.sendall(frame[:2])
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, opcode, payload)
- time.sleep(2)
+ payload = '*' * 95
- self.check_close(sock, 1001) # 1001 - CLOSE_GOING_AWAY
+ ws.frame_write(sock, opcode, payload) # frame length is 101
+ check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
- def test_node_websockets_keepalive_interval(self):
- self.load('websockets/mirror')
- assert 'success' in self.conf(
- {'http': {'websocket': {'keepalive_interval': 5}}}, 'settings'
- ), 'configure keepalive_interval'
+def test_node_websockets_read_timeout():
+ client.load('websockets/mirror')
- _, sock, _ = self.ws.upgrade()
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'read_timeout': 5}}}, 'settings'
+ ), 'configure read_timeout'
- frame = self.ws.frame_to_send(self.ws.OP_TEXT, 'blah')
- sock.sendall(frame[:2])
+ _, sock, _ = ws.upgrade()
- time.sleep(2)
+ frame = ws.frame_to_send(ws.OP_TEXT, 'blah')
+ sock.sendall(frame[:2])
- frame = self.ws.frame_read(sock)
- self.check_frame(frame, True, self.ws.OP_PING, '') # PING frame
+ time.sleep(2)
- sock.close()
+ check_close(sock, 1001) # 1001 - CLOSE_GOING_AWAY
+
+
+def test_node_websockets_keepalive_interval():
+ client.load('websockets/mirror')
+
+ assert 'success' in client.conf(
+ {'http': {'websocket': {'keepalive_interval': 5}}}, 'settings'
+ ), 'configure keepalive_interval'
+
+ _, sock, _ = ws.upgrade()
+
+ frame = ws.frame_to_send(ws.OP_TEXT, 'blah')
+ sock.sendall(frame[:2])
+
+ time.sleep(2)
+
+ frame = ws.frame_read(sock)
+ check_frame(frame, True, ws.OP_PING, '') # PING frame
+
+ sock.close()
diff --git a/test/test_perl_application.py b/test/test_perl_application.py
index 3c327aa1..7e6571fb 100644
--- a/test/test_perl_application.py
+++ b/test/test_perl_application.py
@@ -1,295 +1,318 @@
import re
import pytest
-from unit.applications.lang.perl import TestApplicationPerl
+from unit.applications.lang.perl import ApplicationPerl
+prerequisites = {'modules': {'perl': 'all'}}
-class TestPerlApplication(TestApplicationPerl):
- prerequisites = {'modules': {'perl': 'all'}}
+client = ApplicationPerl()
- def test_perl_application(self):
- self.load('variables')
- body = 'Test body string.'
+def test_perl_application(date_to_sec_epoch, sec_epoch):
+ client.load('variables')
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'Custom-Header': 'blah',
- 'Connection': 'close',
- },
- body=body,
- )
+ body = 'Test body string.'
- assert resp['status'] == 200, 'status'
- headers = resp['headers']
- header_server = headers.pop('Server')
- assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
- assert (
- headers.pop('Server-Software') == header_server
- ), 'server software header'
-
- date = headers.pop('Date')
- assert date[-4:] == ' GMT', 'date header timezone'
- assert (
- abs(self.date_to_sec_epoch(date) - self.sec_epoch()) < 5
- ), 'date header'
-
- assert headers == {
- 'Connection': 'close',
- 'Content-Length': str(len(body)),
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
'Content-Type': 'text/html',
- 'Request-Method': 'POST',
- 'Request-Uri': '/',
- 'Http-Host': 'localhost',
- 'Server-Protocol': 'HTTP/1.1',
'Custom-Header': 'blah',
- 'Psgi-Version': '11',
- 'Psgi-Url-Scheme': 'http',
- 'Psgi-Multithread': '',
- 'Psgi-Multiprocess': '1',
- 'Psgi-Run-Once': '',
- 'Psgi-Nonblocking': '',
- 'Psgi-Streaming': '1',
- }, 'headers'
- assert resp['body'] == body, 'body'
+ 'Connection': 'close',
+ },
+ body=body,
+ )
- def test_perl_application_query_string(self):
- self.load('query_string')
+ assert resp['status'] == 200, 'status'
+ headers = resp['headers']
+ header_server = headers.pop('Server')
+ assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
+ assert (
+ headers.pop('Server-Software') == header_server
+ ), 'server software header'
- resp = self.get(url='/?var1=val1&var2=val2')
+ date = headers.pop('Date')
+ assert date[-4:] == ' GMT', 'date header timezone'
+ assert abs(date_to_sec_epoch(date) - sec_epoch) < 5, 'date header'
- assert (
- resp['headers']['Query-String'] == 'var1=val1&var2=val2'
- ), 'Query-String header'
+ assert headers == {
+ 'Connection': 'close',
+ 'Content-Length': str(len(body)),
+ 'Content-Type': 'text/html',
+ 'Request-Method': 'POST',
+ 'Request-Uri': '/',
+ 'Http-Host': 'localhost',
+ 'Server-Protocol': 'HTTP/1.1',
+ 'Custom-Header': 'blah',
+ 'Psgi-Version': '11',
+ 'Psgi-Url-Scheme': 'http',
+ 'Psgi-Multithread': '',
+ 'Psgi-Multiprocess': '1',
+ 'Psgi-Run-Once': '',
+ 'Psgi-Nonblocking': '',
+ 'Psgi-Streaming': '1',
+ }, 'headers'
+ assert resp['body'] == body, 'body'
- def test_perl_application_query_string_empty(self):
- self.load('query_string')
- resp = self.get(url='/?')
+def test_perl_application_query_string():
+ client.load('query_string')
- assert resp['status'] == 200, 'query string empty status'
- assert resp['headers']['Query-String'] == '', 'query string empty'
+ resp = client.get(url='/?var1=val1&var2=val2')
- def test_perl_application_query_string_absent(self):
- self.load('query_string')
+ assert (
+ resp['headers']['Query-String'] == 'var1=val1&var2=val2'
+ ), 'Query-String header'
- resp = self.get()
- assert resp['status'] == 200, 'query string absent status'
- assert resp['headers']['Query-String'] == '', 'query string absent'
+def test_perl_application_query_string_empty():
+ client.load('query_string')
- @pytest.mark.skip('not yet')
- def test_perl_application_server_port(self):
- self.load('server_port')
+ resp = client.get(url='/?')
- assert (
- self.get()['headers']['Server-Port'] == '7080'
- ), 'Server-Port header'
+ assert resp['status'] == 200, 'query string empty status'
+ assert resp['headers']['Query-String'] == '', 'query string empty'
- def test_perl_application_input_read_empty(self):
- self.load('input_read_empty')
- assert self.get()['body'] == '', 'read empty'
+def test_perl_application_query_string_absent():
+ client.load('query_string')
- def test_perl_application_input_read_parts(self):
- self.load('input_read_parts')
+ resp = client.get()
- assert (
- self.post(body='0123456789')['body'] == '0123456789'
- ), 'input read parts'
+ assert resp['status'] == 200, 'query string absent status'
+ assert resp['headers']['Query-String'] == '', 'query string absent'
- def test_perl_application_input_buffered_read(self):
- self.load('input_buffered_read')
- assert self.post(body='012345')['body'] == '012345', 'buffered read #1'
- assert (
- self.post(body='9876543210')['body'] == '9876543210'
- ), 'buffered read #2'
+@pytest.mark.skip('not yet')
+def test_perl_application_server_port():
+ client.load('server_port')
- def test_perl_application_input_close(self):
- self.load('input_close')
+ assert (
+ client.get()['headers']['Server-Port'] == '7080'
+ ), 'Server-Port header'
- assert self.post(body='012345')['body'] == '012345', 'input close #1'
- assert (
- self.post(body='9876543210')['body'] == '9876543210'
- ), 'input close #2'
- @pytest.mark.skip('not yet')
- def test_perl_application_input_read_offset(self):
- self.load('input_read_offset')
+def test_perl_application_input_read_empty():
+ client.load('input_read_empty')
- assert self.post(body='0123456789')['body'] == '4567', 'read offset'
+ assert client.get()['body'] == '', 'read empty'
- def test_perl_application_input_copy(self):
- self.load('input_copy')
- body = '0123456789'
- assert self.post(body=body)['body'] == body, 'input copy'
+def test_perl_application_input_read_parts():
+ client.load('input_read_parts')
- def test_perl_application_errors_print(self):
- self.load('errors_print')
+ assert (
+ client.post(body='0123456789')['body'] == '0123456789'
+ ), 'input read parts'
- assert self.get()['body'] == '1', 'errors result'
- assert (
- self.wait_for_record(r'\[error\].+Error in application') is not None
- ), 'errors print'
+def test_perl_application_input_buffered_read():
+ client.load('input_buffered_read')
- def test_perl_application_header_equal_names(self):
- self.load('header_equal_names')
+ assert client.post(body='012345')['body'] == '012345', 'buffered read #1'
+ assert (
+ client.post(body='9876543210')['body'] == '9876543210'
+ ), 'buffered read #2'
- assert self.get()['headers']['Set-Cookie'] == [
- 'tc=one,two,three',
- 'tc=four,five,six',
- ], 'header equal names'
- def test_perl_application_header_pairs(self):
- self.load('header_pairs')
+def test_perl_application_input_close():
+ client.load('input_close')
- assert self.get()['headers']['blah'] == 'blah', 'header pairs'
+ assert client.post(body='012345')['body'] == '012345', 'input close #1'
+ assert (
+ client.post(body='9876543210')['body'] == '9876543210'
+ ), 'input close #2'
- def test_perl_application_body_empty(self):
- self.load('body_empty')
- assert self.get()['body'] == '', 'body empty'
+@pytest.mark.skip('not yet')
+def test_perl_application_input_read_offset():
+ client.load('input_read_offset')
- def test_perl_application_body_array(self):
- self.load('body_array')
+ assert client.post(body='0123456789')['body'] == '4567', 'read offset'
- assert self.get()['body'] == '0123456789', 'body array'
- def test_perl_application_body_large(self):
- self.load('variables')
+def test_perl_application_input_copy():
+ client.load('input_copy')
- body = '0123456789' * 1000
+ body = '0123456789'
+ assert client.post(body=body)['body'] == body, 'input copy'
- resp = self.post(body=body)['body']
- assert resp == body, 'body large'
+def test_perl_application_errors_print(wait_for_record):
+ client.load('errors_print')
- def test_perl_application_body_io_empty(self):
- self.load('body_io_empty')
+ assert client.get()['body'] == '1', 'errors result'
- assert self.get()['status'] == 200, 'body io empty'
+ assert (
+ wait_for_record(r'\[error\].+Error in application') is not None
+ ), 'errors print'
- def test_perl_application_body_io_file(self):
- self.load('body_io_file')
- assert self.get()['body'] == 'body\n', 'body io file'
+def test_perl_application_header_equal_names():
+ client.load('header_equal_names')
- def test_perl_streaming_body_multiple_responses(self):
- self.load('streaming_body_multiple_responses')
+ assert client.get()['headers']['Set-Cookie'] == [
+ 'tc=one,two,three',
+ 'tc=four,five,six',
+ ], 'header equal names'
- assert self.get()['status'] == 200
- @pytest.mark.skip('not yet')
- def test_perl_application_syntax_error(self, skip_alert):
- skip_alert(r'PSGI: Failed to parse script')
- self.load('syntax_error')
+def test_perl_application_header_pairs():
+ client.load('header_pairs')
- assert self.get()['status'] == 500, 'syntax error'
+ assert client.get()['headers']['blah'] == 'blah', 'header pairs'
- def test_perl_keepalive_body(self):
- self.load('variables')
- assert self.get()['status'] == 200, 'init'
+def test_perl_application_body_empty():
+ client.load('body_empty')
- body = '0123456789' * 500
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- 'Content-Type': 'text/html',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
+ assert client.get()['body'] == '', 'body empty'
- assert resp['body'] == body, 'keep-alive 1'
- body = '0123456789'
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'close',
- 'Content-Type': 'text/html',
- },
- sock=sock,
- body=body,
- )
+def test_perl_application_body_array():
+ client.load('body_array')
- assert resp['body'] == body, 'keep-alive 2'
+ assert client.get()['body'] == '0123456789', 'body array'
- def test_perl_body_io_fake(self):
- self.load('body_io_fake')
- assert self.get()['body'] == '21', 'body io fake'
+def test_perl_application_body_large():
+ client.load('variables')
- assert (
- self.wait_for_record(r'\[error\].+IOFake getline\(\) \$\/ is \d+')
- is not None
- ), 'body io fake $/ value'
+ body = '0123456789' * 1000
- assert (
- self.wait_for_record(r'\[error\].+IOFake close\(\) called')
- is not None
- ), 'body io fake close'
+ resp = client.post(body=body)['body']
- def test_perl_delayed_response(self):
- self.load('delayed_response')
+ assert resp == body, 'body large'
- resp = self.get()
- assert resp['status'] == 200, 'status'
- assert resp['body'] == 'Hello World!', 'body'
+def test_perl_application_body_io_empty():
+ client.load('body_io_empty')
- def test_perl_streaming_body(self):
- self.load('streaming_body')
+ assert client.get()['status'] == 200, 'body io empty'
- resp = self.get()
- assert resp['status'] == 200, 'status'
- assert resp['body'] == 'Hello World!', 'body'
+def test_perl_application_body_io_file():
+ client.load('body_io_file')
+
+ assert client.get()['body'] == 'body\n', 'body io file'
+
+
+def test_perl_streaming_body_multiple_responses():
+ client.load('streaming_body_multiple_responses')
+
+ assert client.get()['status'] == 200
+
+
+@pytest.mark.skip('not yet')
+def test_perl_application_syntax_error(skip_alert):
+ skip_alert(r'PSGI: Failed to parse script')
+ client.load('syntax_error')
+
+ assert client.get()['status'] == 500, 'syntax error'
+
- def test_perl_application_threads(self):
- self.load('threads')
+def test_perl_keepalive_body():
+ client.load('variables')
- assert 'success' in self.conf(
- '4', 'applications/threads/threads'
- ), 'configure 4 threads'
+ assert client.get()['status'] == 200, 'init'
+
+ body = '0123456789' * 500
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ 'Content-Type': 'text/html',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
+
+ assert resp['body'] == body, 'keep-alive 1'
+
+ body = '0123456789'
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ 'Content-Type': 'text/html',
+ },
+ sock=sock,
+ body=body,
+ )
+
+ assert resp['body'] == body, 'keep-alive 2'
+
+
+def test_perl_body_io_fake(wait_for_record):
+ client.load('body_io_fake')
+
+ assert client.get()['body'] == '21', 'body io fake'
+
+ assert (
+ wait_for_record(r'\[error\].+IOFake getline\(\) \$\/ is \d+')
+ is not None
+ ), 'body io fake $/ value'
+
+ assert (
+ wait_for_record(r'\[error\].+IOFake close\(\) called') is not None
+ ), 'body io fake close'
+
+
+def test_perl_delayed_response():
+ client.load('delayed_response')
+
+ resp = client.get()
+
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == 'Hello World!', 'body'
- socks = []
- for i in range(4):
- sock = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Delay': '2',
- 'Connection': 'close',
- },
- no_recv=True,
- )
+def test_perl_streaming_body():
+ client.load('streaming_body')
- socks.append(sock)
+ resp = client.get()
- threads = set()
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == 'Hello World!', 'body'
- for sock in socks:
- resp = self.recvall(sock).decode('utf-8')
- self.log_in(resp)
+def test_perl_application_threads():
+ client.load('threads')
- resp = self._resp_to_dict(resp)
+ assert 'success' in client.conf(
+ '4', 'applications/threads/threads'
+ ), 'configure 4 threads'
- assert resp['status'] == 200, 'status'
+ socks = []
+
+ for _ in range(4):
+ sock = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Delay': '2',
+ 'Connection': 'close',
+ },
+ no_recv=True,
+ )
+
+ socks.append(sock)
+
+ threads = set()
+
+ for sock in socks:
+ resp = client.recvall(sock).decode('utf-8')
+
+ client.log_in(resp)
+
+ resp = client._resp_to_dict(resp)
+
+ assert resp['status'] == 200, 'status'
- threads.add(resp['headers']['X-Thread'])
+ threads.add(resp['headers']['X-Thread'])
- assert resp['headers']['Psgi-Multithread'] == '1', 'multithread'
+ assert resp['headers']['Psgi-Multithread'] == '1', 'multithread'
- sock.close()
+ sock.close()
- assert len(socks) == len(threads), 'threads differs'
+ assert len(socks) == len(threads), 'threads differs'
diff --git a/test/test_php_application.py b/test/test_php_application.py
index 6e1d190a..6c1f227b 100644
--- a/test/test_php_application.py
+++ b/test/test_php_application.py
@@ -7,822 +7,853 @@ import time
from pathlib import Path
import pytest
-from unit.applications.lang.php import TestApplicationPHP
+from unit.applications.lang.php import ApplicationPHP
from unit.option import option
+prerequisites = {'modules': {'php': 'all'}}
-class TestPHPApplication(TestApplicationPHP):
- prerequisites = {'modules': {'php': 'all'}}
+client = ApplicationPHP()
- def before_disable_functions(self):
- body = self.get()['body']
- assert re.search(r'time: \d+', body), 'disable_functions before time'
- assert re.search(r'exec: \/\w+', body), 'disable_functions before exec'
+def before_disable_functions():
+ body = client.get()['body']
- def check_opcache(self):
- resp = self.get()
- assert resp['status'] == 200, 'status'
+ assert re.search(r'time: \d+', body), 'disable_functions before time'
+ assert re.search(r'exec: \/\w+', body), 'disable_functions before exec'
- headers = resp['headers']
- if 'X-OPcache' in headers and headers['X-OPcache'] == '-1':
- pytest.skip('opcache is not supported')
- return resp
+def check_opcache():
+ resp = client.get()
+ assert resp['status'] == 200, 'status'
- def set_opcache(self, app, val):
- assert 'success' in self.conf(
- {"admin": {"opcache.enable": val, "opcache.enable_cli": val}},
- f'applications/{app}/options',
- )
+ headers = resp['headers']
+ if 'X-OPcache' in headers and headers['X-OPcache'] == '-1':
+ pytest.skip('opcache is not supported')
- r = self.check_opcache()
- assert r['headers']['X-OPcache'] == val, 'opcache value'
+ return resp
- def set_preload(self, preload):
- with open(f'{option.temp_dir}/php.ini', 'w') as f:
- f.write(
- f"""opcache.preload = {option.test_dir}/php/opcache/preload\
-/{preload}
-opcache.preload_user = {option.user or getpass.getuser()}
-"""
- )
- assert 'success' in self.conf(
- {"file": f"{option.temp_dir}/php.ini"},
- 'applications/opcache/options',
- )
+def run_php_application_cwd_root_tests():
+ assert 'success' in client.conf_delete('applications/cwd/working_directory')
- def test_php_application_variables(self):
- self.load('variables')
+ script_cwd = f'{option.test_dir}/php/cwd'
- body = 'Test body string.'
+ resp = client.get()
+ assert resp['status'] == 200, 'status ok'
+ assert resp['body'] == script_cwd, 'default cwd'
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'Custom-Header': 'blah',
- 'Connection': 'close',
- },
- body=body,
- url='/index.php/blah?var=val',
- )
+ assert 'success' in client.conf(
+ f'"{option.test_dir}"',
+ 'applications/cwd/working_directory',
+ )
- assert resp['status'] == 200, 'status'
- headers = resp['headers']
- header_server = headers.pop('Server')
- assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
- assert (
- headers.pop('Server-Software') == header_server
- ), 'server software header'
-
- date = headers.pop('Date')
- assert date[-4:] == ' GMT', 'date header timezone'
- assert (
- abs(self.date_to_sec_epoch(date) - self.sec_epoch()) < 5
- ), 'date header'
-
- if 'X-Powered-By' in headers:
- headers.pop('X-Powered-By')
-
- headers.pop('Content-type')
- assert headers == {
- 'Connection': 'close',
- 'Content-Length': str(len(body)),
- 'Request-Method': 'POST',
- 'Path-Info': '/blah',
- 'Request-Uri': '/index.php/blah?var=val',
- 'Http-Host': 'localhost',
- 'Server-Protocol': 'HTTP/1.1',
- 'Custom-Header': 'blah',
- }, 'headers'
- assert resp['body'] == body, 'body'
+ resp = client.get()
+ assert resp['status'] == 200, 'status ok'
+ assert resp['body'] == script_cwd, 'wdir cwd'
- def test_php_application_query_string(self):
- self.load('query_string')
+ resp = client.get(url='/?chdir=/')
+ assert resp['status'] == 200, 'status ok'
+ assert resp['body'] == '/', 'cwd after chdir'
- resp = self.get(url='/?var1=val1&var2=val2')
+ # cwd must be restored
- assert (
- resp['headers']['Query-String'] == 'var1=val1&var2=val2'
- ), 'query string'
+ resp = client.get()
+ assert resp['status'] == 200, 'status ok'
+ assert resp['body'] == script_cwd, 'cwd restored'
- def test_php_application_query_string_empty(self):
- self.load('query_string')
+ resp = client.get(url='/subdir/')
+ assert resp['body'] == f'{script_cwd}/subdir', 'cwd subdir'
- resp = self.get(url='/?')
- assert resp['status'] == 200, 'query string empty status'
- assert resp['headers']['Query-String'] == '', 'query string empty'
+def run_php_application_cwd_script_tests():
+ client.load('cwd')
- def test_php_application_fastcgi_finish_request(self, unit_pid):
- self.load('fastcgi_finish_request')
+ script_cwd = f'{option.test_dir}/php/cwd'
- assert 'success' in self.conf(
- {"admin": {"auto_globals_jit": "1"}},
- 'applications/fastcgi_finish_request/options',
- )
+ assert 'success' in client.conf_delete('applications/cwd/working_directory')
- assert self.get()['body'] == '0123'
+ assert 'success' in client.conf('"index.php"', 'applications/cwd/script')
- os.kill(unit_pid, signal.SIGUSR1)
+ assert client.get()['body'] == script_cwd, 'default cwd'
- errs = self.findall(r'Error in fastcgi_finish_request')
+ assert client.get(url='/?chdir=/')['body'] == '/', 'cwd after chdir'
- assert len(errs) == 0, 'no error'
+ # cwd must be restored
+ assert client.get()['body'] == script_cwd, 'cwd restored'
- def test_php_application_fastcgi_finish_request_2(self, unit_pid):
- self.load('fastcgi_finish_request')
- assert 'success' in self.conf(
- {"admin": {"auto_globals_jit": "1"}},
- 'applications/fastcgi_finish_request/options',
+def set_opcache(app, val):
+ assert 'success' in client.conf(
+ {"admin": {"opcache.enable": val, "opcache.enable_cli": val}},
+ f'applications/{app}/options',
+ )
+
+ r = check_opcache()
+ assert r['headers']['X-OPcache'] == val, 'opcache value'
+
+
+def set_preload(preload):
+ with open(f'{option.temp_dir}/php.ini', 'w') as ini:
+ ini.write(
+ f"""opcache.preload = {option.test_dir}/php/opcache/preload\
+/{preload}
+opcache.preload_user = {option.user or getpass.getuser()}
+"""
)
- resp = self.get(url='/?skip')
- assert resp['status'] == 200
- assert resp['body'] == ''
+ assert 'success' in client.conf(
+ {"file": f"{option.temp_dir}/php.ini"},
+ 'applications/opcache/options',
+ )
- os.kill(unit_pid, signal.SIGUSR1)
- errs = self.findall(r'Error in fastcgi_finish_request')
+def test_php_application_variables(date_to_sec_epoch, sec_epoch):
+ client.load('variables')
- assert len(errs) == 0, 'no error'
+ body = 'Test body string.'
- def test_php_application_query_string_absent(self):
- self.load('query_string')
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Type': 'text/html',
+ 'Custom-Header': 'blah',
+ 'Connection': 'close',
+ },
+ body=body,
+ url='/index.php/blah?var=val',
+ )
- resp = self.get()
+ assert resp['status'] == 200, 'status'
+ headers = resp['headers']
+ header_server = headers.pop('Server')
+ assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
+ assert (
+ headers.pop('Server-Software') == header_server
+ ), 'server software header'
- assert resp['status'] == 200, 'query string absent status'
- assert resp['headers']['Query-String'] == '', 'query string absent'
+ date = headers.pop('Date')
+ assert date[-4:] == ' GMT', 'date header timezone'
+ assert abs(date_to_sec_epoch(date) - sec_epoch) < 5, 'date header'
- def test_php_application_phpinfo(self):
- self.load('phpinfo')
+ if 'X-Powered-By' in headers:
+ headers.pop('X-Powered-By')
- resp = self.get()
+ headers.pop('Content-type')
+ assert headers == {
+ 'Connection': 'close',
+ 'Content-Length': str(len(body)),
+ 'Request-Method': 'POST',
+ 'Path-Info': '/blah',
+ 'Request-Uri': '/index.php/blah?var=val',
+ 'Http-Host': 'localhost',
+ 'Server-Protocol': 'HTTP/1.1',
+ 'Custom-Header': 'blah',
+ }, 'headers'
+ assert resp['body'] == body, 'body'
- assert resp['status'] == 200, 'status'
- assert resp['body'] != '', 'body not empty'
- def test_php_application_header_status(self):
- self.load('header')
+def test_php_application_query_string():
+ client.load('query_string')
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Connection': 'close',
- 'X-Header': 'HTTP/1.1 404 Not Found',
- }
- )['status']
- == 404
- ), 'status'
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Connection': 'close',
- 'X-Header': 'http/1.1 404 Not Found',
- }
- )['status']
- == 404
- ), 'status case insensitive'
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Connection': 'close',
- 'X-Header': 'HTTP/ 404 Not Found',
- }
- )['status']
- == 404
- ), 'status version empty'
+ resp = client.get(url='/?var1=val1&var2=val2')
- def test_php_application_404(self):
- self.load('404')
+ assert (
+ resp['headers']['Query-String'] == 'var1=val1&var2=val2'
+ ), 'query string'
- resp = self.get()
- assert resp['status'] == 404, '404 status'
- assert re.search(
- r'<title>404 Not Found</title>', resp['body']
- ), '404 body'
+def test_php_application_query_string_empty():
+ client.load('query_string')
- def test_php_application_keepalive_body(self):
- self.load('mirror')
+ resp = client.get(url='/?')
- assert self.get()['status'] == 200, 'init'
+ assert resp['status'] == 200, 'query string empty status'
+ assert resp['headers']['Query-String'] == '', 'query string empty'
+
+
+def test_php_application_fastcgi_finish_request(findall, unit_pid):
+ client.load('fastcgi_finish_request')
+
+ assert 'success' in client.conf(
+ {"admin": {"auto_globals_jit": "1"}},
+ 'applications/fastcgi_finish_request/options',
+ )
+
+ assert client.get()['body'] == '0123'
+
+ os.kill(unit_pid, signal.SIGUSR1)
+
+ errs = findall(r'Error in fastcgi_finish_request')
+
+ assert len(errs) == 0, 'no error'
- body = '0123456789' * 500
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
- assert resp['body'] == body, 'keep-alive 1'
+def test_php_application_fastcgi_finish_request_2(findall, unit_pid):
+ client.load('fastcgi_finish_request')
- body = '0123456789'
- resp = self.post(sock=sock, body=body)
+ assert 'success' in client.conf(
+ {"admin": {"auto_globals_jit": "1"}},
+ 'applications/fastcgi_finish_request/options',
+ )
- assert resp['body'] == body, 'keep-alive 2'
+ resp = client.get(url='/?skip')
+ assert resp['status'] == 200
+ assert resp['body'] == ''
- def test_php_application_conditional(self):
- self.load('conditional')
+ os.kill(unit_pid, signal.SIGUSR1)
- assert re.search(r'True', self.get()['body']), 'conditional true'
- assert re.search(r'False', self.post()['body']), 'conditional false'
+ errs = findall(r'Error in fastcgi_finish_request')
- def test_php_application_get_variables(self):
- self.load('get_variables')
+ assert len(errs) == 0, 'no error'
- resp = self.get(url='/?var1=val1&var2=&var3')
- assert resp['headers']['X-Var-1'] == 'val1', 'GET variables'
- assert resp['headers']['X-Var-2'] == '', 'GET variables 2'
- assert resp['headers']['X-Var-3'] == '', 'GET variables 3'
- assert resp['headers']['X-Var-4'] == 'not set', 'GET variables 4'
- def test_php_application_post_variables(self):
- self.load('post_variables')
+def test_php_application_query_string_absent():
+ client.load('query_string')
- resp = self.post(
+ resp = client.get()
+
+ assert resp['status'] == 200, 'query string absent status'
+ assert resp['headers']['Query-String'] == '', 'query string absent'
+
+
+def test_php_application_phpinfo():
+ client.load('phpinfo')
+
+ resp = client.get()
+
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] != '', 'body not empty'
+
+
+def test_php_application_header_status():
+ client.load('header')
+
+ assert (
+ client.get(
headers={
- 'Content-Type': 'application/x-www-form-urlencoded',
'Host': 'localhost',
'Connection': 'close',
- },
- body='var1=val1&var2=',
- )
- assert resp['headers']['X-Var-1'] == 'val1', 'POST variables'
- assert resp['headers']['X-Var-2'] == '', 'POST variables 2'
- assert resp['headers']['X-Var-3'] == 'not set', 'POST variables 3'
+ 'X-Header': 'HTTP/1.1 404 Not Found',
+ }
+ )['status']
+ == 404
+ ), 'status'
- def test_php_application_cookies(self):
- self.load('cookies')
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ 'X-Header': 'http/1.1 404 Not Found',
+ }
+ )['status']
+ == 404
+ ), 'status case insensitive'
- resp = self.get(
+ assert (
+ client.get(
headers={
- 'Cookie': 'var=val; var2=val2',
'Host': 'localhost',
'Connection': 'close',
+ 'X-Header': 'HTTP/ 404 Not Found',
}
- )
+ )['status']
+ == 404
+ ), 'status version empty'
- assert resp['headers']['X-Cookie-1'] == 'val', 'cookie'
- assert resp['headers']['X-Cookie-2'] == 'val2', 'cookie'
- def test_php_application_ini_precision(self):
- self.load('ini_precision')
+def test_php_application_404():
+ client.load('404')
- assert self.get()['headers']['X-Precision'] != '4', 'ini value default'
+ resp = client.get()
- assert 'success' in self.conf(
- {"file": "ini/php.ini"}, 'applications/ini_precision/options'
- )
+ assert resp['status'] == 404, '404 status'
+ assert re.search(r'<title>404 Not Found</title>', resp['body']), '404 body'
- assert (
- self.get()['headers']['X-File']
- == f'{option.test_dir}/php/ini_precision/ini/php.ini'
- ), 'ini file'
- assert self.get()['headers']['X-Precision'] == '4', 'ini value'
- @pytest.mark.skip('not yet')
- def test_php_application_ini_admin_user(self):
- self.load('ini_precision')
+def test_php_application_keepalive_body():
+ client.load('mirror')
- assert 'error' in self.conf(
- {"user": {"precision": "4"}, "admin": {"precision": "5"}},
- 'applications/ini_precision/options',
- ), 'ini admin user'
+ assert client.get()['status'] == 200, 'init'
- def test_php_application_ini_admin(self):
- self.load('ini_precision')
+ body = '0123456789' * 500
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
- assert 'success' in self.conf(
- {"file": "ini/php.ini", "admin": {"precision": "5"}},
- 'applications/ini_precision/options',
- )
+ assert resp['body'] == body, 'keep-alive 1'
- assert (
- self.get()['headers']['X-File']
- == f'{option.test_dir}/php/ini_precision/ini/php.ini'
- ), 'ini file'
- assert self.get()['headers']['X-Precision'] == '5', 'ini value admin'
+ body = '0123456789'
+ resp = client.post(sock=sock, body=body)
- def test_php_application_ini_user(self):
- self.load('ini_precision')
+ assert resp['body'] == body, 'keep-alive 2'
- assert 'success' in self.conf(
- {"file": "ini/php.ini", "user": {"precision": "5"}},
- 'applications/ini_precision/options',
- )
- assert (
- self.get()['headers']['X-File']
- == f'{option.test_dir}/php/ini_precision/ini/php.ini'
- ), 'ini file'
- assert self.get()['headers']['X-Precision'] == '5', 'ini value user'
+def test_php_application_conditional():
+ client.load('conditional')
- def test_php_application_ini_user_2(self):
- self.load('ini_precision')
+ assert re.search(r'True', client.get()['body']), 'conditional true'
+ assert re.search(r'False', client.post()['body']), 'conditional false'
- assert 'success' in self.conf(
- {"file": "ini/php.ini"}, 'applications/ini_precision/options'
- )
- assert self.get()['headers']['X-Precision'] == '4', 'ini user file'
+def test_php_application_get_variables():
+ client.load('get_variables')
- assert 'success' in self.conf(
- {"precision": "5"}, 'applications/ini_precision/options/user'
- )
+ resp = client.get(url='/?var1=val1&var2=&var3')
+ assert resp['headers']['X-Var-1'] == 'val1', 'GET variables'
+ assert resp['headers']['X-Var-2'] == '', 'GET variables 2'
+ assert resp['headers']['X-Var-3'] == '', 'GET variables 3'
+ assert resp['headers']['X-Var-4'] == 'not set', 'GET variables 4'
- assert self.get()['headers']['X-Precision'] == '5', 'ini value user'
- def test_php_application_ini_set_admin(self):
- self.load('ini_precision')
+def test_php_application_post_variables():
+ client.load('post_variables')
- assert 'success' in self.conf(
- {"admin": {"precision": "5"}}, 'applications/ini_precision/options'
- )
+ resp = client.post(
+ headers={
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ },
+ body='var1=val1&var2=',
+ )
+ assert resp['headers']['X-Var-1'] == 'val1', 'POST variables'
+ assert resp['headers']['X-Var-2'] == '', 'POST variables 2'
+ assert resp['headers']['X-Var-3'] == 'not set', 'POST variables 3'
- assert (
- self.get(url='/?precision=6')['headers']['X-Precision'] == '5'
- ), 'ini set admin'
- def test_php_application_ini_set_user(self):
- self.load('ini_precision')
+def test_php_application_cookies():
+ client.load('cookies')
- assert 'success' in self.conf(
- {"user": {"precision": "5"}}, 'applications/ini_precision/options'
- )
+ resp = client.get(
+ headers={
+ 'Cookie': 'var=val; var2=val2',
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ }
+ )
- assert (
- self.get(url='/?precision=6')['headers']['X-Precision'] == '6'
- ), 'ini set user'
+ assert resp['headers']['X-Cookie-1'] == 'val', 'cookie'
+    assert resp['headers']['X-Cookie-2'] == 'val2', 'cookie 2'
- def test_php_application_ini_repeat(self):
- self.load('ini_precision')
- assert 'success' in self.conf(
- {"user": {"precision": "5"}}, 'applications/ini_precision/options'
- )
+def test_php_application_ini_precision():
+ client.load('ini_precision')
- assert self.get()['headers']['X-Precision'] == '5', 'ini value'
+ assert client.get()['headers']['X-Precision'] != '4', 'ini value default'
- assert self.get()['headers']['X-Precision'] == '5', 'ini value repeat'
+ assert 'success' in client.conf(
+ {"file": "ini/php.ini"}, 'applications/ini_precision/options'
+ )
- def test_php_application_disable_functions_exec(self):
- self.load('time_exec')
+ assert (
+ client.get()['headers']['X-File']
+ == f'{option.test_dir}/php/ini_precision/ini/php.ini'
+ ), 'ini file'
+ assert client.get()['headers']['X-Precision'] == '4', 'ini value'
- self.before_disable_functions()
- assert 'success' in self.conf(
- {"admin": {"disable_functions": "exec"}},
- 'applications/time_exec/options',
- )
+@pytest.mark.skip('not yet')
+def test_php_application_ini_admin_user():
+ client.load('ini_precision')
- body = self.get()['body']
+ assert 'error' in client.conf(
+ {"user": {"precision": "4"}, "admin": {"precision": "5"}},
+ 'applications/ini_precision/options',
+ ), 'ini admin user'
- assert re.search(r'time: \d+', body), 'disable_functions time'
- assert not re.search(r'exec: \/\w+', body), 'disable_functions exec'
- def test_php_application_disable_functions_comma(self):
- self.load('time_exec')
+def test_php_application_ini_admin():
+ client.load('ini_precision')
- self.before_disable_functions()
+ assert 'success' in client.conf(
+ {"file": "ini/php.ini", "admin": {"precision": "5"}},
+ 'applications/ini_precision/options',
+ )
- assert 'success' in self.conf(
- {"admin": {"disable_functions": "exec,time"}},
- 'applications/time_exec/options',
- )
+ assert (
+ client.get()['headers']['X-File']
+ == f'{option.test_dir}/php/ini_precision/ini/php.ini'
+ ), 'ini file'
+ assert client.get()['headers']['X-Precision'] == '5', 'ini value admin'
- body = self.get()['body']
- assert not re.search(r'time: \d+', body), 'disable_functions comma time'
- assert not re.search(
- r'exec: \/\w+', body
- ), 'disable_functions comma exec'
+def test_php_application_ini_user():
+ client.load('ini_precision')
- def test_php_application_auth(self):
- self.load('auth')
+ assert 'success' in client.conf(
+ {"file": "ini/php.ini", "user": {"precision": "5"}},
+ 'applications/ini_precision/options',
+ )
- resp = self.get()
- assert resp['status'] == 200, 'status'
- assert resp['headers']['X-Digest'] == 'not set', 'digest'
- assert resp['headers']['X-User'] == 'not set', 'user'
- assert resp['headers']['X-Password'] == 'not set', 'password'
+ assert (
+ client.get()['headers']['X-File']
+ == f'{option.test_dir}/php/ini_precision/ini/php.ini'
+ ), 'ini file'
+ assert client.get()['headers']['X-Precision'] == '5', 'ini value user'
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Authorization': 'Basic dXNlcjpwYXNzd29yZA==',
- 'Connection': 'close',
- }
- )
- assert resp['status'] == 200, 'basic status'
- assert resp['headers']['X-Digest'] == 'not set', 'basic digest'
- assert resp['headers']['X-User'] == 'user', 'basic user'
- assert resp['headers']['X-Password'] == 'password', 'basic password'
- resp = self.get(
+def test_php_application_ini_user_2():
+ client.load('ini_precision')
+
+ assert 'success' in client.conf(
+ {"file": "ini/php.ini"}, 'applications/ini_precision/options'
+ )
+
+ assert client.get()['headers']['X-Precision'] == '4', 'ini user file'
+
+ assert 'success' in client.conf(
+ {"precision": "5"}, 'applications/ini_precision/options/user'
+ )
+
+ assert client.get()['headers']['X-Precision'] == '5', 'ini value user'
+
+
+def test_php_application_ini_set_admin():
+ client.load('ini_precision')
+
+ assert 'success' in client.conf(
+ {"admin": {"precision": "5"}}, 'applications/ini_precision/options'
+ )
+
+ assert (
+ client.get(url='/?precision=6')['headers']['X-Precision'] == '5'
+ ), 'ini set admin'
+
+
+def test_php_application_ini_set_user():
+ client.load('ini_precision')
+
+ assert 'success' in client.conf(
+ {"user": {"precision": "5"}}, 'applications/ini_precision/options'
+ )
+
+ assert (
+ client.get(url='/?precision=6')['headers']['X-Precision'] == '6'
+ ), 'ini set user'
+
+
+def test_php_application_ini_repeat():
+ client.load('ini_precision')
+
+ assert 'success' in client.conf(
+ {"user": {"precision": "5"}}, 'applications/ini_precision/options'
+ )
+
+ assert client.get()['headers']['X-Precision'] == '5', 'ini value'
+
+ assert client.get()['headers']['X-Precision'] == '5', 'ini value repeat'
+
+
+def test_php_application_disable_functions_exec():
+ client.load('time_exec')
+
+ before_disable_functions()
+
+ assert 'success' in client.conf(
+ {"admin": {"disable_functions": "exec"}},
+ 'applications/time_exec/options',
+ )
+
+ body = client.get()['body']
+
+ assert re.search(r'time: \d+', body), 'disable_functions time'
+ assert not re.search(r'exec: \/\w+', body), 'disable_functions exec'
+
+
+def test_php_application_disable_functions_comma():
+ client.load('time_exec')
+
+ before_disable_functions()
+
+ assert 'success' in client.conf(
+ {"admin": {"disable_functions": "exec,time"}},
+ 'applications/time_exec/options',
+ )
+
+ body = client.get()['body']
+
+ assert not re.search(r'time: \d+', body), 'disable_functions comma time'
+ assert not re.search(r'exec: \/\w+', body), 'disable_functions comma exec'
+
+
+def test_php_application_auth():
+ client.load('auth')
+
+ resp = client.get()
+ assert resp['status'] == 200, 'status'
+ assert resp['headers']['X-Digest'] == 'not set', 'digest'
+ assert resp['headers']['X-User'] == 'not set', 'user'
+ assert resp['headers']['X-Password'] == 'not set', 'password'
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Authorization': 'Basic dXNlcjpwYXNzd29yZA==',
+ 'Connection': 'close',
+ }
+ )
+ assert resp['status'] == 200, 'basic status'
+ assert resp['headers']['X-Digest'] == 'not set', 'basic digest'
+ assert resp['headers']['X-User'] == 'user', 'basic user'
+ assert resp['headers']['X-Password'] == 'password', 'basic password'
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Authorization': 'Digest username="blah", realm="", uri="/"',
+ 'Connection': 'close',
+ }
+ )
+ assert resp['status'] == 200, 'digest status'
+ assert (
+ resp['headers']['X-Digest'] == 'username="blah", realm="", uri="/"'
+ ), 'digest digest'
+ assert resp['headers']['X-User'] == 'not set', 'digest user'
+ assert resp['headers']['X-Password'] == 'not set', 'digest password'
+
+
+def test_php_application_auth_invalid():
+ client.load('auth')
+
+ def check_auth(auth):
+ resp = client.get(
headers={
'Host': 'localhost',
- 'Authorization': 'Digest username="blah", realm="", uri="/"',
+ 'Authorization': auth,
'Connection': 'close',
}
)
- assert resp['status'] == 200, 'digest status'
- assert (
- resp['headers']['X-Digest'] == 'username="blah", realm="", uri="/"'
- ), 'digest digest'
- assert resp['headers']['X-User'] == 'not set', 'digest user'
- assert resp['headers']['X-Password'] == 'not set', 'digest password'
-
- def test_php_application_auth_invalid(self):
- self.load('auth')
-
- def check_auth(auth):
- resp = self.get(
- headers={
- 'Host': 'localhost',
- 'Authorization': auth,
- 'Connection': 'close',
- }
- )
- assert resp['status'] == 200, 'status'
- assert resp['headers']['X-Digest'] == 'not set', 'Digest'
- assert resp['headers']['X-User'] == 'not set', 'User'
- assert resp['headers']['X-Password'] == 'not set', 'Password'
+ assert resp['status'] == 200, 'status'
+ assert resp['headers']['X-Digest'] == 'not set', 'Digest'
+ assert resp['headers']['X-User'] == 'not set', 'User'
+ assert resp['headers']['X-Password'] == 'not set', 'Password'
- check_auth('Basic dXN%cjpwYXNzd29yZA==')
- check_auth('Basic XNlcjpwYXNzd29yZA==')
- check_auth('Basic DdXNlcjpwYXNzd29yZA==')
- check_auth('Basic blah')
- check_auth('Basic')
- check_auth('Digest')
- check_auth('blah')
+ check_auth('Basic dXN%cjpwYXNzd29yZA==')
+ check_auth('Basic XNlcjpwYXNzd29yZA==')
+ check_auth('Basic DdXNlcjpwYXNzd29yZA==')
+ check_auth('Basic blah')
+ check_auth('Basic')
+ check_auth('Digest')
+ check_auth('blah')
- def test_php_application_disable_functions_space(self):
- self.load('time_exec')
- self.before_disable_functions()
+def test_php_application_disable_functions_space():
+ client.load('time_exec')
- assert 'success' in self.conf(
- {"admin": {"disable_functions": "exec time"}},
- 'applications/time_exec/options',
- )
+ before_disable_functions()
- body = self.get()['body']
+ assert 'success' in client.conf(
+ {"admin": {"disable_functions": "exec time"}},
+ 'applications/time_exec/options',
+ )
- assert not re.search(r'time: \d+', body), 'disable_functions space time'
- assert not re.search(
- r'exec: \/\w+', body
- ), 'disable_functions space exec'
+ body = client.get()['body']
- def test_php_application_disable_functions_user(self):
- self.load('time_exec')
+ assert not re.search(r'time: \d+', body), 'disable_functions space time'
+ assert not re.search(r'exec: \/\w+', body), 'disable_functions space exec'
- self.before_disable_functions()
- assert 'success' in self.conf(
- {"user": {"disable_functions": "exec"}},
- 'applications/time_exec/options',
- )
+def test_php_application_disable_functions_user():
+ client.load('time_exec')
- body = self.get()['body']
+ before_disable_functions()
- assert re.search(r'time: \d+', body), 'disable_functions user time'
- assert not re.search(
- r'exec: \/\w+', body
- ), 'disable_functions user exec'
+ assert 'success' in client.conf(
+ {"user": {"disable_functions": "exec"}},
+ 'applications/time_exec/options',
+ )
- def test_php_application_disable_functions_nonexistent(self):
- self.load('time_exec')
+ body = client.get()['body']
- self.before_disable_functions()
+ assert re.search(r'time: \d+', body), 'disable_functions user time'
+ assert not re.search(r'exec: \/\w+', body), 'disable_functions user exec'
- assert 'success' in self.conf(
- {"admin": {"disable_functions": "blah"}},
- 'applications/time_exec/options',
- )
- body = self.get()['body']
+def test_php_application_disable_functions_nonexistent():
+ client.load('time_exec')
- assert re.search(
- r'time: \d+', body
- ), 'disable_functions nonexistent time'
- assert re.search(
- r'exec: \/\w+', body
- ), 'disable_functions nonexistent exec'
+ before_disable_functions()
- def test_php_application_disable_classes(self):
- self.load('date_time')
+ assert 'success' in client.conf(
+ {"admin": {"disable_functions": "blah"}},
+ 'applications/time_exec/options',
+ )
- assert re.search(
- r'012345', self.get()['body']
- ), 'disable_classes before'
+ body = client.get()['body']
- assert 'success' in self.conf(
- {"admin": {"disable_classes": "DateTime"}},
- 'applications/date_time/options',
- )
+ assert re.search(r'time: \d+', body), 'disable_functions nonexistent time'
+ assert re.search(r'exec: \/\w+', body), 'disable_functions nonexistent exec'
- assert not re.search(
- r'012345', self.get()['body']
- ), 'disable_classes before'
- def test_php_application_disable_classes_user(self):
- self.load('date_time')
+def test_php_application_disable_classes():
+ client.load('date_time')
- assert re.search(
- r'012345', self.get()['body']
- ), 'disable_classes before'
+ assert re.search(r'012345', client.get()['body']), 'disable_classes before'
- assert 'success' in self.conf(
- {"user": {"disable_classes": "DateTime"}},
- 'applications/date_time/options',
- )
+ assert 'success' in client.conf(
+ {"admin": {"disable_classes": "DateTime"}},
+ 'applications/date_time/options',
+ )
- assert not re.search(
- r'012345', self.get()['body']
- ), 'disable_classes before'
+ assert not re.search(
+ r'012345', client.get()['body']
+    ), 'disable_classes after'
- def test_php_application_error_log(self):
- self.load('error_log')
- assert self.get()['status'] == 200, 'status'
+def test_php_application_disable_classes_user():
+ client.load('date_time')
- time.sleep(1)
+ assert re.search(r'012345', client.get()['body']), 'disable_classes before'
- assert self.get()['status'] == 200, 'status 2'
+ assert 'success' in client.conf(
+ {"user": {"disable_classes": "DateTime"}},
+ 'applications/date_time/options',
+ )
- pattern = r'\d{4}\/\d\d\/\d\d\s\d\d:.+\[notice\].+Error in application'
+ assert not re.search(
+ r'012345', client.get()['body']
+    ), 'disable_classes after'
- assert self.wait_for_record(pattern) is not None, 'errors print'
- errs = self.findall(pattern)
+def test_php_application_error_log(findall, wait_for_record):
+ client.load('error_log')
- assert len(errs) == 2, 'error_log count'
+ assert client.get()['status'] == 200, 'status'
- date = errs[0].split('[')[0]
- date2 = errs[1].split('[')[0]
- assert date != date2, 'date diff'
+ time.sleep(1)
- def test_php_application_script(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "applications/script"}},
- "applications": {
- "script": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "root": f"{option.test_dir}/php/script",
- "script": "phpinfo.php",
- }
- },
- }
- ), 'configure script'
+ assert client.get()['status'] == 200, 'status 2'
- resp = self.get()
+ pattern = r'\d{4}\/\d\d\/\d\d\s\d\d:.+\[notice\].+Error in application'
- assert resp['status'] == 200, 'status'
- assert resp['body'] != '', 'body not empty'
-
- def test_php_application_index_default(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "applications/phpinfo"}},
- "applications": {
- "phpinfo": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "root": f"{option.test_dir}/php/phpinfo",
- }
- },
- }
- ), 'configure index default'
+ assert wait_for_record(pattern) is not None, 'errors print'
- resp = self.get()
+ errs = findall(pattern)
- assert resp['status'] == 200, 'status'
- assert resp['body'] != '', 'body not empty'
-
- def test_php_application_trailing_slash(self, temp_dir):
- new_root = f'{temp_dir}/php-root'
- os.makedirs(f'{new_root}/path')
-
- Path(f'{new_root}/path/index.php').write_text('<?php echo "OK\n"; ?>')
-
- addr = f'{temp_dir}/sock'
-
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "applications/php-path"},
- f'unix:{addr}': {"pass": "applications/php-path"},
- },
- "applications": {
- "php-path": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "root": new_root,
- }
- },
- }
- ), 'configure trailing slash'
+ assert len(errs) == 2, 'error_log count'
- assert self.get(url='/path/')['status'] == 200, 'uri with trailing /'
+ date = errs[0].split('[')[0]
+ date2 = errs[1].split('[')[0]
+ assert date != date2, 'date diff'
- resp = self.get(url='/path?q=a')
- assert resp['status'] == 301, 'uri without trailing /'
- assert (
- resp['headers']['Location'] == 'http://localhost:7080/path/?q=a'
- ), 'Location with query string'
- resp = self.get(
- sock_type='unix',
- addr=addr,
- url='/path',
- headers={'Host': 'foo', 'Connection': 'close'},
- )
- assert resp['status'] == 301, 'uri without trailing /'
- assert (
- resp['headers']['Location'] == 'http://foo/path/'
- ), 'Location with custom Host over UDS'
-
- def test_php_application_forbidden(self, temp_dir):
- new_root = f'{temp_dir}/php-root/path'
- os.makedirs(new_root)
- os.chmod(new_root, 0o000)
-
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "applications/php-path"}},
- "applications": {
- "php-path": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "root": f'{temp_dir}/php-root',
- }
- },
- }
- ), 'forbidden directory'
-
- assert self.get(url='/path/')['status'] == 403, 'access forbidden'
-
- def test_php_application_extension_check(self, temp_dir):
- self.load('phpinfo')
-
- assert self.get(url='/index.wrong')['status'] != 200, 'status'
-
- new_root = f'{temp_dir}/php'
- os.mkdir(new_root)
- shutil.copy(f'{option.test_dir}/php/phpinfo/index.wrong', new_root)
-
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "applications/phpinfo"}},
- "applications": {
- "phpinfo": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "root": new_root,
- "working_directory": new_root,
- }
- },
- }
- ), 'configure new root'
+def test_php_application_script():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "applications/script"}},
+ "applications": {
+ "script": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "root": f"{option.test_dir}/php/script",
+ "script": "phpinfo.php",
+ }
+ },
+ }
+ ), 'configure script'
- resp = self.get()
- assert f'{resp["status"]}{resp["body"]}' != '200', 'status new root'
+ resp = client.get()
- def run_php_application_cwd_root_tests(self):
- assert 'success' in self.conf_delete(
- 'applications/cwd/working_directory'
- )
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] != '', 'body not empty'
- script_cwd = f'{option.test_dir}/php/cwd'
- resp = self.get()
- assert resp['status'] == 200, 'status ok'
- assert resp['body'] == script_cwd, 'default cwd'
+def test_php_application_index_default():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "applications/phpinfo"}},
+ "applications": {
+ "phpinfo": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "root": f"{option.test_dir}/php/phpinfo",
+ }
+ },
+ }
+ ), 'configure index default'
- assert 'success' in self.conf(
- f'"{option.test_dir}"',
- 'applications/cwd/working_directory',
- )
+ resp = client.get()
- resp = self.get()
- assert resp['status'] == 200, 'status ok'
- assert resp['body'] == script_cwd, 'wdir cwd'
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] != '', 'body not empty'
- resp = self.get(url='/?chdir=/')
- assert resp['status'] == 200, 'status ok'
- assert resp['body'] == '/', 'cwd after chdir'
- # cwd must be restored
+def test_php_application_trailing_slash(temp_dir):
+ new_root = f'{temp_dir}/php-root'
+ os.makedirs(f'{new_root}/path')
- resp = self.get()
- assert resp['status'] == 200, 'status ok'
- assert resp['body'] == script_cwd, 'cwd restored'
+ Path(f'{new_root}/path/index.php').write_text('<?php echo "OK\n"; ?>')
- resp = self.get(url='/subdir/')
- assert resp['body'] == f'{script_cwd}/subdir', 'cwd subdir'
+ addr = f'{temp_dir}/sock'
- def test_php_application_cwd_root(self):
- self.load('cwd')
- self.run_php_application_cwd_root_tests()
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "applications/php-path"},
+ f'unix:{addr}': {"pass": "applications/php-path"},
+ },
+ "applications": {
+ "php-path": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "root": new_root,
+ }
+ },
+ }
+ ), 'configure trailing slash'
+
+ assert client.get(url='/path/')['status'] == 200, 'uri with trailing /'
+
+ resp = client.get(url='/path?q=a')
+ assert resp['status'] == 301, 'uri without trailing /'
+ assert (
+ resp['headers']['Location'] == 'http://localhost:7080/path/?q=a'
+ ), 'Location with query string'
+
+ resp = client.get(
+ sock_type='unix',
+ addr=addr,
+ url='/path',
+ headers={'Host': 'foo', 'Connection': 'close'},
+ )
+ assert resp['status'] == 301, 'uri without trailing /'
+ assert (
+ resp['headers']['Location'] == 'http://foo/path/'
+ ), 'Location with custom Host over UDS'
+
+
+def test_php_application_forbidden(temp_dir):
+ new_root = f'{temp_dir}/php-root/path'
+ os.makedirs(new_root)
+ os.chmod(new_root, 0o000)
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "applications/php-path"}},
+ "applications": {
+ "php-path": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "root": f'{temp_dir}/php-root',
+ }
+ },
+ }
+ ), 'forbidden directory'
- def test_php_application_cwd_opcache_disabled(self):
- self.load('cwd')
- self.set_opcache('cwd', '0')
- self.run_php_application_cwd_root_tests()
+ assert client.get(url='/path/')['status'] == 403, 'access forbidden'
- def test_php_application_cwd_opcache_enabled(self):
- self.load('cwd')
- self.set_opcache('cwd', '1')
- self.run_php_application_cwd_root_tests()
- def run_php_application_cwd_script_tests(self):
- self.load('cwd')
+def test_php_application_extension_check(temp_dir):
+ client.load('phpinfo')
- script_cwd = f'{option.test_dir}/php/cwd'
+ assert client.get(url='/index.wrong')['status'] != 200, 'status'
+
+ new_root = f'{temp_dir}/php'
+ os.mkdir(new_root)
+ shutil.copy(f'{option.test_dir}/php/phpinfo/index.wrong', new_root)
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "applications/phpinfo"}},
+ "applications": {
+ "phpinfo": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "root": new_root,
+ "working_directory": new_root,
+ }
+ },
+ }
+ ), 'configure new root'
+
+ resp = client.get()
+ assert f'{resp["status"]}{resp["body"]}' != '200', 'status new root'
+
+
+def test_php_application_cwd_root():
+ client.load('cwd')
+ run_php_application_cwd_root_tests()
+
+
+def test_php_application_cwd_opcache_disabled():
+ client.load('cwd')
+ set_opcache('cwd', '0')
+ run_php_application_cwd_root_tests()
+
+
+def test_php_application_cwd_opcache_enabled():
+ client.load('cwd')
+ set_opcache('cwd', '1')
+ run_php_application_cwd_root_tests()
+
+
+def test_php_application_cwd_script():
+ client.load('cwd')
+ run_php_application_cwd_script_tests()
- assert 'success' in self.conf_delete(
- 'applications/cwd/working_directory'
- )
- assert 'success' in self.conf('"index.php"', 'applications/cwd/script')
+def test_php_application_cwd_script_opcache_disabled():
+ client.load('cwd')
+ set_opcache('cwd', '0')
+ run_php_application_cwd_script_tests()
- assert self.get()['body'] == script_cwd, 'default cwd'
- assert self.get(url='/?chdir=/')['body'] == '/', 'cwd after chdir'
+def test_php_application_cwd_script_opcache_enabled():
+ client.load('cwd')
+ set_opcache('cwd', '1')
+ run_php_application_cwd_script_tests()
- # cwd must be restored
- assert self.get()['body'] == script_cwd, 'cwd restored'
- def test_php_application_cwd_script(self):
- self.load('cwd')
- self.run_php_application_cwd_script_tests()
+def test_php_application_path_relative():
+ client.load('open')
- def test_php_application_cwd_script_opcache_disabled(self):
- self.load('cwd')
- self.set_opcache('cwd', '0')
- self.run_php_application_cwd_script_tests()
+ assert client.get()['body'] == 'test', 'relative path'
- def test_php_application_cwd_script_opcache_enabled(self):
- self.load('cwd')
- self.set_opcache('cwd', '1')
- self.run_php_application_cwd_script_tests()
+ assert (
+ client.get(url='/?chdir=/')['body'] != 'test'
+ ), 'relative path w/ chdir'
- def test_php_application_path_relative(self):
- self.load('open')
+ assert client.get()['body'] == 'test', 'relative path 2'
- assert self.get()['body'] == 'test', 'relative path'
- assert (
- self.get(url='/?chdir=/')['body'] != 'test'
- ), 'relative path w/ chdir'
+def test_php_application_shared_opcache():
+ client.load('opcache', limits={'requests': 1})
- assert self.get()['body'] == 'test', 'relative path 2'
+ r = check_opcache()
+ pid = r['headers']['X-Pid']
+ assert r['headers']['X-Cached'] == '0', 'not cached'
- def test_php_application_shared_opcache(self):
- self.load('opcache', limits={'requests': 1})
+ r = client.get()
- r = self.check_opcache()
- pid = r['headers']['X-Pid']
- assert r['headers']['X-Cached'] == '0', 'not cached'
+ assert r['headers']['X-Pid'] != pid, 'new instance'
+ assert r['headers']['X-Cached'] == '1', 'cached'
- r = self.get()
- assert r['headers']['X-Pid'] != pid, 'new instance'
- assert r['headers']['X-Cached'] == '1', 'cached'
+def test_php_application_opcache_preload_chdir():
+ client.load('opcache')
- def test_php_application_opcache_preload_chdir(self, temp_dir):
- self.load('opcache')
+ check_opcache()
- self.check_opcache()
+ set_preload('chdir.php')
- self.set_preload('chdir.php')
+ assert client.get()['headers']['X-Cached'] == '0', 'not cached'
+ assert client.get()['headers']['X-Cached'] == '1', 'cached'
- assert self.get()['headers']['X-Cached'] == '0', 'not cached'
- assert self.get()['headers']['X-Cached'] == '1', 'cached'
- def test_php_application_opcache_preload_ffr(self, temp_dir):
- self.load('opcache')
+def test_php_application_opcache_preload_ffr():
+ client.load('opcache')
- self.check_opcache()
+ check_opcache()
- self.set_preload('fastcgi_finish_request.php')
+ set_preload('fastcgi_finish_request.php')
- assert self.get()['headers']['X-Cached'] == '0', 'not cached'
- assert self.get()['headers']['X-Cached'] == '1', 'cached'
+ assert client.get()['headers']['X-Cached'] == '0', 'not cached'
+ assert client.get()['headers']['X-Cached'] == '1', 'cached'
diff --git a/test/test_php_basic.py b/test/test_php_basic.py
index bcd66173..64754961 100644
--- a/test/test_php_basic.py
+++ b/test/test_php_basic.py
@@ -1,123 +1,130 @@
-from unit.control import TestControl
+from unit.control import Control
+prerequisites = {'modules': {'php': 'any'}}
-class TestPHPBasic(TestControl):
- prerequisites = {'modules': {'php': 'any'}}
+client = Control()
- conf_app = {
+conf_app = {
+ "app": {
+ "type": "php",
+ "processes": {"spare": 0},
+ "root": "/app",
+ "index": "index.php",
+ }
+}
+
+conf_basic = {
+ "listeners": {"*:7080": {"pass": "applications/app"}},
+ "applications": conf_app,
+}
+
+
+def test_php_get_applications():
+ assert 'success' in client.conf(conf_app, 'applications')
+
+ conf = client.conf_get()
+
+ assert conf['listeners'] == {}, 'listeners'
+ assert conf['applications'] == {
"app": {
"type": "php",
"processes": {"spare": 0},
"root": "/app",
"index": "index.php",
}
- }
-
- conf_basic = {
- "listeners": {"*:7080": {"pass": "applications/app"}},
- "applications": conf_app,
- }
+ }, 'applications'
- def test_php_get_applications(self):
- assert 'success' in self.conf(self.conf_app, 'applications')
-
- conf = self.conf_get()
-
- assert conf['listeners'] == {}, 'listeners'
- assert conf['applications'] == {
- "app": {
- "type": "php",
- "processes": {"spare": 0},
- "root": "/app",
- "index": "index.php",
- }
- }, 'applications'
-
- assert self.conf_get('applications') == {
- "app": {
- "type": "php",
- "processes": {"spare": 0},
- "root": "/app",
- "index": "index.php",
- }
- }, 'applications prefix'
-
- assert self.conf_get('applications/app') == {
+ assert client.conf_get('applications') == {
+ "app": {
"type": "php",
"processes": {"spare": 0},
"root": "/app",
"index": "index.php",
- }, 'applications prefix 2'
-
- assert self.conf_get('applications/app/type') == 'php', 'type'
- assert (
- self.conf_get('applications/app/processes/spare') == 0
- ), 'spare processes'
-
- def test_php_get_listeners(self):
- assert 'success' in self.conf(self.conf_basic)
-
- assert self.conf_get()['listeners'] == {
- "*:7080": {"pass": "applications/app"}
- }, 'listeners'
-
- assert self.conf_get('listeners') == {
- "*:7080": {"pass": "applications/app"}
- }, 'listeners prefix'
-
- assert self.conf_get('listeners/*:7080') == {
- "pass": "applications/app"
- }, 'listeners prefix 2'
-
- def test_php_change_listener(self):
- assert 'success' in self.conf(self.conf_basic)
- assert 'success' in self.conf(
- {"*:7081": {"pass": "applications/app"}}, 'listeners'
- )
-
- assert self.conf_get('listeners') == {
- "*:7081": {"pass": "applications/app"}
- }, 'change listener'
-
- def test_php_add_listener(self):
- assert 'success' in self.conf(self.conf_basic)
- assert 'success' in self.conf(
- {"pass": "applications/app"}, 'listeners/*:7082'
- )
-
- assert self.conf_get('listeners') == {
- "*:7080": {"pass": "applications/app"},
- "*:7082": {"pass": "applications/app"},
- }, 'add listener'
-
- def test_php_change_application(self):
- assert 'success' in self.conf(self.conf_basic)
-
- assert 'success' in self.conf('30', 'applications/app/processes/max')
- assert (
- self.conf_get('applications/app/processes/max') == 30
- ), 'change application max'
-
- assert 'success' in self.conf('"/www"', 'applications/app/root')
- assert (
- self.conf_get('applications/app/root') == '/www'
- ), 'change application root'
-
- def test_php_delete(self):
- assert 'success' in self.conf(self.conf_basic)
-
- assert 'error' in self.conf_delete('applications/app')
- assert 'success' in self.conf_delete('listeners/*:7080')
- assert 'success' in self.conf_delete('applications/app')
- assert 'error' in self.conf_delete('applications/app')
-
- def test_php_delete_blocks(self):
- assert 'success' in self.conf(self.conf_basic)
-
- assert 'success' in self.conf_delete('listeners')
- assert 'success' in self.conf_delete('applications')
-
- assert 'success' in self.conf(self.conf_app, 'applications')
- assert 'success' in self.conf(
- {"*:7081": {"pass": "applications/app"}}, 'listeners'
- ), 'applications restore'
+ }
+ }, 'applications prefix'
+
+ assert client.conf_get('applications/app') == {
+ "type": "php",
+ "processes": {"spare": 0},
+ "root": "/app",
+ "index": "index.php",
+ }, 'applications prefix 2'
+
+ assert client.conf_get('applications/app/type') == 'php', 'type'
+ assert (
+ client.conf_get('applications/app/processes/spare') == 0
+ ), 'spare processes'
+
+
+def test_php_get_listeners():
+ assert 'success' in client.conf(conf_basic)
+
+ assert client.conf_get()['listeners'] == {
+ "*:7080": {"pass": "applications/app"}
+ }, 'listeners'
+
+ assert client.conf_get('listeners') == {
+ "*:7080": {"pass": "applications/app"}
+ }, 'listeners prefix'
+
+ assert client.conf_get('listeners/*:7080') == {
+ "pass": "applications/app"
+ }, 'listeners prefix 2'
+
+
+def test_php_change_listener():
+ assert 'success' in client.conf(conf_basic)
+ assert 'success' in client.conf(
+ {"*:7081": {"pass": "applications/app"}}, 'listeners'
+ )
+
+ assert client.conf_get('listeners') == {
+ "*:7081": {"pass": "applications/app"}
+ }, 'change listener'
+
+
+def test_php_add_listener():
+ assert 'success' in client.conf(conf_basic)
+ assert 'success' in client.conf(
+ {"pass": "applications/app"}, 'listeners/*:7082'
+ )
+
+ assert client.conf_get('listeners') == {
+ "*:7080": {"pass": "applications/app"},
+ "*:7082": {"pass": "applications/app"},
+ }, 'add listener'
+
+
+def test_php_change_application():
+ assert 'success' in client.conf(conf_basic)
+
+ assert 'success' in client.conf('30', 'applications/app/processes/max')
+ assert (
+ client.conf_get('applications/app/processes/max') == 30
+ ), 'change application max'
+
+ assert 'success' in client.conf('"/www"', 'applications/app/root')
+ assert (
+ client.conf_get('applications/app/root') == '/www'
+ ), 'change application root'
+
+
+def test_php_delete():
+ assert 'success' in client.conf(conf_basic)
+
+ assert 'error' in client.conf_delete('applications/app')
+ assert 'success' in client.conf_delete('listeners/*:7080')
+ assert 'success' in client.conf_delete('applications/app')
+ assert 'error' in client.conf_delete('applications/app')
+
+
+def test_php_delete_blocks():
+ assert 'success' in client.conf(conf_basic)
+
+ assert 'success' in client.conf_delete('listeners')
+ assert 'success' in client.conf_delete('applications')
+
+ assert 'success' in client.conf(conf_app, 'applications')
+ assert 'success' in client.conf(
+ {"*:7081": {"pass": "applications/app"}}, 'listeners'
+ ), 'applications restore'
diff --git a/test/test_php_isolation.py b/test/test_php_isolation.py
index aebeefa6..f248da41 100644
--- a/test/test_php_isolation.py
+++ b/test/test_php_isolation.py
@@ -1,89 +1,85 @@
-import pytest
-from unit.applications.lang.php import TestApplicationPHP
-from unit.option import option
+from unit.applications.lang.php import ApplicationPHP
+prerequisites = {'modules': {'php': 'any'}, 'features': {'isolation': True}}
-class TestPHPIsolation(TestApplicationPHP):
- prerequisites = {'modules': {'php': 'any'}, 'features': ['isolation']}
+client = ApplicationPHP()
- def test_php_isolation_rootfs(self, is_su, temp_dir):
- isolation_features = option.available['features']['isolation'].keys()
- if not is_su:
- if not 'unprivileged_userns_clone' in isolation_features:
- pytest.skip('requires unprivileged userns or root')
+def test_php_isolation_rootfs(is_su, require, temp_dir):
+ isolation = {'rootfs': temp_dir}
- if 'user' not in isolation_features:
- pytest.skip('user namespace is not supported')
-
- if 'mnt' not in isolation_features:
- pytest.skip('mnt namespace is not supported')
-
- if 'pid' not in isolation_features:
- pytest.skip('pid namespace is not supported')
-
- isolation = {'rootfs': temp_dir}
-
- if not is_su:
- isolation['namespaces'] = {
- 'mount': True,
- 'credential': True,
- 'pid': True,
+ if not is_su:
+ require(
+ {
+ 'features': {
+ 'isolation': [
+ 'unprivileged_userns_clone',
+ 'user',
+ 'mnt',
+ 'pid',
+ ]
+ }
}
-
- self.load('phpinfo', isolation=isolation)
-
- assert 'success' in self.conf(
- '"/app/php/phpinfo"', 'applications/phpinfo/root'
- )
- assert 'success' in self.conf(
- '"/app/php/phpinfo"', 'applications/phpinfo/working_directory'
)
- assert self.get()['status'] == 200, 'empty rootfs'
-
- def test_php_isolation_rootfs_extensions(self, is_su, temp_dir):
- isolation_features = option.available['features']['isolation'].keys()
-
- if not is_su:
- if not 'unprivileged_userns_clone' in isolation_features:
- pytest.skip('requires unprivileged userns or root')
-
- if 'user' not in isolation_features:
- pytest.skip('user namespace is not supported')
-
- if 'mnt' not in isolation_features:
- pytest.skip('mnt namespace is not supported')
-
- if 'pid' not in isolation_features:
- pytest.skip('pid namespace is not supported')
-
- isolation = {'rootfs': temp_dir}
-
- if not is_su:
- isolation['namespaces'] = {
- 'mount': True,
- 'credential': True,
- 'pid': True,
+ isolation['namespaces'] = {
+ 'mount': True,
+ 'credential': True,
+ 'pid': True,
+ }
+
+ client.load('phpinfo', isolation=isolation)
+
+ assert 'success' in client.conf(
+ '"/app/php/phpinfo"', 'applications/phpinfo/root'
+ )
+ assert 'success' in client.conf(
+ '"/app/php/phpinfo"', 'applications/phpinfo/working_directory'
+ )
+
+ assert client.get()['status'] == 200, 'empty rootfs'
+
+
+def test_php_isolation_rootfs_extensions(is_su, require, temp_dir):
+ isolation = {'rootfs': temp_dir}
+
+ if not is_su:
+ require(
+ {
+ 'features': {
+ 'isolation': [
+ 'unprivileged_userns_clone',
+ 'user',
+ 'mnt',
+ 'pid',
+ ]
+ }
}
+ )
- self.load('list-extensions', isolation=isolation)
+ isolation['namespaces'] = {
+ 'mount': True,
+ 'credential': True,
+ 'pid': True,
+ }
- assert 'success' in self.conf(
- '"/app/php/list-extensions"', 'applications/list-extensions/root'
- )
+ client.load('list-extensions', isolation=isolation)
- assert 'success' in self.conf(
- {'file': '/php/list-extensions/php.ini'},
- 'applications/list-extensions/options',
- )
+ assert 'success' in client.conf(
+ '"/app/php/list-extensions"', 'applications/list-extensions/root'
+ )
- assert 'success' in self.conf(
- '"/app/php/list-extensions"',
- 'applications/list-extensions/working_directory',
- )
+ assert 'success' in client.conf(
+ {'file': '/php/list-extensions/php.ini'},
+ 'applications/list-extensions/options',
+ )
+
+ assert 'success' in client.conf(
+ '"/app/php/list-extensions"',
+ 'applications/list-extensions/working_directory',
+ )
- extensions = self.getjson()['body']
+ extensions = client.getjson()['body']
- assert 'json' in extensions, 'json in extensions list'
- assert 'unit' in extensions, 'unit in extensions list'
+ assert 'json' in extensions, 'json in extensions list'
+ assert 'unit' in extensions, 'unit in extensions list'
diff --git a/test/test_php_targets.py b/test/test_php_targets.py
index e74f2ec6..857a2dc8 100644
--- a/test/test_php_targets.py
+++ b/test/test_php_targets.py
@@ -1,100 +1,100 @@
-from unit.applications.lang.php import TestApplicationPHP
+from unit.applications.lang.php import ApplicationPHP
from unit.option import option
+prerequisites = {'modules': {'php': 'any'}}
-class TestPHPTargets(TestApplicationPHP):
- prerequisites = {'modules': {'php': 'any'}}
+client = ApplicationPHP()
- def test_php_application_targets(self):
- targets_dir = f"{option.test_dir}/php/targets"
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": "/1"},
- "action": {"pass": "applications/targets/1"},
- },
- {
- "match": {"uri": "/2"},
- "action": {"pass": "applications/targets/2"},
- },
- {"action": {"pass": "applications/targets/default"}},
- ],
- "applications": {
+
+def test_php_application_targets():
+ targets_dir = f"{option.test_dir}/php/targets"
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"uri": "/1"},
+ "action": {"pass": "applications/targets/1"},
+ },
+ {
+ "match": {"uri": "/2"},
+ "action": {"pass": "applications/targets/2"},
+ },
+ {"action": {"pass": "applications/targets/default"}},
+ ],
+ "applications": {
+ "targets": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
"targets": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "targets": {
- "1": {
- "script": "1.php",
- "root": targets_dir,
- },
- "2": {
- "script": "2.php",
- "root": f'{targets_dir}/2',
- },
- "default": {
- "index": "index.php",
- "root": targets_dir,
- },
+ "1": {
+ "script": "1.php",
+ "root": targets_dir,
},
- }
- },
- }
- )
+ "2": {
+ "script": "2.php",
+ "root": f'{targets_dir}/2',
+ },
+ "default": {
+ "index": "index.php",
+ "root": targets_dir,
+ },
+ },
+ }
+ },
+ }
+ )
- assert self.get(url='/1')['body'] == '1'
- assert self.get(url='/2')['body'] == '2'
- assert self.get(url='/blah')['status'] == 404
- assert self.get(url='/')['body'] == 'index'
- assert self.get(url='/1.php?test=test.php/')['body'] == '1'
+ assert client.get(url='/1')['body'] == '1'
+ assert client.get(url='/2')['body'] == '2'
+ assert client.get(url='/blah')['status'] == 404
+ assert client.get(url='/')['body'] == 'index'
+ assert client.get(url='/1.php?test=test.php/')['body'] == '1'
- assert 'success' in self.conf(
- "\"1.php\"", 'applications/targets/targets/default/index'
- ), 'change targets index'
- assert self.get(url='/')['body'] == '1'
+ assert 'success' in client.conf(
+ "\"1.php\"", 'applications/targets/targets/default/index'
+ ), 'change targets index'
+ assert client.get(url='/')['body'] == '1'
- assert 'success' in self.conf_delete(
- 'applications/targets/targets/default/index'
- ), 'remove targets index'
- assert self.get(url='/')['body'] == 'index'
+ assert 'success' in client.conf_delete(
+ 'applications/targets/targets/default/index'
+ ), 'remove targets index'
+ assert client.get(url='/')['body'] == 'index'
- def test_php_application_targets_error(self):
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "applications/targets/default"}
- },
- "applications": {
+
+def test_php_application_targets_error():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "applications/targets/default"}},
+ "applications": {
+ "targets": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
"targets": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "targets": {
- "default": {
- "index": "index.php",
- "root": f"{option.test_dir}/php/targets",
- },
+ "default": {
+ "index": "index.php",
+ "root": f"{option.test_dir}/php/targets",
},
- }
- },
- }
- ), 'initial configuration'
- assert self.get()['status'] == 200
+ },
+ }
+ },
+ }
+ ), 'initial configuration'
+ assert client.get()['status'] == 200
- assert 'error' in self.conf(
- {"pass": "applications/targets/blah"}, 'listeners/*:7080'
- ), 'invalid targets pass'
- assert 'error' in self.conf(
- f'"{option.test_dir}/php/targets"',
- 'applications/targets/root',
- ), 'invalid root'
- assert 'error' in self.conf(
- '"index.php"', 'applications/targets/index'
- ), 'invalid index'
- assert 'error' in self.conf(
- '"index.php"', 'applications/targets/script'
- ), 'invalid script'
- assert 'error' in self.conf_delete(
- 'applications/targets/default/root'
- ), 'root remove'
+ assert 'error' in client.conf(
+ {"pass": "applications/targets/blah"}, 'listeners/*:7080'
+ ), 'invalid targets pass'
+ assert 'error' in client.conf(
+ f'"{option.test_dir}/php/targets"',
+ 'applications/targets/root',
+ ), 'invalid root'
+ assert 'error' in client.conf(
+ '"index.php"', 'applications/targets/index'
+ ), 'invalid index'
+ assert 'error' in client.conf(
+ '"index.php"', 'applications/targets/script'
+ ), 'invalid script'
+ assert 'error' in client.conf_delete(
+ 'applications/targets/default/root'
+ ), 'root remove'
diff --git a/test/test_proxy.py b/test/test_proxy.py
index 74e48ca1..207e90e7 100644
--- a/test/test_proxy.py
+++ b/test/test_proxy.py
@@ -4,486 +4,504 @@ import time
import pytest
from conftest import run_process
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
from unit.utils import waitforsocket
+prerequisites = {'modules': {'python': 'any'}}
-class TestProxy(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
+SERVER_PORT = 7999
- SERVER_PORT = 7999
- @staticmethod
- def run_server(server_port):
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ run_process(run_server, SERVER_PORT)
+ waitforsocket(SERVER_PORT)
- server_address = ('', server_port)
- sock.bind(server_address)
- sock.listen(5)
+ python_dir = f'{option.test_dir}/python'
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {"pass": "applications/mirror"},
+ },
+ "routes": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
+ "applications": {
+ "mirror": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": f'{python_dir}/mirror',
+ "working_directory": f'{python_dir}/mirror',
+ "module": "wsgi",
+ },
+ "custom_header": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": f'{python_dir}/custom_header',
+ "working_directory": f'{python_dir}/custom_header',
+ "module": "wsgi",
+ },
+ "delayed": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": f'{python_dir}/delayed',
+ "working_directory": f'{python_dir}/delayed',
+ "module": "wsgi",
+ },
+ },
+ }
+ ), 'proxy initial configuration'
+
+
+def run_server(server_port):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+
+ server_address = ('', server_port)
+ sock.bind(server_address)
+ sock.listen(5)
- def recvall(sock):
- buff_size = 4096
- data = b''
- while True:
- part = sock.recv(buff_size)
- data += part
- if len(part) < buff_size:
- break
- return data
+ def recvall(sock):
+ buff_size = 4096
+ data = b''
+ while True:
+ part = sock.recv(buff_size)
+ data += part
+ if len(part) < buff_size:
+ break
+ return data
- req = b"""HTTP/1.1 200 OK
+ req = b"""HTTP/1.1 200 OK
Content-Length: 10
"""
- while True:
- connection, client_address = sock.accept()
+ while True:
+ connection, _ = sock.accept()
- data = recvall(connection).decode()
+ data = recvall(connection).decode()
- to_send = req
+ to_send = req
- m = re.search(r'X-Len: (\d+)', data)
- if m:
- to_send += b'X' * int(m.group(1))
+ m = re.search(r'X-Len: (\d+)', data)
+ if m:
+ to_send += b'X' * int(m.group(1))
- connection.sendall(to_send)
+ connection.sendall(to_send)
- connection.close()
+ connection.close()
- def get_http10(self, *args, **kwargs):
- return self.get(*args, http_10=True, **kwargs)
- def post_http10(self, *args, **kwargs):
- return self.post(*args, http_10=True, **kwargs)
+def get_http10(*args, **kwargs):
+ return client.get(*args, http_10=True, **kwargs)
- def setup_method(self):
- run_process(self.run_server, self.SERVER_PORT)
- waitforsocket(self.SERVER_PORT)
- python_dir = f'{option.test_dir}/python'
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {"pass": "applications/mirror"},
- },
- "routes": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
- "applications": {
- "mirror": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": f'{python_dir}/mirror',
- "working_directory": f'{python_dir}/mirror',
- "module": "wsgi",
- },
- "custom_header": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": f'{python_dir}/custom_header',
- "working_directory": f'{python_dir}/custom_header',
- "module": "wsgi",
- },
- "delayed": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": f'{python_dir}/delayed',
- "working_directory": f'{python_dir}/delayed',
- "module": "wsgi",
- },
- },
- }
- ), 'proxy initial configuration'
-
- def test_proxy_http10(self):
- for _ in range(10):
- assert self.get_http10()['status'] == 200, 'status'
-
- def test_proxy_chain(self):
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes/first"},
- "*:7081": {"pass": "routes/second"},
- "*:7082": {"pass": "routes/third"},
- "*:7083": {"pass": "routes/fourth"},
- "*:7084": {"pass": "routes/fifth"},
- "*:7085": {"pass": "applications/mirror"},
- },
- "routes": {
- "first": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
- "second": [{"action": {"proxy": "http://127.0.0.1:7082"}}],
- "third": [{"action": {"proxy": "http://127.0.0.1:7083"}}],
- "fourth": [{"action": {"proxy": "http://127.0.0.1:7084"}}],
- "fifth": [{"action": {"proxy": "http://127.0.0.1:7085"}}],
- },
- "applications": {
- "mirror": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": f'{option.test_dir}/python/mirror',
- "working_directory": f'{option.test_dir}/python/mirror',
- "module": "wsgi",
- }
- },
- }
- ), 'proxy chain configuration'
+def post_http10(*args, **kwargs):
+ return client.post(*args, http_10=True, **kwargs)
- assert self.get_http10()['status'] == 200, 'status'
- def test_proxy_body(self):
- payload = '0123456789'
- for _ in range(10):
- resp = self.post_http10(body=payload)
+def test_proxy_http10():
+ for _ in range(10):
+ assert get_http10()['status'] == 200, 'status'
- assert resp['status'] == 200, 'status'
- assert resp['body'] == payload, 'body'
- payload = 'X' * 4096
- for _ in range(10):
- resp = self.post_http10(body=payload)
+def test_proxy_chain():
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes/first"},
+ "*:7081": {"pass": "routes/second"},
+ "*:7082": {"pass": "routes/third"},
+ "*:7083": {"pass": "routes/fourth"},
+ "*:7084": {"pass": "routes/fifth"},
+ "*:7085": {"pass": "applications/mirror"},
+ },
+ "routes": {
+ "first": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
+ "second": [{"action": {"proxy": "http://127.0.0.1:7082"}}],
+ "third": [{"action": {"proxy": "http://127.0.0.1:7083"}}],
+ "fourth": [{"action": {"proxy": "http://127.0.0.1:7084"}}],
+ "fifth": [{"action": {"proxy": "http://127.0.0.1:7085"}}],
+ },
+ "applications": {
+ "mirror": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": f'{option.test_dir}/python/mirror',
+ "working_directory": f'{option.test_dir}/python/mirror',
+ "module": "wsgi",
+ }
+ },
+ }
+ ), 'proxy chain configuration'
+
+ assert get_http10()['status'] == 200, 'status'
- assert resp['status'] == 200, 'status'
- assert resp['body'] == payload, 'body'
- payload = 'X' * 4097
- for _ in range(10):
- resp = self.post_http10(body=payload)
+def test_proxy_body():
+ payload = '0123456789'
+ for _ in range(10):
+ resp = post_http10(body=payload)
- assert resp['status'] == 200, 'status'
- assert resp['body'] == payload, 'body'
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == payload, 'body'
- payload = 'X' * 4096 * 256
- for _ in range(10):
- resp = self.post_http10(body=payload, read_buffer_size=4096 * 128)
+ payload = 'X' * 4096
+ for _ in range(10):
+ resp = post_http10(body=payload)
- assert resp['status'] == 200, 'status'
- assert resp['body'] == payload, 'body'
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == payload, 'body'
- payload = 'X' * 4096 * 257
- for _ in range(10):
- resp = self.post_http10(body=payload, read_buffer_size=4096 * 128)
+ payload = 'X' * 4097
+ for _ in range(10):
+ resp = post_http10(body=payload)
- assert resp['status'] == 200, 'status'
- assert resp['body'] == payload, 'body'
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == payload, 'body'
- assert 'success' in self.conf(
- {'http': {'max_body_size': 32 * 1024 * 1024}}, 'settings'
- )
+ payload = 'X' * 4096 * 256
+ for _ in range(10):
+ resp = post_http10(body=payload, read_buffer_size=4096 * 128)
- payload = '0123456789abcdef' * 32 * 64 * 1024
- resp = self.post_http10(body=payload, read_buffer_size=1024 * 1024)
assert resp['status'] == 200, 'status'
assert resp['body'] == payload, 'body'
- def test_proxy_parallel(self):
- payload = 'X' * 4096 * 257
- buff_size = 4096 * 258
+ payload = 'X' * 4096 * 257
+ for _ in range(10):
+ resp = post_http10(body=payload, read_buffer_size=4096 * 128)
- socks = []
- for i in range(10):
- sock = self.post_http10(
- body=f'{payload}{i}',
- no_recv=True,
- read_buffer_size=buff_size,
- )
- socks.append(sock)
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == payload, 'body'
- for i in range(10):
- resp = self.recvall(socks[i], buff_size=buff_size).decode()
- socks[i].close()
+ assert 'success' in client.conf(
+ {'http': {'max_body_size': 32 * 1024 * 1024}}, 'settings'
+ )
- resp = self._resp_to_dict(resp)
+ payload = '0123456789abcdef' * 32 * 64 * 1024
+ resp = post_http10(body=payload, read_buffer_size=1024 * 1024)
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == payload, 'body'
- assert resp['status'] == 200, 'status'
- assert resp['body'] == f'{payload}{i}', 'body'
- def test_proxy_header(self):
- assert 'success' in self.conf(
- {"pass": "applications/custom_header"}, 'listeners/*:7081'
- ), 'custom_header configure'
+def test_proxy_parallel():
+ payload = 'X' * 4096 * 257
+ buff_size = 4096 * 258
- header_value = 'blah'
- assert (
- self.get_http10(
- headers={'Host': 'localhost', 'Custom-Header': header_value}
- )['headers']['Custom-Header']
- == header_value
- ), 'custom header'
+ socks = []
+ for i in range(10):
+ sock = post_http10(
+ body=f'{payload}{i}',
+ no_recv=True,
+ read_buffer_size=buff_size,
+ )
+ socks.append(sock)
- header_value = r"(),/:;<=>?@[\]{}\t !#$%&'*+-.^_`|~"
- assert (
- self.get_http10(
- headers={'Host': 'localhost', 'Custom-Header': header_value}
- )['headers']['Custom-Header']
- == header_value
- ), 'custom header 2'
+ for i in range(10):
+ resp = client.recvall(socks[i], buff_size=buff_size).decode()
+ socks[i].close()
- header_value = 'X' * 4096
- assert (
- self.get_http10(
- headers={'Host': 'localhost', 'Custom-Header': header_value}
- )['headers']['Custom-Header']
- == header_value
- ), 'custom header 3'
+ resp = client._resp_to_dict(resp)
- header_value = 'X' * 8191
- assert (
- self.get_http10(
- headers={'Host': 'localhost', 'Custom-Header': header_value}
- )['headers']['Custom-Header']
- == header_value
- ), 'custom header 4'
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == f'{payload}{i}', 'body'
- header_value = 'X' * 8192
- assert (
- self.get_http10(
- headers={'Host': 'localhost', 'Custom-Header': header_value}
- )['status']
- == 431
- ), 'custom header 5'
- def test_proxy_fragmented(self):
- sock = self.http(b"""GET / HTT""", raw=True, no_recv=True)
+def test_proxy_header():
+ assert 'success' in client.conf(
+ {"pass": "applications/custom_header"}, 'listeners/*:7081'
+ ), 'custom_header configure'
- time.sleep(1)
+ header_value = 'blah'
+ assert (
+ get_http10(
+ headers={'Host': 'localhost', 'Custom-Header': header_value}
+ )['headers']['Custom-Header']
+ == header_value
+ ), 'custom header'
- sock.sendall("P/1.0\r\nHost: localhos".encode())
+ header_value = r"(),/:;<=>?@[\]{}\t !#$%&'*+-.^_`|~"
+ assert (
+ get_http10(
+ headers={'Host': 'localhost', 'Custom-Header': header_value}
+ )['headers']['Custom-Header']
+ == header_value
+ ), 'custom header 2'
- time.sleep(1)
+ header_value = 'X' * 4096
+ assert (
+ get_http10(
+ headers={'Host': 'localhost', 'Custom-Header': header_value}
+ )['headers']['Custom-Header']
+ == header_value
+ ), 'custom header 3'
- sock.sendall("t\r\n\r\n".encode())
+ header_value = 'X' * 8191
+ assert (
+ get_http10(
+ headers={'Host': 'localhost', 'Custom-Header': header_value}
+ )['headers']['Custom-Header']
+ == header_value
+ ), 'custom header 4'
- assert re.search(
- '200 OK', self.recvall(sock).decode()
- ), 'fragmented send'
- sock.close()
+ header_value = 'X' * 8192
+ assert (
+ get_http10(
+ headers={'Host': 'localhost', 'Custom-Header': header_value}
+ )['status']
+ == 431
+ ), 'custom header 5'
- def test_proxy_fragmented_close(self):
- sock = self.http(b"""GET / HTT""", raw=True, no_recv=True)
- time.sleep(1)
+def test_proxy_fragmented():
+ sock = client.http(b"""GET / HTT""", raw=True, no_recv=True)
- sock.sendall("P/1.0\r\nHo".encode())
+ time.sleep(1)
- sock.close()
+ sock.sendall("P/1.0\r\nHost: localhos".encode())
- def test_proxy_fragmented_body(self):
- sock = self.http(b"""GET / HTT""", raw=True, no_recv=True)
+ time.sleep(1)
- time.sleep(1)
+ sock.sendall("t\r\n\r\n".encode())
- sock.sendall("P/1.0\r\nHost: localhost\r\n".encode())
- sock.sendall("Content-Length: 30000\r\n".encode())
+ assert re.search('200 OK', client.recvall(sock).decode()), 'fragmented send'
+ sock.close()
- time.sleep(1)
- sock.sendall("\r\n".encode())
- sock.sendall(("X" * 10000).encode())
+def test_proxy_fragmented_close():
+ sock = client.http(b"""GET / HTT""", raw=True, no_recv=True)
- time.sleep(1)
+ time.sleep(1)
- sock.sendall(("X" * 10000).encode())
+ sock.sendall("P/1.0\r\nHo".encode())
- time.sleep(1)
+ sock.close()
- sock.sendall(("X" * 10000).encode())
- resp = self._resp_to_dict(self.recvall(sock).decode())
- sock.close()
+def test_proxy_fragmented_body():
+ sock = client.http(b"""GET / HTT""", raw=True, no_recv=True)
- assert resp['status'] == 200, 'status'
- assert resp['body'] == "X" * 30000, 'body'
+ time.sleep(1)
- def test_proxy_fragmented_body_close(self):
- sock = self.http(b"""GET / HTT""", raw=True, no_recv=True)
+ sock.sendall("P/1.0\r\nHost: localhost\r\n".encode())
+ sock.sendall("Content-Length: 30000\r\n".encode())
- time.sleep(1)
+ time.sleep(1)
- sock.sendall("P/1.0\r\nHost: localhost\r\n".encode())
- sock.sendall("Content-Length: 30000\r\n".encode())
+ sock.sendall("\r\n".encode())
+ sock.sendall(("X" * 10000).encode())
- time.sleep(1)
+ time.sleep(1)
- sock.sendall("\r\n".encode())
- sock.sendall(("X" * 10000).encode())
+ sock.sendall(("X" * 10000).encode())
- sock.close()
+ time.sleep(1)
- def test_proxy_nowhere(self):
- assert 'success' in self.conf(
- [{"action": {"proxy": "http://127.0.0.1:7082"}}], 'routes'
- ), 'proxy path changed'
+ sock.sendall(("X" * 10000).encode())
- assert self.get_http10()['status'] == 502, 'status'
+ resp = client._resp_to_dict(client.recvall(sock).decode())
+ sock.close()
- def test_proxy_ipv6(self):
- assert 'success' in self.conf(
- {
- "*:7080": {"pass": "routes"},
- "[::1]:7081": {'application': 'mirror'},
- },
- 'listeners',
- ), 'add ipv6 listener configure'
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == "X" * 30000, 'body'
- assert 'success' in self.conf(
- [{"action": {"proxy": "http://[::1]:7081"}}], 'routes'
- ), 'proxy ipv6 configure'
- assert self.get_http10()['status'] == 200, 'status'
+def test_proxy_fragmented_body_close():
+ sock = client.http(b"""GET / HTT""", raw=True, no_recv=True)
- def test_proxy_unix(self, temp_dir):
- addr = f'{temp_dir}/sock'
+ time.sleep(1)
- assert 'success' in self.conf(
- {
- "*:7080": {"pass": "routes"},
- f'unix:{addr}': {'application': 'mirror'},
- },
- 'listeners',
- ), 'add unix listener configure'
-
- assert 'success' in self.conf(
- [{"action": {"proxy": f'http://unix:{addr}'}}], 'routes'
- ), 'proxy unix configure'
-
- assert self.get_http10()['status'] == 200, 'status'
-
- def test_proxy_delayed(self):
- assert 'success' in self.conf(
- {"pass": "applications/delayed"}, 'listeners/*:7081'
- ), 'delayed configure'
-
- body = '0123456789' * 1000
- resp = self.post_http10(
- headers={
- 'Host': 'localhost',
- 'Content-Length': str(len(body)),
- 'X-Parts': '2',
- 'X-Delay': '1',
- },
- body=body,
- )
+ sock.sendall("P/1.0\r\nHost: localhost\r\n".encode())
+ sock.sendall("Content-Length: 30000\r\n".encode())
- assert resp['status'] == 200, 'status'
- assert resp['body'] == body, 'body'
-
- resp = self.post_http10(
- headers={
- 'Host': 'localhost',
- 'Content-Length': str(len(body)),
- 'X-Parts': '2',
- 'X-Delay': '1',
- },
- body=body,
- )
+ time.sleep(1)
- assert resp['status'] == 200, 'status'
- assert resp['body'] == body, 'body'
-
- def test_proxy_delayed_close(self):
- assert 'success' in self.conf(
- {"pass": "applications/delayed"}, 'listeners/*:7081'
- ), 'delayed configure'
-
- sock = self.post_http10(
- headers={
- 'Host': 'localhost',
- 'Content-Length': '10000',
- 'X-Parts': '3',
- 'X-Delay': '1',
- },
- body='0123456789' * 1000,
- no_recv=True,
- )
+ sock.sendall("\r\n".encode())
+ sock.sendall(("X" * 10000).encode())
- assert re.search('200 OK', sock.recv(100).decode()), 'first'
- sock.close()
+ sock.close()
- sock = self.post_http10(
- headers={
- 'Host': 'localhost',
- 'Content-Length': '10000',
- 'X-Parts': '3',
- 'X-Delay': '1',
- },
- body='0123456789' * 1000,
- no_recv=True,
- )
- assert re.search('200 OK', sock.recv(100).decode()), 'second'
- sock.close()
-
- @pytest.mark.skip('not yet')
- def test_proxy_content_length(self):
- assert 'success' in self.conf(
- [{"action": {"proxy": f'http://127.0.0.1:{self.SERVER_PORT}'}}],
- 'routes',
- ), 'proxy backend configure'
-
- resp = self.get_http10()
- assert len(resp['body']) == 0, 'body lt Content-Length 0'
-
- resp = self.get_http10(headers={'Host': 'localhost', 'X-Len': '5'})
- assert len(resp['body']) == 5, 'body lt Content-Length 5'
-
- resp = self.get_http10(headers={'Host': 'localhost', 'X-Len': '9'})
- assert len(resp['body']) == 9, 'body lt Content-Length 9'
-
- resp = self.get_http10(headers={'Host': 'localhost', 'X-Len': '11'})
- assert len(resp['body']) == 10, 'body gt Content-Length 11'
-
- resp = self.get_http10(headers={'Host': 'localhost', 'X-Len': '15'})
- assert len(resp['body']) == 10, 'body gt Content-Length 15'
-
- def test_proxy_invalid(self):
- def check_proxy(proxy):
- assert 'error' in self.conf(
- [{"action": {"proxy": proxy}}], 'routes'
- ), 'proxy invalid'
-
- check_proxy('blah')
- check_proxy('/blah')
- check_proxy('unix:/blah')
- check_proxy('http://blah')
- check_proxy('http://127.0.0.1')
- check_proxy('http://127.0.0.1:')
- check_proxy('http://127.0.0.1:blah')
- check_proxy('http://127.0.0.1:-1')
- check_proxy('http://127.0.0.1:7080b')
- check_proxy('http://[]')
- check_proxy('http://[]:7080')
- check_proxy('http://[:]:7080')
- check_proxy('http://[::7080')
-
- @pytest.mark.skip('not yet')
- def test_proxy_loop(self, skip_alert):
- skip_alert(
- r'socket.*failed',
- r'accept.*failed',
- r'new connections are not accepted',
- )
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {"pass": "applications/mirror"},
- "*:7082": {"pass": "routes"},
- },
- "routes": [{"action": {"proxy": "http://127.0.0.1:7082"}}],
- "applications": {
- "mirror": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": f'{option.test_dir}/python/mirror',
- "working_directory": f'{option.test_dir}/python/mirror',
- "module": "wsgi",
- },
+def test_proxy_nowhere():
+ assert 'success' in client.conf(
+ [{"action": {"proxy": "http://127.0.0.1:7082"}}], 'routes'
+ ), 'proxy path changed'
+
+ assert get_http10()['status'] == 502, 'status'
+
+
+def test_proxy_ipv6():
+ assert 'success' in client.conf(
+ {
+ "*:7080": {"pass": "routes"},
+ "[::1]:7081": {'application': 'mirror'},
+ },
+ 'listeners',
+ ), 'add ipv6 listener configure'
+
+ assert 'success' in client.conf(
+ [{"action": {"proxy": "http://[::1]:7081"}}], 'routes'
+ ), 'proxy ipv6 configure'
+
+ assert get_http10()['status'] == 200, 'status'
+
+
+def test_proxy_unix(temp_dir):
+ addr = f'{temp_dir}/sock'
+
+ assert 'success' in client.conf(
+ {
+ "*:7080": {"pass": "routes"},
+ f'unix:{addr}': {'application': 'mirror'},
+ },
+ 'listeners',
+ ), 'add unix listener configure'
+
+ assert 'success' in client.conf(
+ [{"action": {"proxy": f'http://unix:{addr}'}}], 'routes'
+ ), 'proxy unix configure'
+
+ assert get_http10()['status'] == 200, 'status'
+
+
+def test_proxy_delayed():
+ assert 'success' in client.conf(
+ {"pass": "applications/delayed"}, 'listeners/*:7081'
+ ), 'delayed configure'
+
+ body = '0123456789' * 1000
+ resp = post_http10(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': str(len(body)),
+ 'X-Parts': '2',
+ 'X-Delay': '1',
+ },
+ body=body,
+ )
+
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == body, 'body'
+
+ resp = post_http10(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': str(len(body)),
+ 'X-Parts': '2',
+ 'X-Delay': '1',
+ },
+ body=body,
+ )
+
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == body, 'body'
+
+
+def test_proxy_delayed_close():
+ assert 'success' in client.conf(
+ {"pass": "applications/delayed"}, 'listeners/*:7081'
+ ), 'delayed configure'
+
+ sock = post_http10(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': '10000',
+ 'X-Parts': '3',
+ 'X-Delay': '1',
+ },
+ body='0123456789' * 1000,
+ no_recv=True,
+ )
+
+ assert re.search('200 OK', sock.recv(100).decode()), 'first'
+ sock.close()
+
+ sock = post_http10(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': '10000',
+ 'X-Parts': '3',
+ 'X-Delay': '1',
+ },
+ body='0123456789' * 1000,
+ no_recv=True,
+ )
+
+ assert re.search('200 OK', sock.recv(100).decode()), 'second'
+ sock.close()
+
+
+@pytest.mark.skip('not yet')
+def test_proxy_content_length():
+ assert 'success' in client.conf(
+ [{"action": {"proxy": f'http://127.0.0.1:{SERVER_PORT}'}}],
+ 'routes',
+ ), 'proxy backend configure'
+
+ resp = get_http10()
+ assert len(resp['body']) == 0, 'body lt Content-Length 0'
+
+ resp = get_http10(headers={'Host': 'localhost', 'X-Len': '5'})
+ assert len(resp['body']) == 5, 'body lt Content-Length 5'
+
+ resp = get_http10(headers={'Host': 'localhost', 'X-Len': '9'})
+ assert len(resp['body']) == 9, 'body lt Content-Length 9'
+
+ resp = get_http10(headers={'Host': 'localhost', 'X-Len': '11'})
+ assert len(resp['body']) == 10, 'body gt Content-Length 11'
+
+ resp = get_http10(headers={'Host': 'localhost', 'X-Len': '15'})
+ assert len(resp['body']) == 10, 'body gt Content-Length 15'
+
+
+def test_proxy_invalid():
+ def check_proxy(proxy):
+ assert 'error' in client.conf(
+ [{"action": {"proxy": proxy}}], 'routes'
+ ), 'proxy invalid'
+
+ check_proxy('blah')
+ check_proxy('/blah')
+ check_proxy('unix:/blah')
+ check_proxy('http://blah')
+ check_proxy('http://127.0.0.1')
+ check_proxy('http://127.0.0.1:')
+ check_proxy('http://127.0.0.1:blah')
+ check_proxy('http://127.0.0.1:-1')
+ check_proxy('http://127.0.0.1:7080b')
+ check_proxy('http://[]')
+ check_proxy('http://[]:7080')
+ check_proxy('http://[:]:7080')
+ check_proxy('http://[::7080')
+
+
+@pytest.mark.skip('not yet')
+def test_proxy_loop(skip_alert):
+ skip_alert(
+ r'socket.*failed',
+ r'accept.*failed',
+ r'new connections are not accepted',
+ )
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {"pass": "applications/mirror"},
+ "*:7082": {"pass": "routes"},
+ },
+ "routes": [{"action": {"proxy": "http://127.0.0.1:7082"}}],
+ "applications": {
+ "mirror": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": f'{option.test_dir}/python/mirror',
+ "working_directory": f'{option.test_dir}/python/mirror',
+ "module": "wsgi",
},
- }
- )
+ },
+ }
+ )
- self.get_http10(no_recv=True)
- self.get_http10(read_timeout=1)
+ get_http10(no_recv=True)
+ get_http10(read_timeout=1)
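
This diff converts the class-based tests into module-level pytest functions: the prerequisites dict and the ApplicationPython() client move to module scope, setup_method(self) becomes an autouse setup_method_fixture(), and helpers such as self.get_http10()/self.post_http10() become thin module-level wrappers around the shared client. A minimal sketch of that layout, assuming only the helper calls already visible in this diff (client.conf(), client.get(), client.post()) and the Unit test harness they run under; the route target and the final test here are illustrative, not taken from the suite:

import pytest

from unit.applications.lang.python import ApplicationPython

prerequisites = {'modules': {'python': 'any'}}

client = ApplicationPython()


def get_http10(*args, **kwargs):
    # Send the request as HTTP/1.0, as the proxy tests in this diff do.
    return client.get(*args, http_10=True, **kwargs)


@pytest.fixture(autouse=True)
def setup_method_fixture():
    # Runs before every test in the module, replacing setup_method(self).
    assert 'success' in client.conf(
        {
            "listeners": {"*:7080": {"pass": "routes"}},
            "routes": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
        }
    )


def test_proxy_smoke():
    # 502 if nothing listens on the upstream port, 200 once a backend is up;
    # the exact status depends on the environment this sketch runs in.
    assert get_http10()['status'] in (200, 502)

With everything at module scope, pytest collects plain functions instead of Test* methods, and shared helpers such as temp_dir, skip_alert, or wait_for_record arrive as fixture arguments rather than through self.
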
diff --git a/test/test_proxy_chunked.py b/test/test_proxy_chunked.py
index f31c976a..a066e1e8 100644
--- a/test/test_proxy_chunked.py
+++ b/test/test_proxy_chunked.py
@@ -3,235 +3,226 @@ import select
import socket
import time
+import pytest
from conftest import run_process
-from unit.applications.lang.python import TestApplicationPython
-from unit.option import option
+from unit.applications.lang.python import ApplicationPython
from unit.utils import waitforsocket
+prerequisites = {'modules': {'python': 'any'}}
-class TestProxyChunked(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
+SERVER_PORT = 7999
- SERVER_PORT = 7999
- @staticmethod
- def run_server(server_port, temp_dir):
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ run_process(run_server, SERVER_PORT)
+ waitforsocket(SERVER_PORT)
- server_address = ('127.0.0.1', server_port)
- sock.bind(server_address)
- sock.listen(10)
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ },
+ "routes": [
+ {"action": {"proxy": f'http://127.0.0.1:{SERVER_PORT}'}}
+ ],
+ }
+ ), 'proxy initial configuration'
- def recvall(sock):
- buff_size = 4096 * 4096
- data = b''
- while True:
- rlist = select.select([sock], [], [], 0.1)
- if not rlist[0]:
- break
+def run_server(server_port):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- part = sock.recv(buff_size)
- data += part
+ server_address = ('127.0.0.1', server_port)
+ sock.bind(server_address)
+ sock.listen(10)
- if not len(part):
- break
+ def recvall(sock):
+ buff_size = 4096 * 4096
+ data = b''
+ while True:
+ rlist = select.select([sock], [], [], 0.1)
- return data
+ if not rlist[0]:
+ break
- while True:
- connection, client_address = sock.accept()
+ part = sock.recv(buff_size)
+ data += part
- req = """HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked"""
+ if not len(part):
+ break
- data = recvall(connection).decode()
+ return data
- m = re.search('\x0d\x0a\x0d\x0a(.*)', data, re.M | re.S)
- if m is not None:
- body = m.group(1)
+ while True:
+ connection, _ = sock.accept()
- for line in re.split('\r\n', body):
- add = ''
- m1 = re.search(r'(.*)\sX\s(\d+)', line)
+ req = """HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked"""
- if m1 is not None:
- add = m1.group(1) * int(m1.group(2))
- else:
- add = line
+ data = recvall(connection).decode()
- req = f'{req}{add}\r\n'
+ m = re.search('\x0d\x0a\x0d\x0a(.*)', data, re.M | re.S)
+ if m is not None:
+ body = m.group(1)
- for chunk in re.split(r'([@#])', req):
- if chunk == '@' or chunk == '#':
- if chunk == '#':
- time.sleep(0.1)
- continue
+ for line in re.split('\r\n', body):
+ add = ''
+ m1 = re.search(r'(.*)\sX\s(\d+)', line)
- connection.sendall(chunk.encode())
+ if m1 is not None:
+ add = m1.group(1) * int(m1.group(2))
+ else:
+ add = line
- connection.close()
+ req = f'{req}{add}\r\n'
- def chunks(self, chunks):
- body = '\r\n\r\n'
+ for chunk in re.split(r'([@#])', req):
+ if chunk == '@' or chunk == '#':
+ if chunk == '#':
+ time.sleep(0.1)
+ continue
- for l, c in chunks:
- body = f'{body}{l}\r\n{c}\r\n'
+ connection.sendall(chunk.encode())
- return f'{body}0\r\n\r\n'
+ connection.close()
- def get_http10(self, *args, **kwargs):
- return self.get(*args, http_10=True, **kwargs)
- def setup_method(self):
- run_process(self.run_server, self.SERVER_PORT, option.temp_dir)
- waitforsocket(self.SERVER_PORT)
+def chunks(chunks):
+ body = '\r\n\r\n'
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- },
- "routes": [
- {
- "action": {
- "proxy": f'http://127.0.0.1:{self.SERVER_PORT}'
- }
- }
- ],
- }
- ), 'proxy initial configuration'
+ for l, c in chunks:
+ body = f'{body}{l}\r\n{c}\r\n'
- def test_proxy_chunked(self):
- for _ in range(10):
- assert self.get_http10(body='\r\n\r\n0\r\n\r\n')['status'] == 200
+ return f'{body}0\r\n\r\n'
- def test_proxy_chunked_body(self):
- part = '0123456789abcdef'
- assert (
- self.get_http10(body=self.chunks([('1000', f'{part} X 256')]))[
- 'body'
- ]
- == part * 256
- )
- assert (
- self.get_http10(body=self.chunks([('100000', f'{part} X 65536')]))[
- 'body'
- ]
- == part * 65536
- )
- assert (
- self.get_http10(
- body=self.chunks([('1000000', f'{part} X 1048576')]),
- read_buffer_size=4096 * 4096,
- )['body']
- == part * 1048576
- )
+def get_http10(*args, **kwargs):
+ return client.get(*args, http_10=True, **kwargs)
- assert (
- self.get_http10(
- body=self.chunks(
- [('1000', f'{part} X 256'), ('1000', f'{part} X 256')]
- )
- )['body']
- == part * 256 * 2
- )
- assert (
- self.get_http10(
- body=self.chunks(
- [
- ('100000', f'{part} X 65536'),
- ('100000', f'{part} X 65536'),
- ]
- )
- )['body']
- == part * 65536 * 2
- )
- assert (
- self.get_http10(
- body=self.chunks(
- [
- ('1000000', f'{part} X 1048576'),
- ('1000000', f'{part} X 1048576'),
- ]
- ),
- read_buffer_size=4096 * 4096,
- )['body']
- == part * 1048576 * 2
- )
- def test_proxy_chunked_fragmented(self):
- part = '0123456789abcdef'
+def test_proxy_chunked():
+ for _ in range(10):
+ assert get_http10(body='\r\n\r\n0\r\n\r\n')['status'] == 200
- assert (
- self.get_http10(
- body=self.chunks([('1', hex(i % 16)[2:]) for i in range(4096)]),
- )['body']
- == part * 256
- )
- def test_proxy_chunked_send(self):
- assert self.get_http10(body='\r\n\r\n@0@\r\n\r\n')['status'] == 200
- assert (
- self.get_http10(
- body='\r@\n\r\n2\r@\na@b\r\n2\r\ncd@\r\n0\r@\n\r\n'
- )['body']
- == 'abcd'
- )
- assert (
- self.get_http10(
- body='\r\n\r\n2\r#\na#b\r\n##2\r\n#cd\r\n0\r\n#\r#\n'
- )['body']
- == 'abcd'
- )
+def test_proxy_chunked_body():
+ part = '0123456789abcdef'
- def test_proxy_chunked_invalid(self):
- def check_invalid(body):
- assert self.get_http10(body=body)['status'] != 200
-
- check_invalid('\r\n\r0')
- check_invalid('\r\n\r\n\r0')
- check_invalid('\r\n\r\n\r\n0')
- check_invalid('\r\nContent-Length: 5\r\n\r\n0\r\n\r\n')
- check_invalid('\r\n\r\n1\r\nXX\r\n0\r\n\r\n')
- check_invalid('\r\n\r\n2\r\nX\r\n0\r\n\r\n')
- check_invalid('\r\n\r\nH\r\nXX\r\n0\r\n\r\n')
- check_invalid('\r\n\r\n0\r\nX')
-
- resp = self.get_http10(body='\r\n\r\n65#\r\nA X 100')
- assert resp['status'] == 200, 'incomplete chunk status'
- assert resp['body'][-5:] != '0\r\n\r\n', 'incomplete chunk'
-
- resp = self.get_http10(body='\r\n\r\n64#\r\nA X 100')
- assert resp['status'] == 200, 'no zero chunk status'
- assert resp['body'][-5:] != '0\r\n\r\n', 'no zero chunk'
-
- assert (
- self.get_http10(body='\r\n\r\n80000000\r\nA X 100')['status'] == 200
- )
- assert (
- self.get_http10(body='\r\n\r\n10000000000000000\r\nA X 100')[
- 'status'
- ]
- == 502
- )
- assert (
- len(
- self.get_http10(
- body='\r\n\r\n1000000\r\nA X 1048576\r\n1000000\r\nA X 100',
- read_buffer_size=4096 * 4096,
- )['body']
+ assert (
+ get_http10(body=chunks([('1000', f'{part} X 256')]))['body']
+ == part * 256
+ )
+ assert (
+ get_http10(body=chunks([('100000', f'{part} X 65536')]))['body']
+ == part * 65536
+ )
+ assert (
+ get_http10(
+ body=chunks([('1000000', f'{part} X 1048576')]),
+ read_buffer_size=4096 * 4096,
+ )['body']
+ == part * 1048576
+ )
+
+ assert (
+ get_http10(
+ body=chunks([('1000', f'{part} X 256'), ('1000', f'{part} X 256')])
+ )['body']
+ == part * 256 * 2
+ )
+ assert (
+ get_http10(
+ body=chunks(
+ [
+ ('100000', f'{part} X 65536'),
+ ('100000', f'{part} X 65536'),
+ ]
)
- >= 1048576
+ )['body']
+ == part * 65536 * 2
+ )
+ assert (
+ get_http10(
+ body=chunks(
+ [
+ ('1000000', f'{part} X 1048576'),
+ ('1000000', f'{part} X 1048576'),
+ ]
+ ),
+ read_buffer_size=4096 * 4096,
+ )['body']
+ == part * 1048576 * 2
+ )
+
+
+def test_proxy_chunked_fragmented():
+ part = '0123456789abcdef'
+
+ assert (
+ get_http10(
+ body=chunks([('1', hex(i % 16)[2:]) for i in range(4096)]),
+ )['body']
+ == part * 256
+ )
+
+
+def test_proxy_chunked_send():
+ assert get_http10(body='\r\n\r\n@0@\r\n\r\n')['status'] == 200
+ assert (
+ get_http10(body='\r@\n\r\n2\r@\na@b\r\n2\r\ncd@\r\n0\r@\n\r\n')['body']
+ == 'abcd'
+ )
+ assert (
+ get_http10(body='\r\n\r\n2\r#\na#b\r\n##2\r\n#cd\r\n0\r\n#\r#\n')[
+ 'body'
+ ]
+ == 'abcd'
+ )
+
+
+def test_proxy_chunked_invalid():
+ def check_invalid(body):
+ assert get_http10(body=body)['status'] != 200
+
+ check_invalid('\r\n\r0')
+ check_invalid('\r\n\r\n\r0')
+ check_invalid('\r\n\r\n\r\n0')
+ check_invalid('\r\nContent-Length: 5\r\n\r\n0\r\n\r\n')
+ check_invalid('\r\n\r\n1\r\nXX\r\n0\r\n\r\n')
+ check_invalid('\r\n\r\n2\r\nX\r\n0\r\n\r\n')
+ check_invalid('\r\n\r\nH\r\nXX\r\n0\r\n\r\n')
+ check_invalid('\r\n\r\n0\r\nX')
+
+ resp = get_http10(body='\r\n\r\n65#\r\nA X 100')
+ assert resp['status'] == 200, 'incomplete chunk status'
+ assert resp['body'][-5:] != '0\r\n\r\n', 'incomplete chunk'
+
+ resp = get_http10(body='\r\n\r\n64#\r\nA X 100')
+ assert resp['status'] == 200, 'no zero chunk status'
+ assert resp['body'][-5:] != '0\r\n\r\n', 'no zero chunk'
+
+ assert get_http10(body='\r\n\r\n80000000\r\nA X 100')['status'] == 200
+ assert (
+ get_http10(body='\r\n\r\n10000000000000000\r\nA X 100')['status'] == 502
+ )
+ assert (
+ len(
+ get_http10(
+ body='\r\n\r\n1000000\r\nA X 1048576\r\n1000000\r\nA X 100',
+ read_buffer_size=4096 * 4096,
+ )['body']
)
- assert (
- len(
- self.get_http10(
- body='\r\n\r\n1000000\r\nA X 1048576\r\nXXX\r\nA X 100',
- read_buffer_size=4096 * 4096,
- )['body']
- )
- >= 1048576
+ >= 1048576
+ )
+ assert (
+ len(
+ get_http10(
+ body='\r\n\r\n1000000\r\nA X 1048576\r\nXXX\r\nA X 100',
+ read_buffer_size=4096 * 4096,
+ )['body']
)
+ >= 1048576
+ )
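
The chunks() helper above builds the chunk framing that the mock server echoes back to Unit as a chunked response: each chunk is a hexadecimal size line, CRLF, the payload, CRLF, and the stream ends with a zero-sized chunk; the server expands 'payload X N' markers into N repetitions, while '@' only splits the response into separate writes and '#' also inserts a short delay. A small self-contained check of just that framing, with no Unit harness involved; make_chunked() and parse_chunked() are illustrative helpers, not part of the test suite:

def make_chunked(parts):
    # parts: payload strings; each chunk size is emitted in hexadecimal.
    body = ''
    for payload in parts:
        body += f'{len(payload):x}\r\n{payload}\r\n'
    return f'{body}0\r\n\r\n'


def parse_chunked(stream):
    # Minimal decoder for the framing above (no extensions, no trailers).
    out = ''
    while True:
        size_line, stream = stream.split('\r\n', 1)
        size = int(size_line, 16)
        if size == 0:
            return out
        out += stream[:size]
        stream = stream[size + 2:]  # skip the payload and its trailing CRLF


part = '0123456789abcdef'
assert parse_chunked(make_chunked([part * 256])) == part * 256    # one '1000' hex chunk
assert parse_chunked(make_chunked([part * 256, part * 256])) == part * 512

The 4096-byte chunk here matches the ('1000', f'{part} X 256') case that test_proxy_chunked_body() pushes through the proxy.
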
diff --git a/test/test_python_application.py b/test/test_python_application.py
index d412ac68..18473d59 100644
--- a/test/test_python_application.py
+++ b/test/test_python_application.py
@@ -8,19 +8,20 @@ import venv
import pytest
from packaging import version
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
+prerequisites = {'modules': {'python': 'all'}}
-class TestPythonApplication(TestApplicationPython):
- prerequisites = {'modules': {'python': 'all'}}
+client = ApplicationPython()
- def test_python_application_variables(self):
- self.load('variables')
- body = 'Test body string.'
+def test_python_application_variables(date_to_sec_epoch, sec_epoch):
+ client.load('variables')
- resp = self.http(
- f"""POST / HTTP/1.1
+ body = 'Test body string.'
+
+ resp = client.http(
+ f"""POST / HTTP/1.1
Host: localhost
Content-Length: {len(body)}
Custom-Header: blah
@@ -30,878 +31,903 @@ Connection: close
custom-header: BLAH
{body}""".encode(),
- raw=True,
- )
+ raw=True,
+ )
+
+ assert resp['status'] == 200, 'status'
+ headers = resp['headers']
+ header_server = headers.pop('Server')
+ assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
+ assert (
+ headers.pop('Server-Software') == header_server
+ ), 'server software header'
+
+ date = headers.pop('Date')
+ assert date[-4:] == ' GMT', 'date header timezone'
+ assert abs(date_to_sec_epoch(date) - sec_epoch) < 5, 'date header'
+
+ assert headers == {
+ 'Connection': 'close',
+ 'Content-Length': str(len(body)),
+ 'Content-Type': 'text/html',
+ 'Request-Method': 'POST',
+ 'Request-Uri': '/',
+ 'Http-Host': 'localhost',
+ 'Server-Protocol': 'HTTP/1.1',
+ 'Custom-Header': 'blah, Blah, BLAH',
+ 'Wsgi-Version': '(1, 0)',
+ 'Wsgi-Url-Scheme': 'http',
+ 'Wsgi-Multithread': 'False',
+ 'Wsgi-Multiprocess': 'True',
+ 'Wsgi-Run-Once': 'False',
+ }, 'headers'
+ assert resp['body'] == body, 'body'
- assert resp['status'] == 200, 'status'
- headers = resp['headers']
- header_server = headers.pop('Server')
- assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
- assert (
- headers.pop('Server-Software') == header_server
- ), 'server software header'
-
- date = headers.pop('Date')
- assert date[-4:] == ' GMT', 'date header timezone'
- assert (
- abs(self.date_to_sec_epoch(date) - self.sec_epoch()) < 5
- ), 'date header'
-
- assert headers == {
- 'Connection': 'close',
- 'Content-Length': str(len(body)),
- 'Content-Type': 'text/html',
- 'Request-Method': 'POST',
- 'Request-Uri': '/',
- 'Http-Host': 'localhost',
- 'Server-Protocol': 'HTTP/1.1',
- 'Custom-Header': 'blah, Blah, BLAH',
- 'Wsgi-Version': '(1, 0)',
- 'Wsgi-Url-Scheme': 'http',
- 'Wsgi-Multithread': 'False',
- 'Wsgi-Multiprocess': 'True',
- 'Wsgi-Run-Once': 'False',
- }, 'headers'
- assert resp['body'] == body, 'body'
-
- def test_python_application_query_string(self):
- self.load('query_string')
-
- resp = self.get(url='/?var1=val1&var2=val2')
-
- assert (
- resp['headers']['Query-String'] == 'var1=val1&var2=val2'
- ), 'Query-String header'
-
- def test_python_application_query_string_space(self):
- self.load('query_string')
-
- resp = self.get(url='/ ?var1=val1&var2=val2')
- assert (
- resp['headers']['Query-String'] == 'var1=val1&var2=val2'
- ), 'Query-String space'
-
- resp = self.get(url='/ %20?var1=val1&var2=val2')
- assert (
- resp['headers']['Query-String'] == 'var1=val1&var2=val2'
- ), 'Query-String space 2'
-
- resp = self.get(url='/ %20 ?var1=val1&var2=val2')
- assert (
- resp['headers']['Query-String'] == 'var1=val1&var2=val2'
- ), 'Query-String space 3'
-
- resp = self.get(url='/blah %20 blah? var1= val1 & var2=val2')
- assert (
- resp['headers']['Query-String'] == ' var1= val1 & var2=val2'
- ), 'Query-String space 4'
- def test_python_application_prefix(self):
- self.load('prefix', prefix='/api/rest')
+def test_python_application_query_string():
+ client.load('query_string')
- def set_prefix(prefix):
- self.conf(f'"{prefix}"', 'applications/prefix/prefix')
+ resp = client.get(url='/?var1=val1&var2=val2')
- def check_prefix(url, script_name, path_info):
- resp = self.get(url=url)
- assert resp['status'] == 200
- assert resp['headers']['Script-Name'] == script_name
- assert resp['headers']['Path-Info'] == path_info
+ assert (
+ resp['headers']['Query-String'] == 'var1=val1&var2=val2'
+ ), 'Query-String header'
- check_prefix('/ap', 'NULL', '/ap')
- check_prefix('/api', 'NULL', '/api')
- check_prefix('/api/', 'NULL', '/api/')
- check_prefix('/api/res', 'NULL', '/api/res')
- check_prefix('/api/restful', 'NULL', '/api/restful')
- check_prefix('/api/rest', '/api/rest', '')
- check_prefix('/api/rest/', '/api/rest', '/')
- check_prefix('/api/rest/get', '/api/rest', '/get')
- check_prefix('/api/rest/get/blah', '/api/rest', '/get/blah')
- set_prefix('/api/rest/')
- check_prefix('/api/rest', '/api/rest', '')
- check_prefix('/api/restful', 'NULL', '/api/restful')
- check_prefix('/api/rest/', '/api/rest', '/')
- check_prefix('/api/rest/blah', '/api/rest', '/blah')
+def test_python_application_query_string_space():
+ client.load('query_string')
- set_prefix('/app')
- check_prefix('/ap', 'NULL', '/ap')
- check_prefix('/app', '/app', '')
- check_prefix('/app/', '/app', '/')
- check_prefix('/application/', 'NULL', '/application/')
+ resp = client.get(url='/ ?var1=val1&var2=val2')
+ assert (
+ resp['headers']['Query-String'] == 'var1=val1&var2=val2'
+ ), 'Query-String space'
- set_prefix('/')
- check_prefix('/', 'NULL', '/')
- check_prefix('/app', 'NULL', '/app')
+ resp = client.get(url='/ %20?var1=val1&var2=val2')
+ assert (
+ resp['headers']['Query-String'] == 'var1=val1&var2=val2'
+ ), 'Query-String space 2'
- def test_python_application_query_string_empty(self):
- self.load('query_string')
+ resp = client.get(url='/ %20 ?var1=val1&var2=val2')
+ assert (
+ resp['headers']['Query-String'] == 'var1=val1&var2=val2'
+ ), 'Query-String space 3'
- resp = self.get(url='/?')
+ resp = client.get(url='/blah %20 blah? var1= val1 & var2=val2')
+ assert (
+ resp['headers']['Query-String'] == ' var1= val1 & var2=val2'
+ ), 'Query-String space 4'
- assert resp['status'] == 200, 'query string empty status'
- assert resp['headers']['Query-String'] == '', 'query string empty'
- def test_python_application_query_string_absent(self):
- self.load('query_string')
+def test_python_application_prefix():
+ client.load('prefix', prefix='/api/rest')
- resp = self.get()
+ def set_prefix(prefix):
+ client.conf(f'"{prefix}"', 'applications/prefix/prefix')
- assert resp['status'] == 200, 'query string absent status'
- assert resp['headers']['Query-String'] == '', 'query string absent'
+ def check_prefix(url, script_name, path_info):
+ resp = client.get(url=url)
+ assert resp['status'] == 200
+ assert resp['headers']['Script-Name'] == script_name
+ assert resp['headers']['Path-Info'] == path_info
- @pytest.mark.skip('not yet')
- def test_python_application_server_port(self):
- self.load('server_port')
+ check_prefix('/ap', 'NULL', '/ap')
+ check_prefix('/api', 'NULL', '/api')
+ check_prefix('/api/', 'NULL', '/api/')
+ check_prefix('/api/res', 'NULL', '/api/res')
+ check_prefix('/api/restful', 'NULL', '/api/restful')
+ check_prefix('/api/rest', '/api/rest', '')
+ check_prefix('/api/rest/', '/api/rest', '/')
+ check_prefix('/api/rest/get', '/api/rest', '/get')
+ check_prefix('/api/rest/get/blah', '/api/rest', '/get/blah')
- assert (
- self.get()['headers']['Server-Port'] == '7080'
- ), 'Server-Port header'
+ set_prefix('/api/rest/')
+ check_prefix('/api/rest', '/api/rest', '')
+ check_prefix('/api/restful', 'NULL', '/api/restful')
+ check_prefix('/api/rest/', '/api/rest', '/')
+ check_prefix('/api/rest/blah', '/api/rest', '/blah')
- @pytest.mark.skip('not yet')
- def test_python_application_working_directory_invalid(self):
- self.load('empty')
+ set_prefix('/app')
+ check_prefix('/ap', 'NULL', '/ap')
+ check_prefix('/app', '/app', '')
+ check_prefix('/app/', '/app', '/')
+ check_prefix('/application/', 'NULL', '/application/')
- assert 'success' in self.conf(
- '"/blah"', 'applications/empty/working_directory'
- ), 'configure invalid working_directory'
+ set_prefix('/')
+ check_prefix('/', 'NULL', '/')
+ check_prefix('/app', 'NULL', '/app')
- assert self.get()['status'] == 500, 'status'
- def test_python_application_204_transfer_encoding(self):
- self.load('204_no_content')
+def test_python_application_query_string_empty():
+ client.load('query_string')
- assert (
- 'Transfer-Encoding' not in self.get()['headers']
- ), '204 header transfer encoding'
+ resp = client.get(url='/?')
- def test_python_application_ctx_iter_atexit(self):
- self.load('ctx_iter_atexit')
+ assert resp['status'] == 200, 'query string empty status'
+ assert resp['headers']['Query-String'] == '', 'query string empty'
- resp = self.post(body='0123456789')
- assert resp['status'] == 200, 'ctx iter status'
- assert resp['body'] == '0123456789', 'ctx iter body'
+def test_python_application_query_string_absent():
+ client.load('query_string')
- assert 'success' in self.conf({"listeners": {}, "applications": {}})
+ resp = client.get()
- assert (
- self.wait_for_record(r'RuntimeError') is not None
- ), 'ctx iter atexit'
+ assert resp['status'] == 200, 'query string absent status'
+ assert resp['headers']['Query-String'] == '', 'query string absent'
- def test_python_keepalive_body(self):
- self.load('mirror')
- assert self.get()['status'] == 200, 'init'
+@pytest.mark.skip('not yet')
+def test_python_application_server_port():
+ client.load('server_port')
- body = '0123456789' * 500
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
+ assert (
+ client.get()['headers']['Server-Port'] == '7080'
+ ), 'Server-Port header'
- assert resp['body'] == body, 'keep-alive 1'
- body = '0123456789'
- resp = self.post(sock=sock, body=body)
+@pytest.mark.skip('not yet')
+def test_python_application_working_directory_invalid():
+ client.load('empty')
- assert resp['body'] == body, 'keep-alive 2'
+ assert 'success' in client.conf(
+ '"/blah"', 'applications/empty/working_directory'
+ ), 'configure invalid working_directory'
- def test_python_keepalive_reconfigure(self):
- self.load('mirror')
+ assert client.get()['status'] == 500, 'status'
- assert self.get()['status'] == 200, 'init'
- body = '0123456789'
- conns = 3
- socks = []
+def test_python_application_204_transfer_encoding():
+ client.load('204_no_content')
- for i in range(conns):
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
+ assert (
+ 'Transfer-Encoding' not in client.get()['headers']
+ ), '204 header transfer encoding'
- assert resp['body'] == body, 'keep-alive open'
- self.load('mirror', processes=i + 1)
+def test_python_application_ctx_iter_atexit(wait_for_record):
+ client.load('ctx_iter_atexit')
- socks.append(sock)
+ resp = client.post(body='0123456789')
- for i in range(conns):
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- sock=socks[i],
- body=body,
- read_timeout=1,
- )
+ assert resp['status'] == 200, 'ctx iter status'
+ assert resp['body'] == '0123456789', 'ctx iter body'
+
+ assert 'success' in client.conf({"listeners": {}, "applications": {}})
+
+ assert wait_for_record(r'RuntimeError') is not None, 'ctx iter atexit'
+
+
+def test_python_keepalive_body():
+ client.load('mirror')
+
+ assert client.get()['status'] == 200, 'init'
+
+ body = '0123456789' * 500
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
+
+ assert resp['body'] == body, 'keep-alive 1'
+
+ body = '0123456789'
+ resp = client.post(sock=sock, body=body)
- assert resp['body'] == body, 'keep-alive request'
+ assert resp['body'] == body, 'keep-alive 2'
- self.load('mirror', processes=i + 1)
- for i in range(conns):
- resp = self.post(sock=socks[i], body=body)
+def test_python_keepalive_reconfigure():
+ client.load('mirror')
- assert resp['body'] == body, 'keep-alive close'
+ assert client.get()['status'] == 200, 'init'
- self.load('mirror', processes=i + 1)
+ body = '0123456789'
+ conns = 3
+ socks = []
+
+ for i in range(conns):
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
- def test_python_keepalive_reconfigure_2(self):
- self.load('mirror')
+ assert resp['body'] == body, 'keep-alive open'
- assert self.get()['status'] == 200, 'init'
+ client.load('mirror', processes=i + 1)
- body = '0123456789'
+ socks.append(sock)
- (resp, sock) = self.post(
+ for i in range(conns):
+ (resp, sock) = client.post(
headers={
'Host': 'localhost',
'Connection': 'keep-alive',
},
start=True,
+ sock=socks[i],
body=body,
read_timeout=1,
)
- assert resp['body'] == body, 'reconfigure 2 keep-alive 1'
+ assert resp['body'] == body, 'keep-alive request'
- self.load('empty')
+ client.load('mirror', processes=i + 1)
- assert self.get()['status'] == 200, 'init'
+ for i in range(conns):
+ resp = client.post(sock=socks[i], body=body)
- (resp, sock) = self.post(start=True, sock=sock, body=body)
+ assert resp['body'] == body, 'keep-alive close'
- assert resp['status'] == 200, 'reconfigure 2 keep-alive 2'
- assert resp['body'] == '', 'reconfigure 2 keep-alive 2 body'
+ client.load('mirror', processes=i + 1)
- assert 'success' in self.conf(
- {"listeners": {}, "applications": {}}
- ), 'reconfigure 2 clear configuration'
- resp = self.get(sock=sock)
+def test_python_keepalive_reconfigure_2():
+ client.load('mirror')
- assert resp == {}, 'reconfigure 2 keep-alive 3'
+ assert client.get()['status'] == 200, 'init'
- def test_python_atexit(self):
- self.load('atexit')
+ body = '0123456789'
- self.get()
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
- assert 'success' in self.conf({"listeners": {}, "applications": {}})
+ assert resp['body'] == body, 'reconfigure 2 keep-alive 1'
- assert self.wait_for_record(r'At exit called\.') is not None, 'atexit'
+ client.load('empty')
- def test_python_process_switch(self):
- self.load('delayed', processes=2)
+ assert client.get()['status'] == 200, 'init'
- self.get(
- headers={
- 'Host': 'localhost',
- 'Content-Length': '0',
- 'X-Delay': '5',
- 'Connection': 'close',
- },
- no_recv=True,
- )
+ (resp, sock) = client.post(start=True, sock=sock, body=body)
- headers_delay_1 = {
- 'Connection': 'close',
+ assert resp['status'] == 200, 'reconfigure 2 keep-alive 2'
+ assert resp['body'] == '', 'reconfigure 2 keep-alive 2 body'
+
+ assert 'success' in client.conf(
+ {"listeners": {}, "applications": {}}
+ ), 'reconfigure 2 clear configuration'
+
+ resp = client.get(sock=sock)
+
+ assert resp == {}, 'reconfigure 2 keep-alive 3'
+
+
+def test_python_atexit(wait_for_record):
+ client.load('atexit')
+
+ client.get()
+
+ assert 'success' in client.conf({"listeners": {}, "applications": {}})
+
+ assert wait_for_record(r'At exit called\.') is not None, 'atexit'
+
+
+def test_python_process_switch():
+ client.load('delayed', processes=2)
+
+ client.get(
+ headers={
'Host': 'localhost',
'Content-Length': '0',
- 'X-Delay': '1',
- }
+ 'X-Delay': '5',
+ 'Connection': 'close',
+ },
+ no_recv=True,
+ )
+
+ headers_delay_1 = {
+ 'Connection': 'close',
+ 'Host': 'localhost',
+ 'Content-Length': '0',
+ 'X-Delay': '1',
+ }
+
+ client.get(headers=headers_delay_1, no_recv=True)
- self.get(headers=headers_delay_1, no_recv=True)
+ time.sleep(0.5)
- time.sleep(0.5)
+ for _ in range(10):
+ client.get(headers=headers_delay_1, no_recv=True)
- for _ in range(10):
- self.get(headers=headers_delay_1, no_recv=True)
+ client.get(headers=headers_delay_1)
- self.get(headers=headers_delay_1)
- @pytest.mark.skip('not yet')
- def test_python_application_start_response_exit(self):
- self.load('start_response_exit')
+@pytest.mark.skip('not yet')
+def test_python_application_start_response_exit():
+ client.load('start_response_exit')
- assert self.get()['status'] == 500, 'start response exit'
+ assert client.get()['status'] == 500, 'start response exit'
- def test_python_application_input_iter(self):
- self.load('input_iter')
- body = '''0123456789
+def test_python_application_input_iter():
+ client.load('input_iter')
+
+ body = '''0123456789
next line
last line'''
- resp = self.post(body=body)
- assert resp['body'] == body, 'input iter'
- assert resp['headers']['X-Lines-Count'] == '4', 'input iter lines'
+ resp = client.post(body=body)
+ assert resp['body'] == body, 'input iter'
+ assert resp['headers']['X-Lines-Count'] == '4', 'input iter lines'
+
- def test_python_application_input_readline(self):
- self.load('input_readline')
+def test_python_application_input_readline():
+ client.load('input_readline')
- body = '''0123456789
+ body = '''0123456789
next line
last line'''
- resp = self.post(body=body)
- assert resp['body'] == body, 'input readline'
- assert resp['headers']['X-Lines-Count'] == '4', 'input readline lines'
+ resp = client.post(body=body)
+ assert resp['body'] == body, 'input readline'
+ assert resp['headers']['X-Lines-Count'] == '4', 'input readline lines'
+
- def test_python_application_input_readline_size(self):
- self.load('input_readline_size')
+def test_python_application_input_readline_size():
+ client.load('input_readline_size')
- body = '''0123456789
+ body = '''0123456789
next line
last line'''
- assert self.post(body=body)['body'] == body, 'input readline size'
- assert (
- self.post(body='0123')['body'] == '0123'
- ), 'input readline size less'
+ assert client.post(body=body)['body'] == body, 'input readline size'
+ assert (
+ client.post(body='0123')['body'] == '0123'
+ ), 'input readline size less'
- def test_python_application_input_readlines(self):
- self.load('input_readlines')
- body = '''0123456789
+def test_python_application_input_readlines():
+ client.load('input_readlines')
+
+ body = '''0123456789
next line
last line'''
- resp = self.post(body=body)
- assert resp['body'] == body, 'input readlines'
- assert resp['headers']['X-Lines-Count'] == '4', 'input readlines lines'
+ resp = client.post(body=body)
+ assert resp['body'] == body, 'input readlines'
+ assert resp['headers']['X-Lines-Count'] == '4', 'input readlines lines'
+
- def test_python_application_input_readlines_huge(self):
- self.load('input_readlines')
+def test_python_application_input_readlines_huge():
+ client.load('input_readlines')
- body = (
- '''0123456789 abcdefghi
+ body = (
+ '''0123456789 abcdefghi
next line: 0123456789 abcdefghi
last line: 987654321
'''
- * 512
- )
+ * 512
+ )
- assert (
- self.post(body=body, read_buffer_size=16384)['body'] == body
- ), 'input readlines huge'
+ assert (
+ client.post(body=body, read_buffer_size=16384)['body'] == body
+ ), 'input readlines huge'
- def test_python_application_input_read_length(self):
- self.load('input_read_length')
- body = '0123456789'
+def test_python_application_input_read_length():
+ client.load('input_read_length')
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Input-Length': '5',
- 'Connection': 'close',
- },
- body=body,
- )
+ body = '0123456789'
- assert resp['body'] == body[:5], 'input read length lt body'
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Input-Length': '5',
+ 'Connection': 'close',
+ },
+ body=body,
+ )
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Input-Length': '15',
- 'Connection': 'close',
- },
- body=body,
- )
+ assert resp['body'] == body[:5], 'input read length lt body'
- assert resp['body'] == body, 'input read length gt body'
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Input-Length': '15',
+ 'Connection': 'close',
+ },
+ body=body,
+ )
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Input-Length': '0',
- 'Connection': 'close',
- },
- body=body,
- )
+ assert resp['body'] == body, 'input read length gt body'
- assert resp['body'] == '', 'input read length zero'
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Input-Length': '0',
+ 'Connection': 'close',
+ },
+ body=body,
+ )
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Input-Length': '-1',
- 'Connection': 'close',
- },
- body=body,
- )
+ assert resp['body'] == '', 'input read length zero'
- assert resp['body'] == body, 'input read length negative'
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Input-Length': '-1',
+ 'Connection': 'close',
+ },
+ body=body,
+ )
- @pytest.mark.skip('not yet')
- def test_python_application_errors_write(self):
- self.load('errors_write')
+ assert resp['body'] == body, 'input read length negative'
- self.get()
- assert (
- self.wait_for_record(r'\[error\].+Error in application\.')
- is not None
- ), 'errors write'
+@pytest.mark.skip('not yet')
+def test_python_application_errors_write(wait_for_record):
+ client.load('errors_write')
- def test_python_application_body_array(self):
- self.load('body_array')
+ client.get()
- assert self.get()['body'] == '0123456789', 'body array'
+ assert (
+ wait_for_record(r'\[error\].+Error in application\.') is not None
+ ), 'errors write'
- def test_python_application_body_io(self):
- self.load('body_io')
- assert self.get()['body'] == '0123456789', 'body io'
+def test_python_application_body_array():
+ client.load('body_array')
- def test_python_application_body_io_file(self):
- self.load('body_io_file')
+ assert client.get()['body'] == '0123456789', 'body array'
- assert self.get()['body'] == 'body\n', 'body io file'
- @pytest.mark.skip('not yet')
- def test_python_application_syntax_error(self, skip_alert):
- skip_alert(r'Python failed to import module "wsgi"')
- self.load('syntax_error')
+def test_python_application_body_io():
+ client.load('body_io')
- assert self.get()['status'] == 500, 'syntax error'
+ assert client.get()['body'] == '0123456789', 'body io'
- def test_python_application_loading_error(self, skip_alert):
- skip_alert(r'Python failed to import module "blah"')
- self.load('empty', module="blah")
+def test_python_application_body_io_file():
+ client.load('body_io_file')
- assert self.get()['status'] == 503, 'loading error'
+ assert client.get()['body'] == 'body\n', 'body io file'
- def test_python_application_close(self):
- self.load('close')
- self.get()
+@pytest.mark.skip('not yet')
+def test_python_application_syntax_error(skip_alert):
+ skip_alert(r'Python failed to import module "wsgi"')
+ client.load('syntax_error')
- assert self.wait_for_record(r'Close called\.') is not None, 'close'
+ assert client.get()['status'] == 500, 'syntax error'
- def test_python_application_close_error(self):
- self.load('close_error')
- self.get()
+def test_python_application_loading_error(skip_alert):
+ skip_alert(r'Python failed to import module "blah"')
- assert (
- self.wait_for_record(r'Close called\.') is not None
- ), 'close error'
+ client.load('empty', module="blah")
- def test_python_application_not_iterable(self):
- self.load('not_iterable')
+ assert client.get()['status'] == 503, 'loading error'
- self.get()
- assert (
- self.wait_for_record(
- r'\[error\].+the application returned not an iterable object'
- )
- is not None
- ), 'not iterable'
+def test_python_application_close(wait_for_record):
+ client.load('close')
- def test_python_application_write(self):
- self.load('write')
+ client.get()
- assert self.get()['body'] == '0123456789', 'write'
+ assert wait_for_record(r'Close called\.') is not None, 'close'
- def test_python_application_encoding(self):
- self.load('encoding')
- try:
- locales = (
- subprocess.check_output(
- ['locale', '-a'],
- stderr=subprocess.STDOUT,
- )
- .decode()
- .splitlines()
- )
- except (
- FileNotFoundError,
- UnicodeDecodeError,
- subprocess.CalledProcessError,
- ):
- pytest.skip('require locale')
-
- to_check = [
- re.compile(r'.*UTF[-_]?8'),
- re.compile(r'.*ISO[-_]?8859[-_]?1'),
- ]
- matches = [
- loc
- for loc in locales
- if any(pattern.match(loc.upper()) for pattern in to_check)
- ]
-
- if not matches:
- pytest.skip('no available locales')
-
- def unify(str):
- str.upper().replace('-', '').replace('_', '')
-
- for loc in matches:
- assert 'success' in self.conf(
- {"LC_CTYPE": loc, "LC_ALL": ""},
- '/config/applications/encoding/environment',
- )
- resp = self.get()
- assert resp['status'] == 200, 'status'
- assert unify(resp['headers']['X-Encoding']) == unify(
- loc.split('.')[-1]
- )
+def test_python_application_close_error(wait_for_record):
+ client.load('close_error')
- def test_python_application_unicode(self, temp_dir):
- try:
- app_type = self.get_application_type()
- v = version.Version(app_type.split()[-1])
- if v.major != 3:
- raise version.InvalidVersion
+ client.get()
- except version.InvalidVersion:
- pytest.skip('require python module version 3')
+ assert wait_for_record(r'Close called\.') is not None, 'close error'
- venv_path = f'{temp_dir}/venv'
- venv.create(venv_path)
- self.load('unicode')
- assert 'success' in self.conf(
- f'"{venv_path}"',
- '/config/applications/unicode/home',
- )
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Temp-dir': temp_dir,
- 'Connection': 'close',
- }
- )['status']
- == 200
+def test_python_application_not_iterable(wait_for_record):
+ client.load('not_iterable')
+
+ client.get()
+
+ assert (
+ wait_for_record(
+ r'\[error\].+the application returned not an iterable object'
)
+ is not None
+ ), 'not iterable'
- def test_python_application_threading(self):
- """wait_for_record() timeouts after 5s while every thread works at
- least 3s. So without releasing GIL test should fail.
- """
- self.load('threading')
+def test_python_application_write():
+ client.load('write')
- for _ in range(10):
- self.get(no_recv=True)
+ assert client.get()['body'] == '0123456789', 'write'
- assert (
- self.wait_for_record(r'\(5\) Thread: 100', wait=50) is not None
- ), 'last thread finished'
- def test_python_application_iter_exception(self):
- self.load('iter_exception')
+def test_python_application_encoding():
+ client.load('encoding')
+
+ try:
+ locales = (
+ subprocess.check_output(
+ ['locale', '-a'],
+ stderr=subprocess.STDOUT,
+ )
+ .decode()
+ .splitlines()
+ )
+ except (
+ FileNotFoundError,
+ UnicodeDecodeError,
+ subprocess.CalledProcessError,
+ ):
+ pytest.skip('require locale')
+
+ to_check = [
+ re.compile(r'.*UTF[-_]?8'),
+ re.compile(r'.*ISO[-_]?8859[-_]?1'),
+ ]
+ matches = [
+ loc
+ for loc in locales
+ if any(pattern.match(loc.upper()) for pattern in to_check)
+ ]
+
+ if not matches:
+ pytest.skip('no available locales')
+
+ def unify(str):
+ str.upper().replace('-', '').replace('_', '')
+
+ for loc in matches:
+ assert 'success' in client.conf(
+ {"LC_CTYPE": loc, "LC_ALL": ""},
+ '/config/applications/encoding/environment',
+ )
+ resp = client.get()
+ assert resp['status'] == 200, 'status'
+ assert unify(resp['headers']['X-Encoding']) == unify(loc.split('.')[-1])
- # Default request doesn't lead to the exception.
- resp = self.get(
+def test_python_application_unicode(temp_dir):
+ try:
+ app_type = client.get_application_type()
+ v = version.Version(app_type.split()[-1])
+ if v.major != 3:
+ raise version.InvalidVersion
+
+ except version.InvalidVersion:
+ pytest.skip('require python module version 3')
+
+ venv_path = f'{temp_dir}/venv'
+ venv.create(venv_path)
+
+ client.load('unicode')
+ assert 'success' in client.conf(
+ f'"{venv_path}"',
+ '/config/applications/unicode/home',
+ )
+ assert (
+ client.get(
headers={
'Host': 'localhost',
- 'X-Skip': '9',
- 'X-Chunked': '1',
+ 'Temp-dir': temp_dir,
'Connection': 'close',
}
- )
- assert resp['status'] == 200, 'status'
- assert resp['body'] == 'XXXXXXX', 'body'
+ )['status']
+ == 200
+ )
- # Exception before start_response().
- assert self.get()['status'] == 503, 'error'
+def test_python_application_threading(wait_for_record):
+ """wait_for_record() timeouts after 5s while every thread works at
+ least 3s. So without releasing GIL test should fail.
+ """
- assert self.wait_for_record(r'Traceback') is not None, 'traceback'
- assert (
- self.wait_for_record(r"raise Exception\('first exception'\)")
- is not None
- ), 'first exception raise'
- assert len(self.findall(r'Traceback')) == 1, 'traceback count 1'
+ client.load('threading')
- # Exception after start_response(), before first write().
+ for _ in range(10):
+ client.get(no_recv=True)
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Skip': '1',
- 'Connection': 'close',
- }
- )['status']
- == 503
- ), 'error 2'
+ assert (
+ wait_for_record(r'\(5\) Thread: 100', wait=50) is not None
+ ), 'last thread finished'
- assert (
- self.wait_for_record(r"raise Exception\('second exception'\)")
- is not None
- ), 'exception raise second'
- assert len(self.findall(r'Traceback')) == 2, 'traceback count 2'
- # Exception after first write(), before first __next__().
+def test_python_application_iter_exception(findall, wait_for_record):
+ client.load('iter_exception')
- _, sock = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Skip': '2',
- 'Connection': 'keep-alive',
- },
- start=True,
- )
+ # Default request doesn't lead to the exception.
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Skip': '9',
+ 'X-Chunked': '1',
+ 'Connection': 'close',
+ }
+ )
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == 'XXXXXXX', 'body'
- assert (
- self.wait_for_record(r"raise Exception\('third exception'\)")
- is not None
- ), 'exception raise third'
- assert len(self.findall(r'Traceback')) == 3, 'traceback count 3'
+ # Exception before start_response().
- assert self.get(sock=sock) == {}, 'closed connection'
+ assert client.get()['status'] == 503, 'error'
- # Exception after first write(), before first __next__(),
- # chunked (incomplete body).
+ assert wait_for_record(r'Traceback') is not None, 'traceback'
+ assert (
+ wait_for_record(r"raise Exception\('first exception'\)") is not None
+ ), 'first exception raise'
+ assert len(findall(r'Traceback')) == 1, 'traceback count 1'
- resp = self.get(
+ # Exception after start_response(), before first write().
+
+ assert (
+ client.get(
headers={
'Host': 'localhost',
- 'X-Skip': '2',
- 'X-Chunked': '1',
+ 'X-Skip': '1',
'Connection': 'close',
- },
- raw_resp=True,
- )
- if resp:
- assert resp[-5:] != '0\r\n\r\n', 'incomplete body'
- assert len(self.findall(r'Traceback')) == 4, 'traceback count 4'
+ }
+ )['status']
+ == 503
+ ), 'error 2'
- # Exception in __next__().
+ assert (
+ wait_for_record(r"raise Exception\('second exception'\)") is not None
+ ), 'exception raise second'
+ assert len(findall(r'Traceback')) == 2, 'traceback count 2'
- _, sock = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Skip': '3',
- 'Connection': 'keep-alive',
- },
- start=True,
- )
+ # Exception after first write(), before first __next__().
+
+ _, sock = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Skip': '2',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ )
+
+ assert (
+ wait_for_record(r"raise Exception\('third exception'\)") is not None
+ ), 'exception raise third'
+ assert len(findall(r'Traceback')) == 3, 'traceback count 3'
+
+ assert client.get(sock=sock) == {}, 'closed connection'
+
+ # Exception after first write(), before first __next__(),
+ # chunked (incomplete body).
+
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Skip': '2',
+ 'X-Chunked': '1',
+ 'Connection': 'close',
+ },
+ raw_resp=True,
+ )
+ if resp:
+ assert resp[-5:] != '0\r\n\r\n', 'incomplete body'
+ assert len(findall(r'Traceback')) == 4, 'traceback count 4'
+
+ # Exception in __next__().
+
+ _, sock = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Skip': '3',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ )
+
+ assert (
+ wait_for_record(r"raise Exception\('next exception'\)") is not None
+ ), 'exception raise next'
+ assert len(findall(r'Traceback')) == 5, 'traceback count 5'
+
+ assert client.get(sock=sock) == {}, 'closed connection 2'
- assert (
- self.wait_for_record(r"raise Exception\('next exception'\)")
- is not None
- ), 'exception raise next'
- assert len(self.findall(r'Traceback')) == 5, 'traceback count 5'
+ # Exception in __next__(), chunked (incomplete body).
- assert self.get(sock=sock) == {}, 'closed connection 2'
+ resp = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Skip': '3',
+ 'X-Chunked': '1',
+ 'Connection': 'close',
+ },
+ raw_resp=True,
+ )
+ if resp:
+ assert resp[-5:] != '0\r\n\r\n', 'incomplete body 2'
+ assert len(findall(r'Traceback')) == 6, 'traceback count 6'
- # Exception in __next__(), chunked (incomplete body).
+ # Exception before start_response() and in close().
- resp = self.get(
+ assert (
+ client.get(
headers={
'Host': 'localhost',
- 'X-Skip': '3',
- 'X-Chunked': '1',
+ 'X-Not-Skip-Close': '1',
'Connection': 'close',
- },
- raw_resp=True,
- )
- if resp:
- assert resp[-5:] != '0\r\n\r\n', 'incomplete body 2'
- assert len(self.findall(r'Traceback')) == 6, 'traceback count 6'
+ }
+ )['status']
+ == 503
+ ), 'error'
- # Exception before start_response() and in close().
+ assert (
+ wait_for_record(r"raise Exception\('close exception'\)") is not None
+ ), 'exception raise close'
+ assert len(findall(r'Traceback')) == 8, 'traceback count 8'
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Not-Skip-Close': '1',
- 'Connection': 'close',
- }
- )['status']
- == 503
- ), 'error'
- assert (
- self.wait_for_record(r"raise Exception\('close exception'\)")
- is not None
- ), 'exception raise close'
- assert len(self.findall(r'Traceback')) == 8, 'traceback count 8'
+def test_python_user_group(require):
+ require({'privileged_user': True})
- def test_python_user_group(self, is_su):
- if not is_su:
- pytest.skip('requires root')
+ nobody_uid = pwd.getpwnam('nobody').pw_uid
- nobody_uid = pwd.getpwnam('nobody').pw_uid
+ group = 'nobody'
- group = 'nobody'
+ try:
+ group_id = grp.getgrnam(group).gr_gid
+ except KeyError:
+ group = 'nogroup'
+ group_id = grp.getgrnam(group).gr_gid
- try:
- group_id = grp.getgrnam(group).gr_gid
- except KeyError:
- group = 'nogroup'
- group_id = grp.getgrnam(group).gr_gid
+ client.load('user_group')
- self.load('user_group')
+ obj = client.getjson()['body']
+ assert obj['UID'] == nobody_uid, 'nobody uid'
+ assert obj['GID'] == group_id, 'nobody gid'
- obj = self.getjson()['body']
- assert obj['UID'] == nobody_uid, 'nobody uid'
- assert obj['GID'] == group_id, 'nobody gid'
+ client.load('user_group', user='nobody')
- self.load('user_group', user='nobody')
+ obj = client.getjson()['body']
+ assert obj['UID'] == nobody_uid, 'nobody uid user=nobody'
+ assert obj['GID'] == group_id, 'nobody gid user=nobody'
- obj = self.getjson()['body']
- assert obj['UID'] == nobody_uid, 'nobody uid user=nobody'
- assert obj['GID'] == group_id, 'nobody gid user=nobody'
+ client.load('user_group', user='nobody', group=group)
- self.load('user_group', user='nobody', group=group)
+ obj = client.getjson()['body']
+ assert obj['UID'] == nobody_uid, f'nobody uid user=nobody group={group}'
+ assert obj['GID'] == group_id, f'nobody gid user=nobody group={group}'
- obj = self.getjson()['body']
- assert obj['UID'] == nobody_uid, f'nobody uid user=nobody group={group}'
- assert obj['GID'] == group_id, f'nobody gid user=nobody group={group}'
+ client.load('user_group', group=group)
- self.load('user_group', group=group)
+ obj = client.getjson()['body']
+ assert obj['UID'] == nobody_uid, f'nobody uid group={group}'
+ assert obj['GID'] == group_id, f'nobody gid group={group}'
- obj = self.getjson()['body']
- assert obj['UID'] == nobody_uid, f'nobody uid group={group}'
- assert obj['GID'] == group_id, f'nobody gid group={group}'
+ client.load('user_group', user='root')
- self.load('user_group', user='root')
+ obj = client.getjson()['body']
+ assert obj['UID'] == 0, 'root uid user=root'
+ assert obj['GID'] == 0, 'root gid user=root'
- obj = self.getjson()['body']
- assert obj['UID'] == 0, 'root uid user=root'
- assert obj['GID'] == 0, 'root gid user=root'
+ group = 'root'
- group = 'root'
+ try:
+ grp.getgrnam(group)
+ group = True
+ except KeyError:
+ group = False
- try:
- grp.getgrnam(group)
- group = True
- except KeyError:
- group = False
+ if group:
+ client.load('user_group', user='root', group='root')
- if group:
- self.load('user_group', user='root', group='root')
+ obj = client.getjson()['body']
+ assert obj['UID'] == 0, 'root uid user=root group=root'
+ assert obj['GID'] == 0, 'root gid user=root group=root'
- obj = self.getjson()['body']
- assert obj['UID'] == 0, 'root uid user=root group=root'
- assert obj['GID'] == 0, 'root gid user=root group=root'
+ client.load('user_group', group='root')
- self.load('user_group', group='root')
+ obj = client.getjson()['body']
+ assert obj['UID'] == nobody_uid, 'root uid group=root'
+ assert obj['GID'] == 0, 'root gid group=root'
- obj = self.getjson()['body']
- assert obj['UID'] == nobody_uid, 'root uid group=root'
- assert obj['GID'] == 0, 'root gid group=root'
- def test_python_application_callable(self, skip_alert):
- skip_alert(r'Python failed to get "blah" from module')
- self.load('callable')
+def test_python_application_callable(skip_alert):
+ skip_alert(r'Python failed to get "blah" from module')
+ client.load('callable')
- assert self.get()['status'] == 204, 'default application response'
+ assert client.get()['status'] == 204, 'default application response'
- self.load('callable', callable="app")
+ client.load('callable', callable="app")
- assert self.get()['status'] == 200, 'callable response'
+ assert client.get()['status'] == 200, 'callable response'
- self.load('callable', callable="blah")
+ client.load('callable', callable="blah")
- assert self.get()['status'] not in [200, 204], 'callable response inv'
+ assert client.get()['status'] not in [200, 204], 'callable response inv'
- def test_python_application_path(self):
- self.load('path')
- def set_path(path):
- assert 'success' in self.conf(path, 'applications/path/path')
+def test_python_application_path():
+ client.load('path')
- def get_path():
- return self.get()['body'].split(os.pathsep)
+ def set_path(path):
+ assert 'success' in client.conf(path, 'applications/path/path')
- default_path = self.conf_get('/config/applications/path/path')
- assert 'success' in self.conf(
- {"PYTHONPATH": default_path},
- '/config/applications/path/environment',
- )
+ def get_path():
+ return client.get()['body'].split(os.pathsep)
- self.conf_delete('/config/applications/path/path')
- sys_path = get_path()
+ default_path = client.conf_get('/config/applications/path/path')
+ assert 'success' in client.conf(
+ {"PYTHONPATH": default_path},
+ '/config/applications/path/environment',
+ )
- set_path('"/blah"')
- assert ['/blah', *sys_path] == get_path(), 'check path'
+ client.conf_delete('/config/applications/path/path')
+ sys_path = get_path()
- set_path('"/new"')
- assert ['/new', *sys_path] == get_path(), 'check path update'
+ set_path('"/blah"')
+ assert ['/blah', *sys_path] == get_path(), 'check path'
- set_path('["/blah1", "/blah2"]')
- assert [
- '/blah1',
- '/blah2',
- *sys_path,
- ] == get_path(), 'check path array'
+ set_path('"/new"')
+ assert ['/new', *sys_path] == get_path(), 'check path update'
- def test_python_application_path_invalid(self):
- self.load('path')
+ set_path('["/blah1", "/blah2"]')
+ assert [
+ '/blah1',
+ '/blah2',
+ *sys_path,
+ ] == get_path(), 'check path array'
- def check_path(path):
- assert 'error' in self.conf(path, 'applications/path/path')
- check_path('{}')
- check_path('["/blah", []]')
+def test_python_application_path_invalid():
+ client.load('path')
- def test_python_application_threads(self):
- self.load('threads', threads=4)
+ def check_path(path):
+ assert 'error' in client.conf(path, 'applications/path/path')
- socks = []
+ check_path('{}')
+ check_path('["/blah", []]')
- for i in range(4):
- sock = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Delay': '2',
- 'Connection': 'close',
- },
- no_recv=True,
- )
- socks.append(sock)
+def test_python_application_threads():
+ client.load('threads', threads=4)
- threads = set()
+ socks = []
- for sock in socks:
- resp = self.recvall(sock).decode('utf-8')
+ for _ in range(4):
+ sock = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Delay': '2',
+ 'Connection': 'close',
+ },
+ no_recv=True,
+ )
+
+ socks.append(sock)
+
+ threads = set()
- self.log_in(resp)
+ for sock in socks:
+ resp = client.recvall(sock).decode('utf-8')
- resp = self._resp_to_dict(resp)
+ client.log_in(resp)
- assert resp['status'] == 200, 'status'
+ resp = client._resp_to_dict(resp)
+
+ assert resp['status'] == 200, 'status'
- threads.add(resp['headers']['X-Thread'])
+ threads.add(resp['headers']['X-Thread'])
- assert resp['headers']['Wsgi-Multithread'] == 'True', 'multithread'
+ assert resp['headers']['Wsgi-Multithread'] == 'True', 'multithread'
- sock.close()
+ sock.close()
- assert len(socks) == len(threads), 'threads differs'
+ assert len(socks) == len(threads), 'threads differs'
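
The hunks above capture the shape of the whole refactor: test methods on TestApplicationPython that call self.load() and self.get() become module-level pytest functions sharing a single ApplicationPython instance, with prerequisites promoted to a module attribute. A minimal before/after sketch of that pattern, using illustrative names (TestExample, test_basic) rather than the real tests:

from unit.applications.lang.python import ApplicationPython

# Old style (removed above): helpers lived on the test class.
#
#     class TestExample(TestApplicationPython):
#         prerequisites = {'modules': {'python': 'any'}}
#
#         def test_basic(self):
#             self.load('empty')
#             assert self.get()['status'] == 200
#
# New style (added above): module-level prerequisites and a shared client.

prerequisites = {'modules': {'python': 'any'}}

client = ApplicationPython()


def test_basic():
    client.load('empty')
    assert client.get()['status'] == 200
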
diff --git a/test/test_python_basic.py b/test/test_python_basic.py
index e661a89c..37859c8c 100644
--- a/test/test_python_basic.py
+++ b/test/test_python_basic.py
@@ -1,126 +1,134 @@
-from unit.control import TestControl
+from unit.control import Control
+prerequisites = {'modules': {'python': 'any'}}
-class TestPythonBasic(TestControl):
- prerequisites = {'modules': {'python': 'any'}}
+client = Control()
- conf_app = {
+conf_app = {
+ "app": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ }
+}
+
+conf_basic = {
+ "listeners": {"*:7080": {"pass": "applications/app"}},
+ "applications": conf_app,
+}
+
+
+def test_python_get_empty():
+ assert client.conf_get() == {'listeners': {}, 'applications': {}}
+ assert client.conf_get('listeners') == {}
+ assert client.conf_get('applications') == {}
+
+
+def test_python_get_applications():
+ client.conf(conf_app, 'applications')
+
+ conf = client.conf_get()
+
+ assert conf['listeners'] == {}, 'listeners'
+ assert conf['applications'] == {
"app": {
"type": "python",
"processes": {"spare": 0},
"path": "/app",
"module": "wsgi",
}
- }
+ }, 'applications'
- conf_basic = {
- "listeners": {"*:7080": {"pass": "applications/app"}},
- "applications": conf_app,
- }
-
- def test_python_get_empty(self):
- assert self.conf_get() == {'listeners': {}, 'applications': {}}
- assert self.conf_get('listeners') == {}
- assert self.conf_get('applications') == {}
-
- def test_python_get_applications(self):
- self.conf(self.conf_app, 'applications')
-
- conf = self.conf_get()
-
- assert conf['listeners'] == {}, 'listeners'
- assert conf['applications'] == {
- "app": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- }
- }, 'applications'
-
- assert self.conf_get('applications') == {
- "app": {
- "type": "python",
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- }
- }, 'applications prefix'
-
- assert self.conf_get('applications/app') == {
+ assert client.conf_get('applications') == {
+ "app": {
"type": "python",
"processes": {"spare": 0},
"path": "/app",
"module": "wsgi",
- }, 'applications prefix 2'
-
- assert self.conf_get('applications/app/type') == 'python', 'type'
- assert self.conf_get('applications/app/processes/spare') == 0, 'spare'
-
- def test_python_get_listeners(self):
- assert 'success' in self.conf(self.conf_basic)
-
- assert self.conf_get()['listeners'] == {
- "*:7080": {"pass": "applications/app"}
- }, 'listeners'
-
- assert self.conf_get('listeners') == {
- "*:7080": {"pass": "applications/app"}
- }, 'listeners prefix'
-
- assert self.conf_get('listeners/*:7080') == {
- "pass": "applications/app"
- }, 'listeners prefix 2'
-
- def test_python_change_listener(self):
- assert 'success' in self.conf(self.conf_basic)
- assert 'success' in self.conf(
- {"*:7081": {"pass": "applications/app"}}, 'listeners'
- )
-
- assert self.conf_get('listeners') == {
- "*:7081": {"pass": "applications/app"}
- }, 'change listener'
-
- def test_python_add_listener(self):
- assert 'success' in self.conf(self.conf_basic)
- assert 'success' in self.conf(
- {"pass": "applications/app"}, 'listeners/*:7082'
- )
-
- assert self.conf_get('listeners') == {
- "*:7080": {"pass": "applications/app"},
- "*:7082": {"pass": "applications/app"},
- }, 'add listener'
-
- def test_python_change_application(self):
- assert 'success' in self.conf(self.conf_basic)
-
- assert 'success' in self.conf('30', 'applications/app/processes/max')
- assert (
- self.conf_get('applications/app/processes/max') == 30
- ), 'change application max'
-
- assert 'success' in self.conf('"/www"', 'applications/app/path')
- assert (
- self.conf_get('applications/app/path') == '/www'
- ), 'change application path'
-
- def test_python_delete(self):
- assert 'success' in self.conf(self.conf_basic)
-
- assert 'error' in self.conf_delete('applications/app')
- assert 'success' in self.conf_delete('listeners/*:7080')
- assert 'success' in self.conf_delete('applications/app')
- assert 'error' in self.conf_delete('applications/app')
-
- def test_python_delete_blocks(self):
- assert 'success' in self.conf(self.conf_basic)
-
- assert 'success' in self.conf_delete('listeners')
- assert 'success' in self.conf_delete('applications')
-
- assert 'success' in self.conf(self.conf_app, 'applications')
- assert 'success' in self.conf(
- {"*:7081": {"pass": "applications/app"}}, 'listeners'
- ), 'applications restore'
+ }
+ }, 'applications prefix'
+
+ assert client.conf_get('applications/app') == {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ }, 'applications prefix 2'
+
+ assert client.conf_get('applications/app/type') == 'python', 'type'
+ assert client.conf_get('applications/app/processes/spare') == 0, 'spare'
+
+
+def test_python_get_listeners():
+ assert 'success' in client.conf(conf_basic)
+
+ assert client.conf_get()['listeners'] == {
+ "*:7080": {"pass": "applications/app"}
+ }, 'listeners'
+
+ assert client.conf_get('listeners') == {
+ "*:7080": {"pass": "applications/app"}
+ }, 'listeners prefix'
+
+ assert client.conf_get('listeners/*:7080') == {
+ "pass": "applications/app"
+ }, 'listeners prefix 2'
+
+
+def test_python_change_listener():
+ assert 'success' in client.conf(conf_basic)
+ assert 'success' in client.conf(
+ {"*:7081": {"pass": "applications/app"}}, 'listeners'
+ )
+
+ assert client.conf_get('listeners') == {
+ "*:7081": {"pass": "applications/app"}
+ }, 'change listener'
+
+
+def test_python_add_listener():
+ assert 'success' in client.conf(conf_basic)
+ assert 'success' in client.conf(
+ {"pass": "applications/app"}, 'listeners/*:7082'
+ )
+
+ assert client.conf_get('listeners') == {
+ "*:7080": {"pass": "applications/app"},
+ "*:7082": {"pass": "applications/app"},
+ }, 'add listener'
+
+
+def test_python_change_application():
+ assert 'success' in client.conf(conf_basic)
+
+ assert 'success' in client.conf('30', 'applications/app/processes/max')
+ assert (
+ client.conf_get('applications/app/processes/max') == 30
+ ), 'change application max'
+
+ assert 'success' in client.conf('"/www"', 'applications/app/path')
+ assert (
+ client.conf_get('applications/app/path') == '/www'
+ ), 'change application path'
+
+
+def test_python_delete():
+ assert 'success' in client.conf(conf_basic)
+
+ assert 'error' in client.conf_delete('applications/app')
+ assert 'success' in client.conf_delete('listeners/*:7080')
+ assert 'success' in client.conf_delete('applications/app')
+ assert 'error' in client.conf_delete('applications/app')
+
+
+def test_python_delete_blocks():
+ assert 'success' in client.conf(conf_basic)
+
+ assert 'success' in client.conf_delete('listeners')
+ assert 'success' in client.conf_delete('applications')
+
+ assert 'success' in client.conf(conf_app, 'applications')
+ assert 'success' in client.conf(
+ {"*:7081": {"pass": "applications/app"}}, 'listeners'
+ ), 'applications restore'
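
In test_python_basic.py the per-class conf_app and conf_basic attributes become module-level dicts, and every configuration call goes through one shared unit.control.Control() instance. A short sketch of that round trip, assuming conf(), conf_get() and conf_delete() behave as in the assertions above:

from unit.control import Control

client = Control()

conf_basic = {
    "listeners": {"*:7080": {"pass": "applications/app"}},
    "applications": {
        "app": {
            "type": "python",
            "processes": {"spare": 0},
            "path": "/app",
            "module": "wsgi",
        }
    },
}


def test_conf_roundtrip():
    # Upload a full configuration, then read parts of it back by path.
    assert 'success' in client.conf(conf_basic)
    assert client.conf_get('listeners/*:7080') == {"pass": "applications/app"}

    # An application can only be deleted after its listener is gone.
    assert 'error' in client.conf_delete('applications/app')
    assert 'success' in client.conf_delete('listeners/*:7080')
    assert 'success' in client.conf_delete('applications/app')
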
diff --git a/test/test_python_environment.py b/test/test_python_environment.py
index bce72c4d..6aa02c94 100644
--- a/test/test_python_environment.py
+++ b/test/test_python_environment.py
@@ -1,155 +1,162 @@
-from unit.applications.lang.python import TestApplicationPython
-
-
-class TestPythonEnvironment(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
-
- def test_python_environment_name_null(self):
- self.load('environment')
-
- assert 'error' in self.conf(
- {"va\0r": "val1"}, 'applications/environment/environment'
- ), 'name null'
-
- def test_python_environment_name_equals(self):
- self.load('environment')
-
- assert 'error' in self.conf(
- {"var=": "val1"}, 'applications/environment/environment'
- ), 'name equals'
-
- def test_python_environment_value_null(self):
- self.load('environment')
-
- assert 'error' in self.conf(
- {"var": "\0val"}, 'applications/environment/environment'
- ), 'value null'
-
- def test_python_environment_update(self):
- self.load('environment')
-
- self.conf({"var": "val1"}, 'applications/environment/environment')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Variables': 'var',
- 'Connection': 'close',
- }
- )['body']
- == 'val1'
- ), 'set'
-
- self.conf({"var": "val2"}, 'applications/environment/environment')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Variables': 'var',
- 'Connection': 'close',
- }
- )['body']
- == 'val2'
- ), 'update'
-
- def test_python_environment_replace(self):
- self.load('environment')
-
- self.conf({"var1": "val1"}, 'applications/environment/environment')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Variables': 'var1',
- 'Connection': 'close',
- }
- )['body']
- == 'val1'
- ), 'set'
-
- self.conf({"var2": "val2"}, 'applications/environment/environment')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Variables': 'var1,var2',
- 'Connection': 'close',
- }
- )['body']
- == 'val2'
- ), 'replace'
-
- def test_python_environment_clear(self):
- self.load('environment')
-
- self.conf(
- {"var1": "val1", "var2": "val2"},
- 'applications/environment/environment',
- )
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Variables': 'var1,var2',
- 'Connection': 'close',
- }
- )['body']
- == 'val1,val2'
- ), 'set'
-
- self.conf({}, 'applications/environment/environment')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Variables': 'var1,var2',
- 'Connection': 'close',
- }
- )['body']
- == ''
- ), 'clear'
-
- def test_python_environment_replace_default(self):
- self.load('environment')
-
- home_default = self.get(
+from unit.applications.lang.python import ApplicationPython
+
+prerequisites = {'modules': {'python': 'any'}}
+
+client = ApplicationPython()
+
+
+def test_python_environment_name_null():
+ client.load('environment')
+
+ assert 'error' in client.conf(
+ {"va\0r": "val1"}, 'applications/environment/environment'
+ ), 'name null'
+
+
+def test_python_environment_name_equals():
+ client.load('environment')
+
+ assert 'error' in client.conf(
+ {"var=": "val1"}, 'applications/environment/environment'
+ ), 'name equals'
+
+
+def test_python_environment_value_null():
+ client.load('environment')
+
+ assert 'error' in client.conf(
+ {"var": "\0val"}, 'applications/environment/environment'
+ ), 'value null'
+
+
+def test_python_environment_update():
+ client.load('environment')
+
+ client.conf({"var": "val1"}, 'applications/environment/environment')
+
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Variables': 'var',
+ 'Connection': 'close',
+ }
+ )['body']
+ == 'val1'
+ ), 'set'
+
+ client.conf({"var": "val2"}, 'applications/environment/environment')
+
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Variables': 'var',
+ 'Connection': 'close',
+ }
+ )['body']
+ == 'val2'
+ ), 'update'
+
+
+def test_python_environment_replace():
+ client.load('environment')
+
+ client.conf({"var1": "val1"}, 'applications/environment/environment')
+
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Variables': 'var1',
+ 'Connection': 'close',
+ }
+ )['body']
+ == 'val1'
+ ), 'set'
+
+ client.conf({"var2": "val2"}, 'applications/environment/environment')
+
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Variables': 'var1,var2',
+ 'Connection': 'close',
+ }
+ )['body']
+ == 'val2'
+ ), 'replace'
+
+
+def test_python_environment_clear():
+ client.load('environment')
+
+ client.conf(
+ {"var1": "val1", "var2": "val2"},
+ 'applications/environment/environment',
+ )
+
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Variables': 'var1,var2',
+ 'Connection': 'close',
+ }
+ )['body']
+ == 'val1,val2'
+ ), 'set'
+
+ client.conf({}, 'applications/environment/environment')
+
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Variables': 'var1,var2',
+ 'Connection': 'close',
+ }
+ )['body']
+ == ''
+ ), 'clear'
+
+
+def test_python_environment_replace_default():
+ client.load('environment')
+
+ home_default = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Variables': 'HOME',
+ 'Connection': 'close',
+ }
+ )['body']
+
+ assert len(home_default) > 1, 'get default'
+
+ client.conf({"HOME": "/"}, 'applications/environment/environment')
+
+ assert (
+ client.get(
headers={
'Host': 'localhost',
'X-Variables': 'HOME',
'Connection': 'close',
}
)['body']
+ == '/'
+ ), 'replace default'
+
+ client.conf({}, 'applications/environment/environment')
- assert len(home_default) > 1, 'get default'
-
- self.conf({"HOME": "/"}, 'applications/environment/environment')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Variables': 'HOME',
- 'Connection': 'close',
- }
- )['body']
- == '/'
- ), 'replace default'
-
- self.conf({}, 'applications/environment/environment')
-
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'X-Variables': 'HOME',
- 'Connection': 'close',
- }
- )['body']
- == home_default
- ), 'restore default'
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Variables': 'HOME',
+ 'Connection': 'close',
+ }
+ )['body']
+ == home_default
+ ), 'restore default'
diff --git a/test/test_python_isolation.py b/test/test_python_isolation.py
index c524aea0..260a87a2 100644
--- a/test/test_python_isolation.py
+++ b/test/test_python_isolation.py
@@ -4,224 +4,211 @@ import subprocess
from pathlib import Path
import pytest
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
from unit.utils import findmnt
from unit.utils import waitformount
from unit.utils import waitforunmount
+prerequisites = {'modules': {'python': 'any'}, 'features': {'isolation': True}}
-class TestPythonIsolation(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}, 'features': ['isolation']}
+client = ApplicationPython()
- def get_cgroup(self, app_name):
- output = subprocess.check_output(
- ['ps', 'ax', '-o', 'pid', '-o', 'cmd']
- ).decode()
- pid = re.search(
- fr'(\d+)\s*unit: "{app_name}" application', output
- ).group(1)
+def get_cgroup(app_name):
+ output = subprocess.check_output(
+ ['ps', 'ax', '-o', 'pid', '-o', 'cmd']
+ ).decode()
- cgroup = f'/proc/{pid}/cgroup'
+ pid = re.search(fr'(\d+)\s*unit: "{app_name}" application', output).group(1)
- if not os.path.isfile(cgroup):
- pytest.skip(f'no cgroup at {cgroup}')
+ cgroup = f'/proc/{pid}/cgroup'
- with open(cgroup, 'r') as f:
- return f.read().rstrip()
+ if not os.path.isfile(cgroup):
+ pytest.skip(f'no cgroup at {cgroup}')
- def test_python_isolation_rootfs(self, is_su, temp_dir):
- isolation_features = option.available['features']['isolation'].keys()
+ with open(cgroup, 'r') as f:
+ return f.read().rstrip()
- if not is_su:
- if not 'unprivileged_userns_clone' in isolation_features:
- pytest.skip('requires unprivileged userns or root')
- if 'user' not in isolation_features:
- pytest.skip('user namespace is not supported')
+def test_python_isolation_rootfs(is_su, require, temp_dir):
+ isolation = {'rootfs': temp_dir}
- if 'mnt' not in isolation_features:
- pytest.skip('mnt namespace is not supported')
+ if not is_su:
+ require(
+ {
+ 'features': {
+ 'isolation': [
+ 'unprivileged_userns_clone',
+ 'user',
+ 'mnt',
+ 'pid',
+ ]
+ }
+ }
+ )
- if 'pid' not in isolation_features:
- pytest.skip('pid namespace is not supported')
+ isolation['namespaces'] = {
+ 'mount': True,
+ 'credential': True,
+ 'pid': True,
+ }
- isolation = {'rootfs': temp_dir}
+ client.load('ns_inspect', isolation=isolation)
- if not is_su:
- isolation['namespaces'] = {
- 'mount': True,
- 'credential': True,
- 'pid': True,
- }
+ assert not (
+ client.getjson(url=f'/?path={temp_dir}')['body']['FileExists']
+ ), 'temp_dir does not exist in rootfs'
- self.load('ns_inspect', isolation=isolation)
+ assert client.getjson(url='/?path=/proc/self')['body'][
+ 'FileExists'
+ ], 'no /proc/self'
- assert (
- self.getjson(url=f'/?path={temp_dir}')['body']['FileExists']
- == False
- ), 'temp_dir does not exists in rootfs'
+ assert not (
+ client.getjson(url='/?path=/dev/pts')['body']['FileExists']
+ ), 'no /dev/pts'
- assert (
- self.getjson(url='/?path=/proc/self')['body']['FileExists'] == True
- ), 'no /proc/self'
+ assert not (
+ client.getjson(url='/?path=/sys/kernel')['body']['FileExists']
+ ), 'no /sys/kernel'
- assert (
- self.getjson(url='/?path=/dev/pts')['body']['FileExists'] == False
- ), 'no /dev/pts'
+ ret = client.getjson(url='/?path=/app/python/ns_inspect')
- assert (
- self.getjson(url='/?path=/sys/kernel')['body']['FileExists']
- == False
- ), 'no /sys/kernel'
+ assert ret['body']['FileExists'], 'application exists in rootfs'
- ret = self.getjson(url='/?path=/app/python/ns_inspect')
- assert ret['body']['FileExists'] == True, 'application exists in rootfs'
+def test_python_isolation_rootfs_no_language_deps(require, temp_dir):
+ require({'privileged_user': True})
- def test_python_isolation_rootfs_no_language_deps(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('requires root')
+ isolation = {'rootfs': temp_dir, 'automount': {'language_deps': False}}
+ client.load('empty', isolation=isolation)
- isolation = {'rootfs': temp_dir, 'automount': {'language_deps': False}}
- self.load('empty', isolation=isolation)
+ python_path = f'{temp_dir}/usr'
- python_path = f'{temp_dir}/usr'
+ assert findmnt().find(python_path) == -1
+ assert client.get()['status'] != 200, 'disabled language_deps'
+ assert findmnt().find(python_path) == -1
- assert findmnt().find(python_path) == -1
- assert self.get()['status'] != 200, 'disabled language_deps'
- assert findmnt().find(python_path) == -1
+ isolation['automount']['language_deps'] = True
- isolation['automount']['language_deps'] = True
+ client.load('empty', isolation=isolation)
- self.load('empty', isolation=isolation)
+ assert findmnt().find(python_path) == -1
+ assert client.get()['status'] == 200, 'enabled language_deps'
+ assert waitformount(python_path), 'language_deps mount'
- assert findmnt().find(python_path) == -1
- assert self.get()['status'] == 200, 'enabled language_deps'
- assert waitformount(python_path), 'language_deps mount'
+ client.conf({"listeners": {}, "applications": {}})
- self.conf({"listeners": {}, "applications": {}})
+ assert waitforunmount(python_path), 'language_deps unmount'
- assert waitforunmount(python_path), 'language_deps unmount'
- def test_python_isolation_procfs(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('requires root')
+def test_python_isolation_procfs(require, temp_dir):
+ require({'privileged_user': True})
- isolation = {'rootfs': temp_dir, 'automount': {'procfs': False}}
+ isolation = {'rootfs': temp_dir, 'automount': {'procfs': False}}
- self.load('ns_inspect', isolation=isolation)
+ client.load('ns_inspect', isolation=isolation)
- assert (
- self.getjson(url='/?path=/proc/self')['body']['FileExists'] == False
- ), 'no /proc/self'
+ assert not (
+ client.getjson(url='/?path=/proc/self')['body']['FileExists']
+ ), 'no /proc/self'
- isolation['automount']['procfs'] = True
+ isolation['automount']['procfs'] = True
- self.load('ns_inspect', isolation=isolation)
+ client.load('ns_inspect', isolation=isolation)
- assert (
- self.getjson(url='/?path=/proc/self')['body']['FileExists'] == True
- ), '/proc/self'
+ assert client.getjson(url='/?path=/proc/self')['body'][
+ 'FileExists'
+ ], '/proc/self'
- def test_python_isolation_cgroup(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('requires root')
- if not 'cgroup' in option.available['features']['isolation']:
- pytest.skip('cgroup is not supported')
+def test_python_isolation_cgroup(require):
+ require({'privileged_user': True, 'features': {'isolation': ['cgroup']}})
- def set_cgroup_path(path):
- isolation = {'cgroup': {'path': path}}
- self.load('empty', processes=1, isolation=isolation)
+ def set_cgroup_path(path):
+ isolation = {'cgroup': {'path': path}}
+ client.load('empty', processes=1, isolation=isolation)
- set_cgroup_path('scope/python')
+ set_cgroup_path('scope/python')
- cgroup_rel = Path(self.get_cgroup('empty'))
- assert cgroup_rel.parts[-2:] == ('scope', 'python'), 'cgroup rel'
+ cgroup_rel = Path(get_cgroup('empty'))
+ assert cgroup_rel.parts[-2:] == ('scope', 'python'), 'cgroup rel'
- set_cgroup_path('/scope2/python')
+ set_cgroup_path('/scope2/python')
- cgroup_abs = Path(self.get_cgroup('empty'))
- assert cgroup_abs.parts[-2:] == ('scope2', 'python'), 'cgroup abs'
+ cgroup_abs = Path(get_cgroup('empty'))
+ assert cgroup_abs.parts[-2:] == ('scope2', 'python'), 'cgroup abs'
- assert len(cgroup_rel.parts) >= len(cgroup_abs.parts)
+ assert len(cgroup_rel.parts) >= len(cgroup_abs.parts)
- def test_python_isolation_cgroup_two(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('requires root')
- if not 'cgroup' in option.available['features']['isolation']:
- pytest.skip('cgroup is not supported')
+def test_python_isolation_cgroup_two(require):
+ require({'privileged_user': True, 'features': {'isolation': ['cgroup']}})
- def set_two_cgroup_path(path, path2):
- script_path = f'{option.test_dir}/python/empty'
+ def set_two_cgroup_path(path, path2):
+ script_path = f'{option.test_dir}/python/empty'
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "applications/one"},
- "*:7081": {"pass": "applications/two"},
- },
- "applications": {
- "one": {
- "type": "python",
- "processes": 1,
- "path": script_path,
- "working_directory": script_path,
- "module": "wsgi",
- "isolation": {
- 'cgroup': {'path': path},
- },
- },
- "two": {
- "type": "python",
- "processes": 1,
- "path": script_path,
- "working_directory": script_path,
- "module": "wsgi",
- "isolation": {
- 'cgroup': {'path': path2},
- },
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "applications/one"},
+ "*:7081": {"pass": "applications/two"},
+ },
+ "applications": {
+ "one": {
+ "type": "python",
+ "processes": 1,
+ "path": script_path,
+ "working_directory": script_path,
+ "module": "wsgi",
+ "isolation": {
+ 'cgroup': {'path': path},
},
},
- }
- )
-
- set_two_cgroup_path('/scope/python', '/scope/python')
- assert self.get_cgroup('one') == self.get_cgroup('two')
-
- set_two_cgroup_path('/scope/python', '/scope2/python')
- assert self.get_cgroup('one') != self.get_cgroup('two')
-
- def test_python_isolation_cgroup_invalid(self, is_su):
- if not is_su:
- pytest.skip('requires root')
-
- if not 'cgroup' in option.available['features']['isolation']:
- pytest.skip('cgroup is not supported')
-
- def check_invalid(path):
- script_path = f'{option.test_dir}/python/empty'
- assert 'error' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "applications/empty"}},
- "applications": {
- "empty": {
- "type": "python",
- "processes": {"spare": 0},
- "path": script_path,
- "working_directory": script_path,
- "module": "wsgi",
- "isolation": {
- 'cgroup': {'path': path},
- },
- }
+ "two": {
+ "type": "python",
+ "processes": 1,
+ "path": script_path,
+ "working_directory": script_path,
+ "module": "wsgi",
+ "isolation": {
+ 'cgroup': {'path': path2},
+ },
},
- }
- )
+ },
+ }
+ )
+
+ set_two_cgroup_path('/scope/python', '/scope/python')
+ assert get_cgroup('one') == get_cgroup('two')
+
+ set_two_cgroup_path('/scope/python', '/scope2/python')
+ assert get_cgroup('one') != get_cgroup('two')
+
+
+def test_python_isolation_cgroup_invalid(require):
+ require({'privileged_user': True, 'features': {'isolation': ['cgroup']}})
+
+ def check_invalid(path):
+ script_path = f'{option.test_dir}/python/empty'
+ assert 'error' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "applications/empty"}},
+ "applications": {
+ "empty": {
+ "type": "python",
+ "processes": {"spare": 0},
+ "path": script_path,
+ "working_directory": script_path,
+ "module": "wsgi",
+ "isolation": {
+ 'cgroup': {'path': path},
+ },
+ }
+ },
+ }
+ )
- check_invalid('')
- check_invalid('../scope')
- check_invalid('scope/../python')
+ check_invalid('')
+ check_invalid('../scope')
+ check_invalid('scope/../python')
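
The isolation tests drop their hand-rolled pytest.skip() checks in favour of a require fixture that accepts the same nested structure as the module-level prerequisites dict (privileged_user, features.isolation and so on). A sketch of a test combining the two, assuming require skips the test when a requirement is missing:

from unit.applications.lang.python import ApplicationPython

prerequisites = {'modules': {'python': 'any'}, 'features': {'isolation': True}}

client = ApplicationPython()


def test_cgroup_path(require):
    # Replaces the old "if not is_su: pytest.skip(...)" and feature checks.
    require({'privileged_user': True, 'features': {'isolation': ['cgroup']}})

    isolation = {'cgroup': {'path': 'scope/python'}}
    client.load('empty', processes=1, isolation=isolation)

    assert client.get()['status'] == 200
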
diff --git a/test/test_python_isolation_chroot.py b/test/test_python_isolation_chroot.py
index 349ec869..60fac5ef 100644
--- a/test/test_python_isolation_chroot.py
+++ b/test/test_python_isolation_chroot.py
@@ -1,38 +1,29 @@
-import pytest
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
+prerequisites = {'modules': {'python': 'any'}, 'privileged_user': True}
-class TestPythonIsolation(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def test_python_isolation_chroot(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('requires root')
- isolation = {
- 'rootfs': temp_dir,
- }
+def test_python_isolation_chroot(temp_dir):
+ client.load('ns_inspect', isolation={'rootfs': temp_dir})
- self.load('ns_inspect', isolation=isolation)
+ assert not (
+ client.getjson(url=f'/?path={temp_dir}')['body']['FileExists']
+ ), 'temp_dir does not exist in rootfs'
- assert (
- self.getjson(url=f'/?path={temp_dir}')['body']['FileExists']
- == False
- ), 'temp_dir does not exists in rootfs'
+ assert client.getjson(url='/?path=/proc/self')['body'][
+ 'FileExists'
+ ], 'no /proc/self'
- assert (
- self.getjson(url='/?path=/proc/self')['body']['FileExists'] == True
- ), 'no /proc/self'
+ assert not (
+ client.getjson(url='/?path=/dev/pts')['body']['FileExists']
+ ), 'no /dev/pts'
- assert (
- self.getjson(url='/?path=/dev/pts')['body']['FileExists'] == False
- ), 'no /dev/pts'
+ assert not (
+ client.getjson(url='/?path=/sys/kernel')['body']['FileExists']
+ ), 'no /sys/kernel'
- assert (
- self.getjson(url='/?path=/sys/kernel')['body']['FileExists']
- == False
- ), 'no /sys/kernel'
+ ret = client.getjson(url='/?path=/app/python/ns_inspect')
- ret = self.getjson(url='/?path=/app/python/ns_inspect')
-
- assert ret['body']['FileExists'] == True, 'application exists in rootfs'
+ assert ret['body']['FileExists'], 'application exists in rootfs'
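
Both isolation files also replace the explicit == True / == False comparisons with plain truthiness asserts on the ns_inspect responses. The same checks in that style, as a small self-contained sketch:

from unit.applications.lang.python import ApplicationPython

prerequisites = {'modules': {'python': 'any'}, 'privileged_user': True}

client = ApplicationPython()


def test_rootfs_visibility(temp_dir):
    client.load('ns_inspect', isolation={'rootfs': temp_dir})

    # The host's temp_dir must not be visible from inside the new root.
    assert not client.getjson(url=f'/?path={temp_dir}')['body']['FileExists']

    # procfs is mounted by default, so /proc/self should be visible.
    assert client.getjson(url='/?path=/proc/self')['body']['FileExists']
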
diff --git a/test/test_python_procman.py b/test/test_python_procman.py
index d69123ef..4643a9b8 100644
--- a/test/test_python_procman.py
+++ b/test/test_python_procman.py
@@ -4,281 +4,299 @@ import subprocess
import time
import pytest
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {'modules': {'python': 'any'}}
-class TestPythonProcman(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def setup_method(self):
- self.app_name = f'app-{option.temp_dir.split("/")[-1]}'
- self.app_proc = f'applications/{self.app_name}/processes'
- self.load('empty', self.app_name)
- def pids_for_process(self):
- time.sleep(0.2)
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ client.app_name = f'app-{temp_dir.split("/")[-1]}'
+ client.app_proc = f'applications/{client.app_name}/processes'
+ client.load('empty', client.app_name)
- output = subprocess.check_output(['ps', 'ax'])
- pids = set()
- for m in re.findall(
- fr'.*unit: "{self.app_name}" application', output.decode()
- ):
- pids.add(re.search(r'^\s*(\d+)', m).group(1))
+def pids_for_process():
+ time.sleep(0.2)
- return pids
+ output = subprocess.check_output(['ps', 'ax'])
- def conf_proc(self, conf, path=None):
- if path is None:
- path = self.app_proc
+ pids = set()
+ for m in re.findall(
+ fr'.*unit: "{client.app_name}" application', output.decode()
+ ):
+ pids.add(re.search(r'^\s*(\d+)', m).group(1))
- assert 'success' in self.conf(conf, path), 'configure processes'
+ return pids
- @pytest.mark.skip('not yet')
- def test_python_processes_idle_timeout_zero(self):
- self.conf_proc({"spare": 0, "max": 2, "idle_timeout": 0})
- self.get()
- assert len(self.pids_for_process()) == 0, 'idle timeout 0'
+def conf_proc(conf, path=None):
+ if path is None:
+ path = client.app_proc
- def test_python_prefork(self):
- self.conf_proc('2')
+ assert 'success' in client.conf(conf, path), 'configure processes'
- pids = self.pids_for_process()
- assert len(pids) == 2, 'prefork 2'
- self.get()
- assert self.pids_for_process() == pids, 'prefork still 2'
+def stop_all():
+ assert 'success' in client.conf({"listeners": {}, "applications": {}})
- self.conf_proc('4')
+ assert len(pids_for_process()) == 0, 'stop all'
- pids = self.pids_for_process()
- assert len(pids) == 4, 'prefork 4'
- self.get()
- assert self.pids_for_process() == pids, 'prefork still 4'
+@pytest.mark.skip('not yet')
+def test_python_processes_idle_timeout_zero():
+ conf_proc({"spare": 0, "max": 2, "idle_timeout": 0})
- self.stop_all()
+ client.get()
+ assert len(pids_for_process()) == 0, 'idle timeout 0'
- @pytest.mark.skip('not yet')
- def test_python_prefork_same_processes(self):
- self.conf_proc('2')
- pids = self.pids_for_process()
- self.conf_proc('4')
- pids_new = self.pids_for_process()
+def test_python_prefork():
+ conf_proc('2')
- assert pids.issubset(pids_new), 'prefork same processes'
+ pids = pids_for_process()
+ assert len(pids) == 2, 'prefork 2'
- def test_python_ondemand(self):
- self.conf_proc({"spare": 0, "max": 8, "idle_timeout": 1})
+ client.get()
+ assert pids_for_process() == pids, 'prefork still 2'
- assert len(self.pids_for_process()) == 0, 'on-demand 0'
+ conf_proc('4')
- self.get()
- pids = self.pids_for_process()
- assert len(pids) == 1, 'on-demand 1'
+ pids = pids_for_process()
+ assert len(pids) == 4, 'prefork 4'
- self.get()
- assert self.pids_for_process() == pids, 'on-demand still 1'
+ client.get()
+ assert pids_for_process() == pids, 'prefork still 4'
- time.sleep(1)
+ stop_all()
- assert len(self.pids_for_process()) == 0, 'on-demand stop idle'
- self.stop_all()
+@pytest.mark.skip('not yet')
+def test_python_prefork_same_processes():
+ conf_proc('2')
+ pids = pids_for_process()
- def test_python_scale_updown(self):
- self.conf_proc({"spare": 2, "max": 8, "idle_timeout": 1})
+ conf_proc('4')
+ pids_new = pids_for_process()
- pids = self.pids_for_process()
- assert len(pids) == 2, 'updown 2'
+ assert pids.issubset(pids_new), 'prefork same processes'
- self.get()
- pids_new = self.pids_for_process()
- assert len(pids_new) == 3, 'updown 3'
- assert pids.issubset(pids_new), 'updown 3 only 1 new'
- self.get()
- assert self.pids_for_process() == pids_new, 'updown still 3'
+def test_python_ondemand():
+ conf_proc({"spare": 0, "max": 8, "idle_timeout": 1})
- time.sleep(1)
+ assert len(pids_for_process()) == 0, 'on-demand 0'
- pids = self.pids_for_process()
- assert len(pids) == 2, 'updown stop idle'
+ client.get()
+ pids = pids_for_process()
+ assert len(pids) == 1, 'on-demand 1'
- self.get()
- pids_new = self.pids_for_process()
- assert len(pids_new) == 3, 'updown again 3'
- assert pids.issubset(pids_new), 'updown again 3 only 1 new'
+ client.get()
+ assert pids_for_process() == pids, 'on-demand still 1'
- self.stop_all()
+ time.sleep(1)
- def test_python_reconfigure(self):
- self.conf_proc({"spare": 2, "max": 6, "idle_timeout": 1})
+ assert len(pids_for_process()) == 0, 'on-demand stop idle'
- pids = self.pids_for_process()
- assert len(pids) == 2, 'reconf 2'
+ stop_all()
- self.get()
- pids_new = self.pids_for_process()
- assert len(pids_new) == 3, 'reconf 3'
- assert pids.issubset(pids_new), 'reconf 3 only 1 new'
- self.conf_proc('6', f'{self.app_proc}/spare')
+def test_python_scale_updown():
+ conf_proc({"spare": 2, "max": 8, "idle_timeout": 1})
- pids = self.pids_for_process()
- assert len(pids) == 6, 'reconf 6'
+ pids = pids_for_process()
+ assert len(pids) == 2, 'updown 2'
- self.get()
- assert self.pids_for_process() == pids, 'reconf still 6'
+ client.get()
+ pids_new = pids_for_process()
+ assert len(pids_new) == 3, 'updown 3'
+ assert pids.issubset(pids_new), 'updown 3 only 1 new'
- self.stop_all()
+ client.get()
+ assert pids_for_process() == pids_new, 'updown still 3'
- def test_python_idle_timeout(self):
- self.conf_proc({"spare": 0, "max": 6, "idle_timeout": 2})
+ time.sleep(1)
- self.get()
- pids = self.pids_for_process()
- assert len(pids) == 1, 'idle timeout 1'
+ pids = pids_for_process()
+ assert len(pids) == 2, 'updown stop idle'
- time.sleep(1)
+ client.get()
+ pids_new = pids_for_process()
+ assert len(pids_new) == 3, 'updown again 3'
+ assert pids.issubset(pids_new), 'updown again 3 only 1 new'
- self.get()
+ stop_all()
- time.sleep(1)
- pids_new = self.pids_for_process()
- assert len(pids_new) == 1, 'idle timeout still 1'
- assert self.pids_for_process() == pids, 'idle timeout still 1 same pid'
+def test_python_reconfigure():
+ conf_proc({"spare": 2, "max": 6, "idle_timeout": 1})
- time.sleep(1)
+ pids = pids_for_process()
+ assert len(pids) == 2, 'reconf 2'
- assert len(self.pids_for_process()) == 0, 'idle timed out'
+ client.get()
+ pids_new = pids_for_process()
+ assert len(pids_new) == 3, 'reconf 3'
+ assert pids.issubset(pids_new), 'reconf 3 only 1 new'
- def test_python_processes_connection_keepalive(self):
- self.conf_proc({"spare": 0, "max": 6, "idle_timeout": 2})
+ conf_proc('6', f'{client.app_proc}/spare')
- (resp, sock) = self.get(
- headers={'Host': 'localhost', 'Connection': 'keep-alive'},
- start=True,
- read_timeout=1,
- )
- assert len(self.pids_for_process()) == 1, 'keepalive connection 1'
+ pids = pids_for_process()
+ assert len(pids) == 6, 'reconf 6'
- time.sleep(2)
+ client.get()
+ assert pids_for_process() == pids, 'reconf still 6'
- assert len(self.pids_for_process()) == 0, 'keepalive connection 0'
+ stop_all()
- sock.close()
- def test_python_processes_access(self):
- self.conf_proc('1')
+def test_python_idle_timeout():
+ conf_proc({"spare": 0, "max": 6, "idle_timeout": 2})
- path = f'/{self.app_proc}'
- assert 'error' in self.conf_get(f'{path}/max')
- assert 'error' in self.conf_get(f'{path}/spare')
- assert 'error' in self.conf_get(f'{path}/idle_timeout')
+ client.get()
+ pids = pids_for_process()
+ assert len(pids) == 1, 'idle timeout 1'
- def test_python_processes_invalid(self):
- assert 'error' in self.conf(
- {"spare": -1}, self.app_proc
- ), 'negative spare'
- assert 'error' in self.conf({"max": -1}, self.app_proc), 'negative max'
- assert 'error' in self.conf(
- {"idle_timeout": -1}, self.app_proc
- ), 'negative idle_timeout'
- assert 'error' in self.conf(
- {"spare": 2}, self.app_proc
- ), 'spare gt max default'
- assert 'error' in self.conf(
- {"spare": 2, "max": 1}, self.app_proc
- ), 'spare gt max'
- assert 'error' in self.conf(
- {"spare": 0, "max": 0}, self.app_proc
- ), 'max zero'
+ time.sleep(1)
- def stop_all(self):
- assert 'success' in self.conf({"listeners": {}, "applications": {}})
+ client.get()
- assert len(self.pids_for_process()) == 0, 'stop all'
+ time.sleep(1)
- def test_python_restart(self, temp_dir):
- shutil.copyfile(
- f'{option.test_dir}/python/restart/v1.py', f'{temp_dir}/wsgi.py'
- )
+ pids_new = pids_for_process()
+ assert len(pids_new) == 1, 'idle timeout still 1'
+ assert pids_for_process() == pids, 'idle timeout still 1 same pid'
- self.load(
- temp_dir,
- name=self.app_name,
- processes=1,
- environment={'PYTHONDONTWRITEBYTECODE': '1'},
- )
+ time.sleep(1)
- b = self.get()['body']
- assert b == "v1", 'process started'
+ assert len(pids_for_process()) == 0, 'idle timed out'
- shutil.copyfile(
- f'{option.test_dir}/python/restart/v2.py', f'{temp_dir}/wsgi.py'
- )
- b = self.get()['body']
- assert b == "v1", 'still old process'
+def test_python_processes_connection_keepalive():
+ conf_proc({"spare": 0, "max": 6, "idle_timeout": 2})
- assert 'success' in self.conf_get(
- f'/control/applications/{self.app_name}/restart'
- ), 'restart processes'
+ (_, sock) = client.get(
+ headers={'Host': 'localhost', 'Connection': 'keep-alive'},
+ start=True,
+ read_timeout=1,
+ )
+ assert len(pids_for_process()) == 1, 'keepalive connection 1'
- b = self.get()['body']
- assert b == "v2", 'new process started'
+ time.sleep(2)
- assert 'error' in self.conf_get(
- '/control/applications/blah/restart'
- ), 'application incorrect'
+ assert len(pids_for_process()) == 0, 'keepalive connection 0'
- assert 'error' in self.conf_delete(
- f'/control/applications/{self.app_name}/restart'
- ), 'method incorrect'
+ sock.close()
- def test_python_restart_multi(self):
- self.conf_proc('2')
- pids = self.pids_for_process()
- assert len(pids) == 2, 'restart 2 started'
+def test_python_processes_access():
+ conf_proc('1')
- assert 'success' in self.conf_get(
- f'/control/applications/{self.app_name}/restart'
- ), 'restart processes'
+ path = f'/{client.app_proc}'
+ assert 'error' in client.conf_get(f'{path}/max')
+ assert 'error' in client.conf_get(f'{path}/spare')
+ assert 'error' in client.conf_get(f'{path}/idle_timeout')
- new_pids = self.pids_for_process()
- assert len(new_pids) == 2, 'restart still 2'
- assert len(new_pids.intersection(pids)) == 0, 'restart all new'
+def test_python_processes_invalid():
+ assert 'error' in client.conf(
+ {"spare": -1}, client.app_proc
+ ), 'negative spare'
+ assert 'error' in client.conf({"max": -1}, client.app_proc), 'negative max'
+ assert 'error' in client.conf(
+ {"idle_timeout": -1}, client.app_proc
+ ), 'negative idle_timeout'
+ assert 'error' in client.conf(
+ {"spare": 2}, client.app_proc
+ ), 'spare gt max default'
+ assert 'error' in client.conf(
+ {"spare": 2, "max": 1}, client.app_proc
+ ), 'spare gt max'
+ assert 'error' in client.conf(
+ {"spare": 0, "max": 0}, client.app_proc
+ ), 'max zero'
- def test_python_restart_longstart(self):
- self.load(
- 'restart',
- name=self.app_name,
- module="longstart",
- processes={"spare": 1, "max": 2, "idle_timeout": 5},
- )
- assert len(self.pids_for_process()) == 1, 'longstarts == 1'
+def test_python_restart(temp_dir):
+ shutil.copyfile(
+ f'{option.test_dir}/python/restart/v1.py', f'{temp_dir}/wsgi.py'
+ )
- self.get()
+ client.load(
+ temp_dir,
+ name=client.app_name,
+ processes=1,
+ environment={'PYTHONDONTWRITEBYTECODE': '1'},
+ )
- pids = self.pids_for_process()
- assert len(pids) == 2, 'longstarts == 2'
+ b = client.get()['body']
+ assert b == "v1", 'process started'
- assert 'success' in self.conf_get(
- f'/control/applications/{self.app_name}/restart'
- ), 'restart processes'
-
- # wait for longstarted app
- time.sleep(2)
-
- new_pids = self.pids_for_process()
- assert len(new_pids) == 1, 'restart 1'
+ shutil.copyfile(
+ f'{option.test_dir}/python/restart/v2.py', f'{temp_dir}/wsgi.py'
+ )
- assert len(new_pids.intersection(pids)) == 0, 'restart all new'
+ b = client.get()['body']
+ assert b == "v1", 'still old process'
+
+ assert 'success' in client.conf_get(
+ f'/control/applications/{client.app_name}/restart'
+ ), 'restart processes'
+
+ b = client.get()['body']
+ assert b == "v2", 'new process started'
+
+ assert 'error' in client.conf_get(
+ '/control/applications/blah/restart'
+ ), 'application incorrect'
+
+ assert 'error' in client.conf_delete(
+ f'/control/applications/{client.app_name}/restart'
+ ), 'method incorrect'
+
+
+def test_python_restart_multi():
+ conf_proc('2')
+
+ pids = pids_for_process()
+ assert len(pids) == 2, 'restart 2 started'
+
+ assert 'success' in client.conf_get(
+ f'/control/applications/{client.app_name}/restart'
+ ), 'restart processes'
+
+ new_pids = pids_for_process()
+ assert len(new_pids) == 2, 'restart still 2'
+
+ assert len(new_pids.intersection(pids)) == 0, 'restart all new'
+
+
+def test_python_restart_longstart():
+ client.load(
+ 'restart',
+ name=client.app_name,
+ module="longstart",
+ processes={"spare": 1, "max": 2, "idle_timeout": 5},
+ )
+
+ assert len(pids_for_process()) == 1, 'longstarts == 1'
+
+ client.get()
+
+ pids = pids_for_process()
+ assert len(pids) == 2, 'longstarts == 2'
+
+ assert 'success' in client.conf_get(
+ f'/control/applications/{client.app_name}/restart'
+ ), 'restart processes'
+
+ # wait for longstarted app
+ time.sleep(2)
+
+ new_pids = pids_for_process()
+ assert len(new_pids) == 1, 'restart 1'
+
+ assert len(new_pids.intersection(pids)) == 0, 'restart all new'
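
test_python_procman.py converts setup_method() into an autouse pytest fixture and promotes its helpers (pids_for_process, conf_proc, stop_all) to module functions. A compact sketch of that structure, assuming the temp_dir fixture and the 'empty' application used above:

import pytest

from unit.applications.lang.python import ApplicationPython

prerequisites = {'modules': {'python': 'any'}}

client = ApplicationPython()


@pytest.fixture(autouse=True)
def setup_method_fixture(temp_dir):
    # Runs before every test in the module, like the old setup_method():
    # derive a unique application name and load it once per test.
    client.app_name = f'app-{temp_dir.split("/")[-1]}'
    client.app_proc = f'applications/{client.app_name}/processes'
    client.load('empty', client.app_name)


def conf_proc(conf, path=None):
    if path is None:
        path = client.app_proc

    assert 'success' in client.conf(conf, path), 'configure processes'


def test_prefork_two():
    conf_proc('2')
    assert client.get()['status'] == 200
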
diff --git a/test/test_python_targets.py b/test/test_python_targets.py
index f55609ba..46e77c19 100644
--- a/test/test_python_targets.py
+++ b/test/test_python_targets.py
@@ -1,103 +1,105 @@
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {'modules': {'python': 'all'}}
-class TestPythonTargets(TestApplicationPython):
- prerequisites = {'modules': {'python': 'all'}}
+client = ApplicationPython()
- def test_python_targets(self):
- python_dir = f'{option.test_dir}/python'
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": "/1"},
- "action": {"pass": "applications/targets/1"},
- },
- {
- "match": {"uri": "/2"},
- "action": {"pass": "applications/targets/2"},
- },
- ],
- "applications": {
+def test_python_targets():
+ python_dir = f'{option.test_dir}/python'
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"uri": "/1"},
+ "action": {"pass": "applications/targets/1"},
+ },
+ {
+ "match": {"uri": "/2"},
+ "action": {"pass": "applications/targets/2"},
+ },
+ ],
+ "applications": {
+ "targets": {
+ "type": client.get_application_type(),
+ "working_directory": f'{python_dir}/targets/',
+ "path": f'{python_dir}/targets/',
"targets": {
- "type": self.get_application_type(),
- "working_directory": f'{python_dir}/targets/',
- "path": f'{python_dir}/targets/',
- "targets": {
- "1": {
- "module": "wsgi",
- "callable": "wsgi_target_a",
- },
- "2": {
- "module": "wsgi",
- "callable": "wsgi_target_b",
- },
+ "1": {
+ "module": "wsgi",
+ "callable": "wsgi_target_a",
},
- }
- },
- }
- )
+ "2": {
+ "module": "wsgi",
+ "callable": "wsgi_target_b",
+ },
+ },
+ }
+ },
+ }
+ )
- resp = self.get(url='/1')
- assert resp['status'] == 200
- assert resp['body'] == '1'
+ resp = client.get(url='/1')
+ assert resp['status'] == 200
+ assert resp['body'] == '1'
- resp = self.get(url='/2')
- assert resp['status'] == 200
- assert resp['body'] == '2'
+ resp = client.get(url='/2')
+ assert resp['status'] == 200
+ assert resp['body'] == '2'
- def test_python_targets_prefix(self):
- python_dir = f'{option.test_dir}/python'
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": ["/app*"]},
- "action": {"pass": "applications/targets/app"},
- },
- {
- "match": {"uri": "*"},
- "action": {"pass": "applications/targets/catchall"},
- },
- ],
- "applications": {
+def test_python_targets_prefix():
+ python_dir = f'{option.test_dir}/python'
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"uri": ["/app*"]},
+ "action": {"pass": "applications/targets/app"},
+ },
+ {
+ "match": {"uri": "*"},
+ "action": {"pass": "applications/targets/catchall"},
+ },
+ ],
+ "applications": {
+ "targets": {
+ "type": "python",
+ "working_directory": f'{python_dir}/targets/',
+ "path": f'{python_dir}/targets/',
+ "protocol": "wsgi",
"targets": {
- "type": "python",
- "working_directory": f'{python_dir}/targets/',
- "path": f'{python_dir}/targets/',
- "protocol": "wsgi",
- "targets": {
- "app": {
- "module": "wsgi",
- "callable": "wsgi_target_prefix",
- "prefix": "/app/",
- },
- "catchall": {
- "module": "wsgi",
- "callable": "wsgi_target_prefix",
- "prefix": "/api",
- },
+ "app": {
+ "module": "wsgi",
+ "callable": "wsgi_target_prefix",
+ "prefix": "/app/",
},
- }
- },
- }
- )
+ "catchall": {
+ "module": "wsgi",
+ "callable": "wsgi_target_prefix",
+ "prefix": "/api",
+ },
+ },
+ }
+ },
+ }
+ )
- def check_prefix(url, body):
- resp = self.get(url=url)
- assert resp['status'] == 200
- assert resp['body'] == body
+ def check_prefix(url, body):
+ resp = client.get(url=url)
+ assert resp['status'] == 200
+ assert resp['body'] == body
- check_prefix('/app', '/app ')
- check_prefix('/app/', '/app /')
- check_prefix('/app/rest/user/', '/app /rest/user/')
- check_prefix('/catchall', 'No Script Name /catchall')
- check_prefix('/api', '/api ')
- check_prefix('/api/', '/api /')
- check_prefix('/apis', 'No Script Name /apis')
- check_prefix('/api/users/', '/api /users/')
+ check_prefix('/app', '/app ')
+ check_prefix('/app/', '/app /')
+ check_prefix('/app/rest/user/', '/app /rest/user/')
+ check_prefix('/catchall', 'No Script Name /catchall')
+ check_prefix('/api', '/api ')
+ check_prefix('/api/', '/api /')
+ check_prefix('/apis', 'No Script Name /apis')
+ check_prefix('/api/users/', '/api /users/')
diff --git a/test/test_reconfigure.py b/test/test_reconfigure.py
index feb027aa..53258b41 100644
--- a/test/test_reconfigure.py
+++ b/test/test_reconfigure.py
@@ -1,52 +1,54 @@
import time
import pytest
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
+client = ApplicationProto()
-class TestReconfigure(TestApplicationProto):
- prerequisites = {}
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- )
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ )
- def clear_conf(self):
- assert 'success' in self.conf({"listeners": {}, "applications": {}})
- def test_reconfigure(self):
- sock = self.http(
- b"""GET / HTTP/1.1
+def clear_conf():
+ assert 'success' in client.conf({"listeners": {}, "applications": {}})
+
+
+def test_reconfigure():
+ sock = client.http(
+ b"""GET / HTTP/1.1
""",
- raw=True,
- no_recv=True,
- )
+ raw=True,
+ no_recv=True,
+ )
- self.clear_conf()
+ clear_conf()
- resp = self.http(
- b"""Host: localhost
+ resp = client.http(
+ b"""Host: localhost
Connection: close
""",
- sock=sock,
- raw=True,
- )
- assert resp['status'] == 200, 'finish request'
+ sock=sock,
+ raw=True,
+ )
+ assert resp['status'] == 200, 'finish request'
+
- def test_reconfigure_2(self):
- sock = self.http(b'', raw=True, no_recv=True)
+def test_reconfigure_2():
+ sock = client.http(b'', raw=True, no_recv=True)
- # Waiting for connection completion.
- # Delay should be more than TCP_DEFER_ACCEPT.
- time.sleep(1.5)
+ # Waiting for connection completion.
+ # Delay should be more than TCP_DEFER_ACCEPT.
+ time.sleep(1.5)
- self.clear_conf()
+ clear_conf()
- assert self.get(sock=sock)['status'] == 408, 'request timeout'
+ assert client.get(sock=sock)['status'] == 408, 'request timeout'
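
test_reconfigure.py applies the same conversion to the reconfiguration checks: a request is split across two raw http() calls, the configuration is wiped in between, and the response must still arrive. A condensed sketch of that flow, assuming the ApplicationProto helpers shown above:

from unit.applications.proto import ApplicationProto

client = ApplicationProto()


def test_request_survives_reconfigure():
    # Start from the same minimal return-200 configuration the fixture uses.
    assert 'success' in client.conf(
        {
            "listeners": {"*:7080": {"pass": "routes"}},
            "routes": [{"action": {"return": 200}}],
            "applications": {},
        }
    )

    # Send only the request line and keep the connection open.
    sock = client.http(b'GET / HTTP/1.1\n', raw=True, no_recv=True)

    # Wipe listeners and applications while the request is in flight.
    assert 'success' in client.conf({"listeners": {}, "applications": {}})

    # Complete the request on the same socket; it must still be answered.
    resp = client.http(
        b'Host: localhost\nConnection: close\n\n', sock=sock, raw=True
    )
    assert resp['status'] == 200, 'finish request'
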
diff --git a/test/test_reconfigure_tls.py b/test/test_reconfigure_tls.py
index 0f92a419..b473b147 100644
--- a/test/test_reconfigure_tls.py
+++ b/test/test_reconfigure_tls.py
@@ -3,103 +3,110 @@ import ssl
import time
import pytest
-from unit.applications.tls import TestApplicationTLS
+from unit.applications.tls import ApplicationTLS
+prerequisites = {'modules': {'openssl': 'any'}}
-class TestReconfigureTLS(TestApplicationTLS):
- prerequisites = {'modules': {'openssl': 'any'}}
+client = ApplicationTLS()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self):
- if 'HAS_TLSv1_2' not in dir(ssl) or not ssl.HAS_TLSv1_2:
- pytest.skip('OpenSSL too old')
- self.certificate()
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ if 'HAS_TLSv1_2' not in dir(ssl) or not ssl.HAS_TLSv1_2:
+ pytest.skip('OpenSSL too old')
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {
- "pass": "routes",
- "tls": {"certificate": "default"},
- }
- },
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- ), 'load application configuration'
+ client.certificate()
- def create_socket(self):
- ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
- ctx.check_hostname = False
- ctx.verify_mode = ssl.CERT_NONE
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {
+ "pass": "routes",
+ "tls": {"certificate": "default"},
+ }
+ },
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ ), 'load application configuration'
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- ssl_sock = ctx.wrap_socket(
- s, server_hostname='localhost', do_handshake_on_connect=False
- )
- ssl_sock.connect(('127.0.0.1', 7080))
- return ssl_sock
+def create_socket():
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ ctx.check_hostname = False
+ ctx.verify_mode = ssl.CERT_NONE
- def clear_conf(self):
- assert 'success' in self.conf({"listeners": {}, "applications": {}})
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ ssl_sock = ctx.wrap_socket(
+ s, server_hostname='localhost', do_handshake_on_connect=False
+ )
+ ssl_sock.connect(('127.0.0.1', 7080))
- @pytest.mark.skip('not yet')
- def test_reconfigure_tls_switch(self):
- assert 'success' in self.conf_delete('listeners/*:7080/tls')
+ return ssl_sock
- (_, sock) = self.get(
- headers={'Host': 'localhost', 'Connection': 'keep-alive'},
- start=True,
- read_timeout=1,
- )
- assert 'success' in self.conf(
- {"pass": "routes", "tls": {"certificate": "default"}},
- 'listeners/*:7080',
- )
+def clear_conf():
+ assert 'success' in client.conf({"listeners": {}, "applications": {}})
- assert self.get(sock=sock)['status'] == 200, 'reconfigure'
- assert self.get_ssl()['status'] == 200, 'reconfigure tls'
- def test_reconfigure_tls(self):
- ssl_sock = self.create_socket()
+@pytest.mark.skip('not yet')
+def test_reconfigure_tls_switch():
+ assert 'success' in client.conf_delete('listeners/*:7080/tls')
- ssl_sock.sendall("""GET / HTTP/1.1\r\n""".encode())
+ (_, sock) = client.get(
+ headers={'Host': 'localhost', 'Connection': 'keep-alive'},
+ start=True,
+ read_timeout=1,
+ )
- self.clear_conf()
+ assert 'success' in client.conf(
+ {"pass": "routes", "tls": {"certificate": "default"}},
+ 'listeners/*:7080',
+ )
- ssl_sock.sendall(
- """Host: localhost\r\nConnection: close\r\n\r\n""".encode()
- )
+ assert client.get(sock=sock)['status'] == 200, 'reconfigure'
+ assert client.get_ssl()['status'] == 200, 'reconfigure tls'
- assert (
- self.recvall(ssl_sock).decode().startswith('HTTP/1.1 200 OK')
- ), 'finish request'
- def test_reconfigure_tls_2(self):
- ssl_sock = self.create_socket()
+def test_reconfigure_tls():
+ ssl_sock = create_socket()
- # Waiting for connection completion.
- # Delay should be more than TCP_DEFER_ACCEPT.
- time.sleep(1.5)
+ ssl_sock.sendall("""GET / HTTP/1.1\r\n""".encode())
- self.clear_conf()
+ clear_conf()
- try:
- ssl_sock.do_handshake()
- except ssl.SSLError:
- ssl_sock.close()
- success = True
+ ssl_sock.sendall(
+ """Host: localhost\r\nConnection: close\r\n\r\n""".encode()
+ )
- if not success:
- pytest.fail('Connection is not closed.')
+ assert (
+ client.recvall(ssl_sock).decode().startswith('HTTP/1.1 200 OK')
+ ), 'finish request'
- def test_reconfigure_tls_3(self):
- ssl_sock = self.create_socket()
+
+def test_reconfigure_tls_2():
+ ssl_sock = create_socket()
+
+ # Waiting for connection completion.
+ # Delay should be more than TCP_DEFER_ACCEPT.
+ time.sleep(1.5)
+
+ clear_conf()
+
+ try:
ssl_sock.do_handshake()
+ except ssl.SSLError:
+ ssl_sock.close()
+ success = True
+
+ if not success:
+ pytest.fail('Connection is not closed.')
+
+
+def test_reconfigure_tls_3():
+ ssl_sock = create_socket()
+ ssl_sock.do_handshake()
- self.clear_conf()
+ clear_conf()
- assert self.get(sock=ssl_sock)['status'] == 408, 'request timeout'
+ assert client.get(sock=ssl_sock)['status'] == 408, 'request timeout'
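The create_socket() helper introduced above defers the TLS handshake so the tests can reconfigure the listener between connect() and the handshake. A self-contained sketch of that client setup, assuming a Unit listener on 127.0.0.1:7080 with the tests' self-signed 'default' certificate (hence verification is disabled), looks like this; it is an illustration, not part of the patch.

import socket
import ssl


def create_socket(host='127.0.0.1', port=7080):
    # Client-side context; the test certificate is self-signed, so
    # hostname checking and verification are switched off.
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # do_handshake_on_connect=False postpones the TLS handshake until the
    # test explicitly calls do_handshake() or sends data.
    ssl_sock = ctx.wrap_socket(
        s, server_hostname='localhost', do_handshake_on_connect=False
    )
    ssl_sock.connect((host, port))
    return ssl_sock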
diff --git a/test/test_respawn.py b/test/test_respawn.py
index 3d3dfac3..dc465cda 100644
--- a/test/test_respawn.py
+++ b/test/test_respawn.py
@@ -2,99 +2,107 @@ import re
import subprocess
import time
-from unit.applications.lang.python import TestApplicationPython
-from unit.option import option
+import pytest
+from unit.applications.lang.python import ApplicationPython
+prerequisites = {'modules': {'python': 'any'}}
-class TestRespawn(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- PATTERN_ROUTER = 'unit: router'
- PATTERN_CONTROLLER = 'unit: controller'
+PATTERN_ROUTER = 'unit: router'
+PATTERN_CONTROLLER = 'unit: controller'
- def setup_method(self):
- self.app_name = f'app-{option.temp_dir.split("/")[-1]}'
- self.load('empty', self.app_name)
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ client.app_name = f'app-{temp_dir.split("/")[-1]}'
- assert 'success' in self.conf(
- '1', f'applications/{self.app_name}/processes'
- )
+ client.load('empty', client.app_name)
- def pid_by_name(self, name, ppid):
- output = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
- m = re.search(fr'\s*(\d+)\s*{ppid}.*{name}', output)
- return None if m is None else m.group(1)
+ assert 'success' in client.conf(
+ '1', f'applications/{client.app_name}/processes'
+ )
- def kill_pids(self, *pids):
- subprocess.call(['kill', '-9', *pids])
- def wait_for_process(self, process, unit_pid):
- for i in range(50):
- found = self.pid_by_name(process, unit_pid)
+def pid_by_name(name, ppid):
+ output = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
+ m = re.search(fr'\s*(\d+)\s*{ppid}.*{name}', output)
+ return None if m is None else m.group(1)
- if found is not None:
- break
- time.sleep(0.1)
+def kill_pids(*pids):
+ subprocess.call(['kill', '-9', *pids])
- return found
- def find_proc(self, name, ppid, ps_output):
- return re.findall(fr'{ppid}.*{name}', ps_output)
+def wait_for_process(process, unit_pid):
+ for _ in range(50):
+ found = pid_by_name(process, unit_pid)
- def smoke_test(self, unit_pid):
- for _ in range(10):
- r = self.conf('1', f'applications/{self.app_name}/processes')
+ if found is not None:
+ break
- if 'success' in r:
- break
+ time.sleep(0.1)
- time.sleep(0.1)
+ return found
- assert 'success' in r
- assert self.get()['status'] == 200
- # Check if the only one router, controller,
- # and application processes running.
+def find_proc(name, ppid, ps_output):
+ return re.findall(fr'{ppid}.*{name}', ps_output)
- out = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
- assert len(self.find_proc(self.PATTERN_ROUTER, unit_pid, out)) == 1
- assert len(self.find_proc(self.PATTERN_CONTROLLER, unit_pid, out)) == 1
- assert len(self.find_proc(self.app_name, unit_pid, out)) == 1
- def test_respawn_router(self, skip_alert, unit_pid, skip_fds_check):
- skip_fds_check(router=True)
- pid = self.pid_by_name(self.PATTERN_ROUTER, unit_pid)
+def smoke_test(unit_pid):
+ for _ in range(10):
+ r = client.conf('1', f'applications/{client.app_name}/processes')
- self.kill_pids(pid)
- skip_alert(fr'process {pid} exited on signal 9')
+ if 'success' in r:
+ break
- assert self.wait_for_process(self.PATTERN_ROUTER, unit_pid) is not None
+ time.sleep(0.1)
- self.smoke_test(unit_pid)
+ assert 'success' in r
+ assert client.get()['status'] == 200
- def test_respawn_controller(self, skip_alert, unit_pid, skip_fds_check):
- skip_fds_check(controller=True)
- pid = self.pid_by_name(self.PATTERN_CONTROLLER, unit_pid)
+ # Check if the only one router, controller,
+ # and application processes running.
- self.kill_pids(pid)
- skip_alert(fr'process {pid} exited on signal 9')
+ out = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
+ assert len(find_proc(PATTERN_ROUTER, unit_pid, out)) == 1
+ assert len(find_proc(PATTERN_CONTROLLER, unit_pid, out)) == 1
+ assert len(find_proc(client.app_name, unit_pid, out)) == 1
- assert (
- self.wait_for_process(self.PATTERN_CONTROLLER, unit_pid) is not None
- )
- assert self.get()['status'] == 200
+def test_respawn_router(skip_alert, unit_pid, skip_fds_check):
+ skip_fds_check(router=True)
+ pid = pid_by_name(PATTERN_ROUTER, unit_pid)
- self.smoke_test(unit_pid)
+ kill_pids(pid)
+ skip_alert(fr'process {pid} exited on signal 9')
- def test_respawn_application(self, skip_alert, unit_pid):
- pid = self.pid_by_name(self.app_name, unit_pid)
+ assert wait_for_process(PATTERN_ROUTER, unit_pid) is not None
- self.kill_pids(pid)
- skip_alert(fr'process {pid} exited on signal 9')
+ smoke_test(unit_pid)
- assert self.wait_for_process(self.app_name, unit_pid) is not None
- self.smoke_test(unit_pid)
+def test_respawn_controller(skip_alert, unit_pid, skip_fds_check):
+ skip_fds_check(controller=True)
+ pid = pid_by_name(PATTERN_CONTROLLER, unit_pid)
+
+ kill_pids(pid)
+ skip_alert(fr'process {pid} exited on signal 9')
+
+ assert wait_for_process(PATTERN_CONTROLLER, unit_pid) is not None
+
+ assert client.get()['status'] == 200
+
+ smoke_test(unit_pid)
+
+
+def test_respawn_application(skip_alert, unit_pid):
+ pid = pid_by_name(client.app_name, unit_pid)
+
+ kill_pids(pid)
+ skip_alert(fr'process {pid} exited on signal 9')
+
+ assert wait_for_process(client.app_name, unit_pid) is not None
+
+ smoke_test(unit_pid)
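The respawn tests above rely on two small process-discovery helpers. As a rough standalone sketch (the tries/delay parameters are illustrative additions, not part of the patch): pid_by_name() scans `ps ax -O ppid` for a child of the Unit daemon whose command line contains the given name, and wait_for_process() polls until the respawned process appears.

import re
import subprocess
import time


def pid_by_name(name, ppid):
    # List all processes with their parent PID and grep for a child of
    # `ppid` whose command line mentions `name`.
    output = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
    m = re.search(fr'\s*(\d+)\s*{ppid}.*{name}', output)
    return None if m is None else m.group(1)


def wait_for_process(process, unit_pid, tries=50, delay=0.1):
    # Poll until the respawned process shows up, or give up after `tries`.
    found = None
    for _ in range(tries):
        found = pid_by_name(process, unit_pid)
        if found is not None:
            break
        time.sleep(delay)
    return found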
diff --git a/test/test_return.py b/test/test_return.py
index 4b8bddc7..35525ed5 100644
--- a/test/test_return.py
+++ b/test/test_return.py
@@ -1,214 +1,220 @@
import re
-from unit.applications.proto import TestApplicationProto
+import pytest
+from unit.applications.proto import ApplicationProto
+client = ApplicationProto()
-class TestReturn(TestApplicationProto):
- prerequisites = {}
- def setup_method(self):
- self._load_conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- )
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ )
- def get_resps_sc(self, req=10):
- to_send = b"""GET / HTTP/1.1
+
+def get_resps_sc(req=10):
+ to_send = b"""GET / HTTP/1.1
Host: localhost
""" * (
- req - 1
- )
+ req - 1
+ )
- to_send += b"""GET / HTTP/1.1
+ to_send += b"""GET / HTTP/1.1
Host: localhost
Connection: close
"""
- return self.http(to_send, raw_resp=True, raw=True)
+ return client.http(to_send, raw_resp=True, raw=True)
+
+
+def test_return():
+ resp = client.get()
+ assert resp['status'] == 200
+ assert 'Server' in resp['headers']
+ assert 'Date' in resp['headers']
+ assert resp['headers']['Content-Length'] == '0'
+ assert resp['headers']['Connection'] == 'close'
+ assert resp['body'] == '', 'body'
+
+ resp = client.post(body='blah')
+ assert resp['status'] == 200
+ assert resp['body'] == '', 'body'
+
+ resp = get_resps_sc()
+ assert len(re.findall('200 OK', resp)) == 10
+ assert len(re.findall('Connection:', resp)) == 1
+ assert len(re.findall('Connection: close', resp)) == 1
+
+ resp = client.get(http_10=True)
+ assert resp['status'] == 200
+ assert 'Server' in resp['headers']
+ assert 'Date' in resp['headers']
+ assert resp['headers']['Content-Length'] == '0'
+ assert 'Connection' not in resp['headers']
+ assert resp['body'] == '', 'body'
+
+
+def test_return_update():
+ assert 'success' in client.conf('0', 'routes/0/action/return')
+
+ resp = client.get()
+ assert resp['status'] == 0
+ assert resp['body'] == ''
+
+ assert 'success' in client.conf('404', 'routes/0/action/return')
+
+ resp = client.get()
+ assert resp['status'] == 404
+ assert resp['body'] != ''
+
+ assert 'success' in client.conf('598', 'routes/0/action/return')
+
+ resp = client.get()
+ assert resp['status'] == 598
+ assert resp['body'] != ''
- def test_return(self):
- resp = self.get()
- assert resp['status'] == 200
- assert 'Server' in resp['headers']
- assert 'Date' in resp['headers']
- assert resp['headers']['Content-Length'] == '0'
- assert resp['headers']['Connection'] == 'close'
- assert resp['body'] == '', 'body'
-
- resp = self.post(body='blah')
- assert resp['status'] == 200
- assert resp['body'] == '', 'body'
-
- resp = self.get_resps_sc()
- assert len(re.findall('200 OK', resp)) == 10
- assert len(re.findall('Connection:', resp)) == 1
- assert len(re.findall('Connection: close', resp)) == 1
-
- resp = self.get(http_10=True)
- assert resp['status'] == 200
- assert 'Server' in resp['headers']
- assert 'Date' in resp['headers']
- assert resp['headers']['Content-Length'] == '0'
- assert 'Connection' not in resp['headers']
- assert resp['body'] == '', 'body'
-
- def test_return_update(self):
- assert 'success' in self.conf('0', 'routes/0/action/return')
-
- resp = self.get()
- assert resp['status'] == 0
- assert resp['body'] == ''
-
- assert 'success' in self.conf('404', 'routes/0/action/return')
-
- resp = self.get()
- assert resp['status'] == 404
- assert resp['body'] != ''
-
- assert 'success' in self.conf('598', 'routes/0/action/return')
-
- resp = self.get()
- assert resp['status'] == 598
- assert resp['body'] != ''
-
- assert 'success' in self.conf('999', 'routes/0/action/return')
-
- resp = self.get()
- assert resp['status'] == 999
- assert resp['body'] == ''
-
- def test_return_location(self):
- reserved = ":/?#[]@!&'()*+,;="
- unreserved = (
- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
- "0123456789-._~"
- )
- unsafe = " \"%<>\\^`{|}"
- unsafe_enc = "%20%22%25%3C%3E%5C%5E%60%7B%7C%7D"
-
- def check_location(location, expect=None):
- if expect is None:
- expect = location
-
- assert 'success' in self.conf(
- {"return": 301, "location": location}, 'routes/0/action'
- ), 'configure location'
-
- assert self.get()['headers']['Location'] == expect
-
- # FAIL: can't specify empty header value.
- # check_location("")
-
- check_location(reserved)
-
- # After first "?" all other "?" encoded.
- check_location(f'/?{reserved}', "/?:/%3F#[]@!&'()*+,;=")
- check_location("???", "?%3F%3F")
-
- # After first "#" all other "?" or "#" encoded.
- check_location(f'/#{reserved}', "/#:/%3F%23[]@!&'()*+,;=")
- check_location("##?#?", "#%23%3F%23%3F")
-
- # After first "?" next "#" not encoded.
- check_location(f'/?#{reserved}', "/?#:/%3F%23[]@!&'()*+,;=")
- check_location("??##", "?%3F#%23")
- check_location("/?##?", "/?#%23%3F")
-
- # Unreserved never encoded.
- check_location(unreserved)
- check_location(f'/{unreserved}?{unreserved}#{unreserved}')
-
- # Unsafe always encoded.
- check_location(unsafe, unsafe_enc)
- check_location(f'?{unsafe}', f'?{unsafe_enc}')
- check_location(f'#{unsafe}', f'#{unsafe_enc}')
-
- # %00-%20 and %7F-%FF always encoded.
- check_location(u"\u0000\u0018\u001F\u0020\u0021", "%00%18%1F%20!")
- check_location(u"\u007F\u0080н\u20BD", "%7F%C2%80%D0%BD%E2%82%BD")
-
- # Encoded string detection. If at least one char need to be encoded
- # then whole string will be encoded.
- check_location("%20")
- check_location("/%20?%20#%20")
- check_location(" %20", "%20%2520")
- check_location("%20 ", "%2520%20")
- check_location("/%20?%20#%20 ", "/%2520?%2520#%2520%20")
-
- def test_return_location_edit(self):
- assert 'success' in self.conf(
- {"return": 302, "location": "blah"}, 'routes/0/action'
- ), 'configure init location'
- assert self.get()['headers']['Location'] == 'blah'
-
- assert 'success' in self.conf_delete(
- 'routes/0/action/location'
- ), 'location delete'
- assert 'Location' not in self.get()['headers']
-
- assert 'success' in self.conf(
- '"blah"', 'routes/0/action/location'
- ), 'location restore'
- assert self.get()['headers']['Location'] == 'blah'
-
- assert 'error' in self.conf_post(
- '"blah"', 'routes/0/action/location'
- ), 'location method not allowed'
- assert self.get()['headers']['Location'] == 'blah'
-
- assert 'success' in self.conf(
- '"https://${host}${uri}"', 'routes/0/action/location'
- ), 'location with variables'
- assert self.get()['headers']['Location'] == 'https://localhost/'
-
- assert 'success' in self.conf(
- '"/#$host"', 'routes/0/action/location'
- ), 'location with encoding and a variable'
- assert self.get()['headers']['Location'] == '/#localhost'
-
- assert (
- self.get(headers={"Host": "#foo?bar", "Connection": "close"})[
- 'headers'
- ]['Location']
- == "/#%23foo%3Fbar"
- ), 'location with a variable with encoding'
-
- assert 'success' in self.conf(
- '""', 'routes/0/action/location'
- ), 'location empty'
- assert self.get()['headers']['Location'] == ''
-
- assert 'success' in self.conf(
- '"${host}"', 'routes/0/action/location'
- ), 'location empty with variable'
- assert (
- self.get(headers={"Host": "", "Connection": "close"})['headers'][
- 'Location'
- ]
- == ""
- ), 'location with empty variable'
-
- def test_return_invalid(self):
- def check_error(conf):
- assert 'error' in self.conf(conf, 'routes/0/action')
-
- check_error({"return": "200"})
- check_error({"return": []})
- check_error({"return": 80.1})
- check_error({"return": 1000})
- check_error({"return": -1})
- check_error({"return": 200, "share": "/blah"})
- check_error({"return": 200, "location": "$hos"})
- check_error({"return": 200, "location": "$hostblah"})
-
- assert 'error' in self.conf(
- '001', 'routes/0/action/return'
- ), 'leading zero'
-
- check_error({"return": 301, "location": 0})
- check_error({"return": 301, "location": []})
+ assert 'success' in client.conf('999', 'routes/0/action/return')
+
+ resp = client.get()
+ assert resp['status'] == 999
+ assert resp['body'] == ''
+
+
+def test_return_location():
+ reserved = ":/?#[]@!&'()*+,;="
+ unreserved = (
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" "0123456789-._~"
+ )
+ unsafe = " \"%<>\\^`{|}"
+ unsafe_enc = "%20%22%25%3C%3E%5C%5E%60%7B%7C%7D"
+
+ def check_location(location, expect=None):
+ if expect is None:
+ expect = location
+
+ assert 'success' in client.conf(
+ {"return": 301, "location": location}, 'routes/0/action'
+ ), 'configure location'
+
+ assert client.get()['headers']['Location'] == expect
+
+ # FAIL: can't specify empty header value.
+ # check_location("")
+
+ check_location(reserved)
+
+ # After first "?" all other "?" encoded.
+ check_location(f'/?{reserved}', "/?:/%3F#[]@!&'()*+,;=")
+ check_location("???", "?%3F%3F")
+
+ # After first "#" all other "?" or "#" encoded.
+ check_location(f'/#{reserved}', "/#:/%3F%23[]@!&'()*+,;=")
+ check_location("##?#?", "#%23%3F%23%3F")
+
+ # After first "?" next "#" not encoded.
+ check_location(f'/?#{reserved}', "/?#:/%3F%23[]@!&'()*+,;=")
+ check_location("??##", "?%3F#%23")
+ check_location("/?##?", "/?#%23%3F")
+
+ # Unreserved never encoded.
+ check_location(unreserved)
+ check_location(f'/{unreserved}?{unreserved}#{unreserved}')
+
+ # Unsafe always encoded.
+ check_location(unsafe, unsafe_enc)
+ check_location(f'?{unsafe}', f'?{unsafe_enc}')
+ check_location(f'#{unsafe}', f'#{unsafe_enc}')
+
+ # %00-%20 and %7F-%FF always encoded.
+ check_location("\u0000\u0018\u001F\u0020\u0021", "%00%18%1F%20!")
+ check_location("\u007F\u0080н\u20BD", "%7F%C2%80%D0%BD%E2%82%BD")
+
+ # Encoded string detection. If at least one char need to be encoded
+ # then whole string will be encoded.
+ check_location("%20")
+ check_location("/%20?%20#%20")
+ check_location(" %20", "%20%2520")
+ check_location("%20 ", "%2520%20")
+ check_location("/%20?%20#%20 ", "/%2520?%2520#%2520%20")
+
+
+def test_return_location_edit():
+ assert 'success' in client.conf(
+ {"return": 302, "location": "blah"}, 'routes/0/action'
+ ), 'configure init location'
+ assert client.get()['headers']['Location'] == 'blah'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/action/location'
+ ), 'location delete'
+ assert 'Location' not in client.get()['headers']
+
+ assert 'success' in client.conf(
+ '"blah"', 'routes/0/action/location'
+ ), 'location restore'
+ assert client.get()['headers']['Location'] == 'blah'
+
+ assert 'error' in client.conf_post(
+ '"blah"', 'routes/0/action/location'
+ ), 'location method not allowed'
+ assert client.get()['headers']['Location'] == 'blah'
+
+ assert 'success' in client.conf(
+ '"https://${host}${uri}"', 'routes/0/action/location'
+ ), 'location with variables'
+ assert client.get()['headers']['Location'] == 'https://localhost/'
+
+ assert 'success' in client.conf(
+ '"/#$host"', 'routes/0/action/location'
+ ), 'location with encoding and a variable'
+ assert client.get()['headers']['Location'] == '/#localhost'
+
+ assert (
+ client.get(headers={"Host": "#foo?bar", "Connection": "close"})[
+ 'headers'
+ ]['Location']
+ == "/#%23foo%3Fbar"
+ ), 'location with a variable with encoding'
+
+ assert 'success' in client.conf(
+ '""', 'routes/0/action/location'
+ ), 'location empty'
+ assert client.get()['headers']['Location'] == ''
+
+ assert 'success' in client.conf(
+ '"${host}"', 'routes/0/action/location'
+ ), 'location empty with variable'
+ assert (
+ client.get(headers={"Host": "", "Connection": "close"})['headers'][
+ 'Location'
+ ]
+ == ""
+ ), 'location with empty variable'
+
+
+def test_return_invalid():
+ def check_error(conf):
+ assert 'error' in client.conf(conf, 'routes/0/action')
+
+ check_error({"return": "200"})
+ check_error({"return": []})
+ check_error({"return": 80.1})
+ check_error({"return": 1000})
+ check_error({"return": -1})
+ check_error({"return": 200, "share": "/blah"})
+ check_error({"return": 200, "location": "$hos"})
+ check_error({"return": 200, "location": "$hostblah"})
+
+ assert 'error' in client.conf(
+ '001', 'routes/0/action/return'
+ ), 'leading zero'
+
+ check_error({"return": 301, "location": 0})
+ check_error({"return": 301, "location": []})
diff --git a/test/test_rewrite.py b/test/test_rewrite.py
index 3bc7df19..8d81ec49 100644
--- a/test/test_rewrite.py
+++ b/test/test_rewrite.py
@@ -1,219 +1,223 @@
import os
import pytest
-from unit.applications.proto import TestApplicationProto
-from unit.option import option
+from unit.applications.proto import ApplicationProto
+client = ApplicationProto()
-class TestRewrite(TestApplicationProto):
- prerequisites = {}
- def setup_method(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": "/"},
- "action": {"rewrite": "/new", "pass": "routes"},
- },
- {"match": {"uri": "/new"}, "action": {"return": 200}},
- ],
- "applications": {},
- "settings": {"http": {"log_route": True}},
- },
- ), 'set initial configuration'
-
- def set_rewrite(self, rewrite, uri):
- assert 'success' in self.conf(
- [
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
{
"match": {"uri": "/"},
- "action": {"rewrite": rewrite, "pass": "routes"},
+ "action": {"rewrite": "/new", "pass": "routes"},
},
- {"match": {"uri": uri}, "action": {"return": 200}},
+ {"match": {"uri": "/new"}, "action": {"return": 200}},
],
- 'routes',
- )
+ "applications": {},
+ "settings": {"http": {"log_route": True}},
+ },
+ ), 'set initial configuration'
- def test_rewrite(self):
- assert self.get()['status'] == 200
- assert (
- self.wait_for_record(rf'\[notice\].*"routes/1" selected')
- is not None
- )
- assert len(self.findall(rf'\[notice\].*URI rewritten to "/new"')) == 1
- assert len(self.findall(rf'\[notice\].*URI rewritten')) == 1
- self.set_rewrite("", "")
- assert self.get()['status'] == 200
+def set_rewrite(rewrite, uri):
+ assert 'success' in client.conf(
+ [
+ {
+ "match": {"uri": "/"},
+ "action": {"rewrite": rewrite, "pass": "routes"},
+ },
+ {"match": {"uri": uri}, "action": {"return": 200}},
+ ],
+ 'routes',
+ )
- def test_rewrite_variable(self):
- self.set_rewrite("/$host", "/localhost")
- assert self.get()['status'] == 200
- self.set_rewrite("${uri}a", "/a")
- assert self.get()['status'] == 200
+def test_rewrite(findall, wait_for_record):
+ assert client.get()['status'] == 200
+ assert wait_for_record(rf'\[notice\].*"routes/1" selected') is not None
+ assert len(findall(rf'\[notice\].*URI rewritten to "/new"')) == 1
+ assert len(findall(rf'\[notice\].*URI rewritten')) == 1
- def test_rewrite_encoded(self):
- assert 'success' in self.conf(
- [
- {
- "match": {"uri": "/f"},
- "action": {"rewrite": "${request_uri}oo", "pass": "routes"},
- },
- {"match": {"uri": "/foo"}, "action": {"return": 200}},
- ],
- 'routes',
- )
- assert self.get(url='/%66')['status'] == 200
+ set_rewrite("", "")
+ assert client.get()['status'] == 200
- assert 'success' in self.conf(
- [
- {
- "match": {"uri": "/f"},
- "action": {
- "rewrite": "${request_uri}o%6F",
- "pass": "routes",
- },
- },
- {"match": {"uri": "/foo"}, "action": {"return": 200}},
- ],
- 'routes',
- )
- assert self.get(url='/%66')['status'] == 200
- def test_rewrite_arguments(self):
- assert 'success' in self.conf(
- [
- {
- "match": {"uri": "/foo", "arguments": {"arg": "val"}},
- "action": {"rewrite": "/new?some", "pass": "routes"},
- },
- {
- "match": {"uri": "/new", "arguments": {"arg": "val"}},
- "action": {"return": 200},
- },
- ],
- 'routes',
- )
- assert self.get(url='/foo?arg=val')['status'] == 200
+def test_rewrite_variable():
+ set_rewrite("/$host", "/localhost")
+ assert client.get()['status'] == 200
- def test_rewrite_njs(self):
- if 'njs' not in option.available['modules'].keys():
- pytest.skip('NJS is not available')
+ set_rewrite("${uri}a", "/a")
+ assert client.get()['status'] == 200
- self.set_rewrite("`/${host}`", "/localhost")
- assert self.get()['status'] == 200
- def test_rewrite_location(self):
- def check_location(rewrite, expect):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "action": {
- "return": 301,
- "location": "$uri",
- "rewrite": rewrite,
- }
- }
- ],
- }
- )
- assert self.get()['headers']['Location'] == expect
+def test_rewrite_encoded():
+ assert 'success' in client.conf(
+ [
+ {
+ "match": {"uri": "/f"},
+ "action": {"rewrite": "${request_uri}oo", "pass": "routes"},
+ },
+ {"match": {"uri": "/foo"}, "action": {"return": 200}},
+ ],
+ 'routes',
+ )
+ assert client.get(url='/%66')['status'] == 200
+
+ assert 'success' in client.conf(
+ [
+ {
+ "match": {"uri": "/f"},
+ "action": {
+ "rewrite": "${request_uri}o%6F",
+ "pass": "routes",
+ },
+ },
+ {"match": {"uri": "/foo"}, "action": {"return": 200}},
+ ],
+ 'routes',
+ )
+ assert client.get(url='/%66')['status'] == 200
- check_location('/new', '/new')
- check_location('${request_uri}new', '/new')
- def test_rewrite_share(self, temp_dir):
- os.makedirs(f'{temp_dir}/dir')
- os.makedirs(f'{temp_dir}/foo')
+def test_rewrite_arguments():
+ assert 'success' in client.conf(
+ [
+ {
+ "match": {"uri": "/foo", "arguments": {"arg": "val"}},
+ "action": {"rewrite": "/new?some", "pass": "routes"},
+ },
+ {
+ "match": {"uri": "/new", "arguments": {"arg": "val"}},
+ "action": {"return": 200},
+ },
+ ],
+ 'routes',
+ )
+ assert client.get(url='/foo?arg=val')['status'] == 200
+
+
+def test_rewrite_njs(require):
+ require({'modules': {'njs': 'any'}})
- with open(f'{temp_dir}/foo/index.html', 'w') as fooindex:
- fooindex.write('fooindex')
+ set_rewrite("`/${host}`", "/localhost")
+ assert client.get()['status'] == 200
- # same action block
- assert 'success' in self.conf(
+def test_rewrite_location():
+ def check_location(rewrite, expect):
+ assert 'success' in client.conf(
{
"listeners": {"*:7080": {"pass": "routes"}},
"routes": [
{
"action": {
- "rewrite": "${request_uri}dir",
- "share": f'{temp_dir}$uri',
+ "return": 301,
+ "location": "$uri",
+ "rewrite": rewrite,
}
}
],
}
)
+ assert client.get()['headers']['Location'] == expect
- resp = self.get()
- assert resp['status'] == 301, 'redirect status'
- assert resp['headers']['Location'] == '/dir/', 'redirect Location'
+ check_location('/new', '/new')
+ check_location('${request_uri}new', '/new')
- # request_uri
- index_path = f'{temp_dir}${{request_uri}}/index.html'
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": "/foo"},
- "action": {
- "rewrite": "${request_uri}dir",
- "pass": "routes",
- },
- },
- {"action": {"share": index_path}},
- ],
- }
- )
+def test_rewrite_share(temp_dir):
+ os.makedirs(f'{temp_dir}/dir')
+ os.makedirs(f'{temp_dir}/foo')
- assert self.get(url='/foo')['body'] == 'fooindex'
+ with open(f'{temp_dir}/foo/index.html', 'w') as fooindex:
+ fooindex.write('fooindex')
- # different action block
+ # same action block
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"uri": "/foo"},
- "action": {
- "rewrite": "${request_uri}dir",
- "pass": "routes",
- },
- },
- {
- "action": {
- "share": f'{temp_dir}/dir',
- }
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "action": {
+ "rewrite": "${request_uri}dir",
+ "share": f'{temp_dir}$uri',
+ }
+ }
+ ],
+ }
+ )
+
+ resp = client.get()
+ assert resp['status'] == 301, 'redirect status'
+ assert resp['headers']['Location'] == '/dir/', 'redirect Location'
+
+ # request_uri
+
+ index_path = f'{temp_dir}${{request_uri}}/index.html'
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"uri": "/foo"},
+ "action": {
+ "rewrite": "${request_uri}dir",
+ "pass": "routes",
},
- ],
- }
- )
- resp = self.get(url='/foo')
- assert resp['status'] == 301, 'redirect status 2'
- assert resp['headers']['Location'] == '/foodir/', 'redirect Location 2'
+ },
+ {"action": {"share": index_path}},
+ ],
+ }
+ )
- def test_rewrite_invalid(self, skip_alert):
- skip_alert(r'failed to apply new conf')
+ assert client.get(url='/foo')['body'] == 'fooindex'
- def check_rewrite(rewrite):
- assert 'error' in self.conf(
- [
- {
- "match": {"uri": "/"},
- "action": {"rewrite": rewrite, "pass": "routes"},
+ # different action block
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"uri": "/foo"},
+ "action": {
+ "rewrite": "${request_uri}dir",
+ "pass": "routes",
},
- {"action": {"return": 200}},
- ],
- 'routes',
- )
+ },
+ {
+ "action": {
+ "share": f'{temp_dir}/dir',
+ }
+ },
+ ],
+ }
+ )
+ resp = client.get(url='/foo')
+ assert resp['status'] == 301, 'redirect status 2'
+ assert resp['headers']['Location'] == '/foodir/', 'redirect Location 2'
+
+
+def test_rewrite_invalid(skip_alert):
+ skip_alert(r'failed to apply new conf')
+
+ def check_rewrite(rewrite):
+ assert 'error' in client.conf(
+ [
+ {
+ "match": {"uri": "/"},
+ "action": {"rewrite": rewrite, "pass": "routes"},
+ },
+ {"action": {"return": 200}},
+ ],
+ 'routes',
+ )
- check_rewrite("/$blah")
- check_rewrite(["/"])
+ check_rewrite("/$blah")
+ check_rewrite(["/"])
diff --git a/test/test_routing.py b/test/test_routing.py
index a4806d5c..a18edb04 100644
--- a/test/test_routing.py
+++ b/test/test_routing.py
@@ -1,1914 +1,2008 @@
# -*- coding: utf-8 -*-
import pytest
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {'modules': {'python': 'any'}}
-class TestRouting(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def setup_method(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "match": {"method": "GET"},
- "action": {"return": 200},
- }
- ],
- "applications": {},
- }
- ), 'routing configure'
- def route(self, route):
- return self.conf([route], 'routes')
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {"method": "GET"},
+ "action": {"return": 200},
+ }
+ ],
+ "applications": {},
+ }
+ ), 'routing configure'
- def route_match(self, match):
- assert 'success' in self.route(
- {"match": match, "action": {"return": 200}}
- ), 'route match configure'
- def route_match_invalid(self, match):
- assert 'error' in self.route(
- {"match": match, "action": {"return": 200}}
- ), 'route match configure invalid'
+def route(route):
+ return client.conf([route], 'routes')
- def host(self, host, status):
- assert (
- self.get(headers={'Host': host, 'Connection': 'close'})['status']
- == status
- ), 'match host'
- def cookie(self, cookie, status):
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': cookie,
- 'Connection': 'close',
- },
- )['status']
- == status
- ), 'match cookie'
+def route_match(match):
+ assert 'success' in route(
+ {"match": match, "action": {"return": 200}}
+ ), 'route match configure'
- def test_routes_match_method_positive(self):
- assert self.get()['status'] == 200, 'GET'
- assert self.post()['status'] == 404, 'POST'
- def test_routes_match_method_positive_many(self):
- self.route_match({"method": ["GET", "POST"]})
+def route_match_invalid(match):
+ assert 'error' in route(
+ {"match": match, "action": {"return": 200}}
+ ), 'route match configure invalid'
- assert self.get()['status'] == 200, 'GET'
- assert self.post()['status'] == 200, 'POST'
- assert self.delete()['status'] == 404, 'DELETE'
- def test_routes_match_method_negative(self):
- self.route_match({"method": "!GET"})
+def host(host, status):
+ assert (
+ client.get(headers={'Host': host, 'Connection': 'close'})['status']
+ == status
+ ), 'match host'
- assert self.get()['status'] == 404, 'GET'
- assert self.post()['status'] == 200, 'POST'
- def test_routes_match_method_negative_many(self):
- self.route_match({"method": ["!GET", "!POST"]})
+def cookie(cookie, status):
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': cookie,
+ 'Connection': 'close',
+ },
+ )['status']
+ == status
+ ), 'match cookie'
- assert self.get()['status'] == 404, 'GET'
- assert self.post()['status'] == 404, 'POST'
- assert self.delete()['status'] == 200, 'DELETE'
- def test_routes_match_method_wildcard_left(self):
- self.route_match({"method": "*ET"})
+def test_routes_match_method_positive():
+ assert client.get()['status'] == 200, 'GET'
+ assert client.post()['status'] == 404, 'POST'
- assert self.get()['status'] == 200, 'GET'
- assert self.post()['status'] == 404, 'POST'
- def test_routes_match_method_wildcard_right(self):
- self.route_match({"method": "GE*"})
+def test_routes_match_method_positive_many():
+ route_match({"method": ["GET", "POST"]})
- assert self.get()['status'] == 200, 'GET'
- assert self.post()['status'] == 404, 'POST'
+ assert client.get()['status'] == 200, 'GET'
+ assert client.post()['status'] == 200, 'POST'
+ assert client.delete()['status'] == 404, 'DELETE'
- def test_routes_match_method_wildcard_left_right(self):
- self.route_match({"method": "*GET*"})
- assert self.get()['status'] == 200, 'GET'
- assert self.post()['status'] == 404, 'POST'
+def test_routes_match_method_negative():
+ route_match({"method": "!GET"})
- def test_routes_match_method_wildcard(self):
- self.route_match({"method": "*"})
+ assert client.get()['status'] == 404, 'GET'
+ assert client.post()['status'] == 200, 'POST'
- assert self.get()['status'] == 200, 'GET'
- def test_routes_match_invalid(self):
- self.route_match_invalid({"method": "**"})
+def test_routes_match_method_negative_many():
+ route_match({"method": ["!GET", "!POST"]})
- def test_routes_match_valid(self):
- self.route_match({"method": "blah*"})
- self.route_match({"host": "*blah*blah"})
- self.route_match({"host": "blah*blah*blah"})
- self.route_match({"host": "blah*blah*"})
+ assert client.get()['status'] == 404, 'GET'
+ assert client.post()['status'] == 404, 'POST'
+ assert client.delete()['status'] == 200, 'DELETE'
- def test_routes_match_empty_exact(self):
- self.route_match({"uri": ""})
- assert self.get()['status'] == 404
- self.route_match({"uri": "/"})
- assert self.get()['status'] == 200
- assert self.get(url='/blah')['status'] == 404
+def test_routes_match_method_wildcard_left():
+ route_match({"method": "*ET"})
- def test_routes_match_negative(self):
- self.route_match({"uri": "!"})
- assert self.get()['status'] == 200
+ assert client.get()['status'] == 200, 'GET'
+ assert client.post()['status'] == 404, 'POST'
- self.route_match({"uri": "!*"})
- assert self.get()['status'] == 404
- self.route_match({"uri": "!/"})
- assert self.get()['status'] == 404
- assert self.get(url='/blah')['status'] == 200
+def test_routes_match_method_wildcard_right():
+ route_match({"method": "GE*"})
- self.route_match({"uri": "!*blah"})
- assert self.get()['status'] == 200
- assert self.get(url='/bla')['status'] == 200
- assert self.get(url='/blah')['status'] == 404
- assert self.get(url='/blah1')['status'] == 200
+ assert client.get()['status'] == 200, 'GET'
+ assert client.post()['status'] == 404, 'POST'
- self.route_match({"uri": "!/blah*1*"})
- assert self.get()['status'] == 200
- assert self.get(url='/blah')['status'] == 200
- assert self.get(url='/blah1')['status'] == 404
- assert self.get(url='/blah12')['status'] == 404
- assert self.get(url='/blah2')['status'] == 200
- def test_routes_match_wildcard_middle(self):
- self.route_match({"host": "ex*le"})
+def test_routes_match_method_wildcard_left_right():
+ route_match({"method": "*GET*"})
- self.host('example', 200)
- self.host('www.example', 404)
- self.host('example.com', 404)
- self.host('exampl', 404)
+ assert client.get()['status'] == 200, 'GET'
+ assert client.post()['status'] == 404, 'POST'
- def test_routes_match_method_case_insensitive(self):
- self.route_match({"method": "get"})
- assert self.get()['status'] == 200, 'GET'
+def test_routes_match_method_wildcard():
+ route_match({"method": "*"})
- def test_routes_match_wildcard_left_case_insensitive(self):
- self.route_match({"method": "*get"})
- assert self.get()['status'] == 200, 'GET'
+ assert client.get()['status'] == 200, 'GET'
- self.route_match({"method": "*et"})
- assert self.get()['status'] == 200, 'GET'
- def test_routes_match_wildcard_middle_case_insensitive(self):
- self.route_match({"method": "g*t"})
+def test_routes_match_invalid():
+ route_match_invalid({"method": "**"})
- assert self.get()['status'] == 200, 'GET'
- def test_routes_match_wildcard_right_case_insensitive(self):
- self.route_match({"method": "get*"})
- assert self.get()['status'] == 200, 'GET'
+def test_routes_match_valid():
+ route_match({"method": "blah*"})
+ route_match({"host": "*blah*blah"})
+ route_match({"host": "blah*blah*blah"})
+ route_match({"host": "blah*blah*"})
- self.route_match({"method": "ge*"})
- assert self.get()['status'] == 200, 'GET'
- def test_routes_match_wildcard_substring_case_insensitive(self):
- self.route_match({"method": "*et*"})
+def test_routes_match_empty_exact():
+ route_match({"uri": ""})
+ assert client.get()['status'] == 404
- assert self.get()['status'] == 200, 'GET'
+ route_match({"uri": "/"})
+ assert client.get()['status'] == 200
+ assert client.get(url='/blah')['status'] == 404
- def test_routes_match_wildcard_left_case_sensitive(self):
- self.route_match({"uri": "*blah"})
- assert self.get(url='/blah')['status'] == 200, '/blah'
- assert self.get(url='/BLAH')['status'] == 404, '/BLAH'
+def test_routes_match_negative():
+ route_match({"uri": "!"})
+ assert client.get()['status'] == 200
- def test_routes_match_wildcard_middle_case_sensitive(self):
- self.route_match({"uri": "/b*h"})
+ route_match({"uri": "!*"})
+ assert client.get()['status'] == 404
- assert self.get(url='/blah')['status'] == 200, '/blah'
- assert self.get(url='/BLAH')['status'] == 404, '/BLAH'
+ route_match({"uri": "!/"})
+ assert client.get()['status'] == 404
+ assert client.get(url='/blah')['status'] == 200
- def test_route_match_wildcards_ordered(self):
- self.route_match({"uri": "/a*x*y*"})
+ route_match({"uri": "!*blah"})
+ assert client.get()['status'] == 200
+ assert client.get(url='/bla')['status'] == 200
+ assert client.get(url='/blah')['status'] == 404
+ assert client.get(url='/blah1')['status'] == 200
- assert self.get(url='/axy')['status'] == 200, '/axy'
- assert self.get(url='/ayx')['status'] == 404, '/ayx'
+ route_match({"uri": "!/blah*1*"})
+ assert client.get()['status'] == 200
+ assert client.get(url='/blah')['status'] == 200
+ assert client.get(url='/blah1')['status'] == 404
+ assert client.get(url='/blah12')['status'] == 404
+ assert client.get(url='/blah2')['status'] == 200
- def test_route_match_wildcards_adjust_start(self):
- self.route_match({"uri": "/bla*bla*"})
- assert self.get(url='/bla_foo')['status'] == 404, '/bla_foo'
+def test_routes_match_wildcard_middle():
+ route_match({"host": "ex*le"})
- def test_route_match_wildcards_adjust_start_substr(self):
- self.route_match({"uri": "*bla*bla*"})
+ host('example', 200)
+ host('www.example', 404)
+ host('example.com', 404)
+ host('exampl', 404)
- assert self.get(url='/bla_foo')['status'] == 404, '/bla_foo'
- def test_route_match_wildcards_adjust_end(self):
- self.route_match({"uri": "/bla*bla"})
+def test_routes_match_method_case_insensitive():
+ route_match({"method": "get"})
- assert self.get(url='/foo_bla')['status'] == 404, '/foo_bla'
+ assert client.get()['status'] == 200, 'GET'
- def test_routes_match_wildcard_right_case_sensitive(self):
- self.route_match({"uri": "/bla*"})
- assert self.get(url='/blah')['status'] == 200, '/blah'
- assert self.get(url='/BLAH')['status'] == 404, '/BLAH'
+def test_routes_match_wildcard_left_case_insensitive():
+ route_match({"method": "*get"})
+ assert client.get()['status'] == 200, 'GET'
- def test_routes_match_wildcard_substring_case_sensitive(self):
- self.route_match({"uri": "*bla*"})
+ route_match({"method": "*et"})
+ assert client.get()['status'] == 200, 'GET'
- assert self.get(url='/blah')['status'] == 200, '/blah'
- assert self.get(url='/BLAH')['status'] == 404, '/BLAH'
- def test_routes_match_many_wildcard_substrings_case_sensitive(self):
- self.route_match({"uri": "*a*B*c*"})
+def test_routes_match_wildcard_middle_case_insensitive():
+ route_match({"method": "g*t"})
- assert self.get(url='/blah-a-B-c-blah')['status'] == 200
- assert self.get(url='/a-B-c')['status'] == 200
- assert self.get(url='/aBc')['status'] == 200
- assert self.get(url='/aBCaBbc')['status'] == 200
- assert self.get(url='/ABc')['status'] == 404
+ assert client.get()['status'] == 200, 'GET'
- def test_routes_empty_regex(self):
- if not option.available['modules']['regex']:
- pytest.skip('requires regex')
- self.route_match({"uri": "~"})
- assert self.get(url='/')['status'] == 200, 'empty regexp'
- assert self.get(url='/anything')['status'] == 200, '/anything'
+def test_routes_match_wildcard_right_case_insensitive():
+ route_match({"method": "get*"})
+ assert client.get()['status'] == 200, 'GET'
- self.route_match({"uri": "!~"})
- assert self.get(url='/')['status'] == 404, 'empty regexp 2'
- assert self.get(url='/nothing')['status'] == 404, '/nothing'
+ route_match({"method": "ge*"})
+ assert client.get()['status'] == 200, 'GET'
- def test_routes_bad_regex(self):
- if not option.available['modules']['regex']:
- pytest.skip('requires regex')
- assert 'error' in self.route(
- {"match": {"uri": "~/bl[ah"}, "action": {"return": 200}}
- ), 'bad regex'
+def test_routes_match_wildcard_substring_case_insensitive():
+ route_match({"method": "*et*"})
- status = self.route(
- {"match": {"uri": "~(?R)?z"}, "action": {"return": 200}}
- )
- if 'error' not in status:
- assert self.get(url='/nothing_z')['status'] == 500, '/nothing_z'
+ assert client.get()['status'] == 200, 'GET'
- status = self.route(
- {"match": {"uri": "~((?1)?z)"}, "action": {"return": 200}}
- )
- if 'error' not in status:
- assert self.get(url='/nothing_z')['status'] == 500, '/nothing_z'
- def test_routes_match_regex_case_sensitive(self):
- if not option.available['modules']['regex']:
- pytest.skip('requires regex')
+def test_routes_match_wildcard_left_case_sensitive():
+ route_match({"uri": "*blah"})
- self.route_match({"uri": "~/bl[ah]"})
+ assert client.get(url='/blah')['status'] == 200, '/blah'
+ assert client.get(url='/BLAH')['status'] == 404, '/BLAH'
- assert self.get(url='/rlah')['status'] == 404, '/rlah'
- assert self.get(url='/blah')['status'] == 200, '/blah'
- assert self.get(url='/blh')['status'] == 200, '/blh'
- assert self.get(url='/BLAH')['status'] == 404, '/BLAH'
- def test_routes_match_regex_negative_case_sensitive(self):
- if not option.available['modules']['regex']:
- pytest.skip('requires regex')
+def test_routes_match_wildcard_middle_case_sensitive():
+ route_match({"uri": "/b*h"})
- self.route_match({"uri": "!~/bl[ah]"})
+ assert client.get(url='/blah')['status'] == 200, '/blah'
+ assert client.get(url='/BLAH')['status'] == 404, '/BLAH'
- assert self.get(url='/rlah')['status'] == 200, '/rlah'
- assert self.get(url='/blah')['status'] == 404, '/blah'
- assert self.get(url='/blh')['status'] == 404, '/blh'
- assert self.get(url='/BLAH')['status'] == 200, '/BLAH'
- def test_routes_pass_encode(self):
- python_dir = f'{option.test_dir}/python'
+def test_route_match_wildcards_ordered():
+ route_match({"uri": "/a*x*y*"})
- def check_pass(path, name):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": f'applications/{path}'}},
- "applications": {
- name: {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": f'{python_dir}/empty',
- "working_directory": f'{python_dir}/empty',
- "module": "wsgi",
- }
- },
- }
- )
+ assert client.get(url='/axy')['status'] == 200, '/axy'
+ assert client.get(url='/ayx')['status'] == 404, '/ayx'
- assert self.get()['status'] == 200
- check_pass("%25", "%")
- check_pass("blah%2Fblah", "blah/blah")
- check_pass("%2Fblah%2F%2Fblah%2F", "/blah//blah/")
- check_pass("%20blah%252Fblah%7E", " blah%2Fblah~")
+def test_route_match_wildcards_adjust_start():
+ route_match({"uri": "/bla*bla*"})
- def check_pass_error(path, name):
- assert 'error' in self.conf(
- {
- "listeners": {"*:7080": {"pass": f'applications/{path}'}},
- "applications": {
- name: {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": f'{python_dir}/empty',
- "working_directory": f'{python_dir}/empty',
- "module": "wsgi",
- }
- },
- }
- )
+ assert client.get(url='/bla_foo')['status'] == 404, '/bla_foo'
- check_pass_error("%", "%")
- check_pass_error("%1", "%1")
- def test_routes_absent(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7081": {"pass": "applications/empty"}},
- "applications": {
- "empty": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": f'{option.test_dir}/python/empty',
- "working_directory": f'{option.test_dir}/python/empty',
- "module": "wsgi",
- }
- },
- }
- )
+def test_route_match_wildcards_adjust_start_substr():
+ route_match({"uri": "*bla*bla*"})
- assert self.get(port=7081)['status'] == 200, 'routes absent'
+ assert client.get(url='/bla_foo')['status'] == 404, '/bla_foo'
- def test_routes_pass_invalid(self):
- assert 'error' in self.conf(
- {"pass": "routes/blah"}, 'listeners/*:7080'
- ), 'routes invalid'
- def test_route_empty(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes/main"}},
- "routes": {"main": []},
- "applications": {},
- }
- ), 'route empty configure'
+def test_route_match_wildcards_adjust_end():
+ route_match({"uri": "/bla*bla"})
- assert self.get()['status'] == 404, 'route empty'
+ assert client.get(url='/foo_bla')['status'] == 404, '/foo_bla'
- def test_routes_route_empty(self):
- assert 'success' in self.conf(
- {}, 'listeners'
- ), 'routes empty listeners configure'
- assert 'success' in self.conf({}, 'routes'), 'routes empty configure'
+def test_routes_match_wildcard_right_case_sensitive():
+ route_match({"uri": "/bla*"})
- def test_routes_route_match_absent(self):
- assert 'success' in self.conf(
- [{"action": {"return": 200}}], 'routes'
- ), 'route match absent configure'
+ assert client.get(url='/blah')['status'] == 200, '/blah'
+ assert client.get(url='/BLAH')['status'] == 404, '/BLAH'
- assert self.get()['status'] == 200, 'route match absent'
- def test_routes_route_action_absent(self, skip_alert):
- skip_alert(r'failed to apply new conf')
+def test_routes_match_wildcard_substring_case_sensitive():
+ route_match({"uri": "*bla*"})
- assert 'error' in self.conf(
- [{"match": {"method": "GET"}}], 'routes'
- ), 'route pass absent configure'
+ assert client.get(url='/blah')['status'] == 200, '/blah'
+ assert client.get(url='/BLAH')['status'] == 404, '/BLAH'
- def test_routes_route_pass(self):
- assert 'success' in self.conf(
- {
- "applications": {
- "app": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": "/app",
- "module": "wsgi",
- }
- },
- "upstreams": {
- "one": {
- "servers": {
- "127.0.0.1:7081": {},
- "127.0.0.1:7082": {},
- },
- },
- "two": {
- "servers": {
- "127.0.0.1:7081": {},
- "127.0.0.1:7082": {},
- },
- },
- },
- }
- )
- assert 'success' in self.conf(
- [{"action": {"pass": "routes"}}], 'routes'
- )
- assert 'success' in self.conf(
- [{"action": {"pass": "applications/app"}}], 'routes'
- )
- assert 'success' in self.conf(
- [{"action": {"pass": "upstreams/one"}}], 'routes'
- )
+def test_routes_match_many_wildcard_substrings_case_sensitive():
+ route_match({"uri": "*a*B*c*"})
+
+ assert client.get(url='/blah-a-B-c-blah')['status'] == 200
+ assert client.get(url='/a-B-c')['status'] == 200
+ assert client.get(url='/aBc')['status'] == 200
+ assert client.get(url='/aBCaBbc')['status'] == 200
+ assert client.get(url='/ABc')['status'] == 404
+
+
+def test_routes_empty_regex(require):
+ require({'modules': {'regex': True}})
+
+ route_match({"uri": "~"})
+ assert client.get(url='/')['status'] == 200, 'empty regexp'
+ assert client.get(url='/anything')['status'] == 200, '/anything'
+
+ route_match({"uri": "!~"})
+ assert client.get(url='/')['status'] == 404, 'empty regexp 2'
+ assert client.get(url='/nothing')['status'] == 404, '/nothing'
+
+
+def test_routes_bad_regex(require):
+ require({'modules': {'regex': True}})
+
+ assert 'error' in route(
+ {"match": {"uri": "~/bl[ah"}, "action": {"return": 200}}
+ ), 'bad regex'
+
+ status = route({"match": {"uri": "~(?R)?z"}, "action": {"return": 200}})
+ if 'error' not in status:
+ assert client.get(url='/nothing_z')['status'] == 500, '/nothing_z'
- def test_routes_route_pass_absent(self):
- assert 'error' in self.conf(
- [{"match": {"method": "GET"}, "action": {}}], 'routes'
- ), 'route pass absent configure'
+ status = route({"match": {"uri": "~((?1)?z)"}, "action": {"return": 200}})
+ if 'error' not in status:
+ assert client.get(url='/nothing_z')['status'] == 500, '/nothing_z'
- def test_routes_route_pass_invalid(self):
- assert 'success' in self.conf(
+
+def test_routes_match_regex_case_sensitive(require):
+ require({'modules': {'regex': True}})
+
+ route_match({"uri": "~/bl[ah]"})
+
+ assert client.get(url='/rlah')['status'] == 404, '/rlah'
+ assert client.get(url='/blah')['status'] == 200, '/blah'
+ assert client.get(url='/blh')['status'] == 200, '/blh'
+ assert client.get(url='/BLAH')['status'] == 404, '/BLAH'
+
+
+def test_routes_match_regex_negative_case_sensitive(require):
+ require({'modules': {'regex': True}})
+
+ route_match({"uri": "!~/bl[ah]"})
+
+ assert client.get(url='/rlah')['status'] == 200, '/rlah'
+ assert client.get(url='/blah')['status'] == 404, '/blah'
+ assert client.get(url='/blh')['status'] == 404, '/blh'
+ assert client.get(url='/BLAH')['status'] == 200, '/BLAH'
+
+
+def test_routes_pass_encode():
+ python_dir = f'{option.test_dir}/python'
+
+ def check_pass(path, name):
+ assert 'success' in client.conf(
{
+ "listeners": {"*:7080": {"pass": f'applications/{path}'}},
"applications": {
- "app": {
- "type": self.get_application_type(),
+ name: {
+ "type": client.get_application_type(),
"processes": {"spare": 0},
- "path": "/app",
+ "path": f'{python_dir}/empty',
+ "working_directory": f'{python_dir}/empty',
"module": "wsgi",
}
},
- "upstreams": {
- "one": {
- "servers": {
- "127.0.0.1:7081": {},
- "127.0.0.1:7082": {},
- },
- },
- "two": {
- "servers": {
- "127.0.0.1:7081": {},
- "127.0.0.1:7082": {},
- },
- },
- },
}
)
- assert 'error' in self.conf(
- [{"action": {"pass": "blah"}}], 'routes'
- ), 'route pass invalid'
- assert 'error' in self.conf(
- [{"action": {"pass": "routes/blah"}}], 'routes'
- ), 'route pass routes invalid'
- assert 'error' in self.conf(
- [{"action": {"pass": "applications/blah"}}], 'routes'
- ), 'route pass applications invalid'
- assert 'error' in self.conf(
- [{"action": {"pass": "upstreams/blah"}}], 'routes'
- ), 'route pass upstreams invalid'
-
- def test_routes_action_unique(self, temp_dir):
- assert 'success' in self.conf(
+ assert client.get()['status'] == 200
+
+ check_pass("%25", "%")
+ check_pass("blah%2Fblah", "blah/blah")
+ check_pass("%2Fblah%2F%2Fblah%2F", "/blah//blah/")
+ check_pass("%20blah%252Fblah%7E", " blah%2Fblah~")
+
+ def check_pass_error(path, name):
+ assert 'error' in client.conf(
{
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {"pass": "applications/app"},
- },
- "routes": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
+ "listeners": {"*:7080": {"pass": f'applications/{path}'}},
"applications": {
- "app": {
- "type": self.get_application_type(),
+ name: {
+ "type": client.get_application_type(),
"processes": {"spare": 0},
- "path": "/app",
+ "path": f'{python_dir}/empty',
+ "working_directory": f'{python_dir}/empty',
"module": "wsgi",
}
},
}
)
- assert 'error' in self.conf(
- {"proxy": "http://127.0.0.1:7081", "share": temp_dir},
- 'routes/0/action',
- ), 'proxy share'
- assert 'error' in self.conf(
- {
- "proxy": "http://127.0.0.1:7081",
- "pass": "applications/app",
+ check_pass_error("%", "%")
+ check_pass_error("%1", "%1")
+
+
+def test_routes_absent():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7081": {"pass": "applications/empty"}},
+ "applications": {
+ "empty": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": f'{option.test_dir}/python/empty',
+ "working_directory": f'{option.test_dir}/python/empty',
+ "module": "wsgi",
+ }
},
- 'routes/0/action',
- ), 'proxy pass'
- assert 'error' in self.conf(
- {"share": temp_dir, "pass": "applications/app"},
- 'routes/0/action',
- ), 'share pass'
-
- def test_routes_rules_two(self):
- assert 'success' in self.conf(
- [
- {"match": {"method": "GET"}, "action": {"return": 200}},
- {"match": {"method": "POST"}, "action": {"return": 201}},
- ],
- 'routes',
- ), 'rules two configure'
+ }
+ )
- assert self.get()['status'] == 200, 'rules two match first'
- assert self.post()['status'] == 201, 'rules two match second'
+ assert client.get(port=7081)['status'] == 200, 'routes absent'
- def test_routes_two(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes/first"}},
- "routes": {
- "first": [
- {
- "match": {"method": "GET"},
- "action": {"pass": "routes/second"},
- }
- ],
- "second": [
- {
- "match": {"host": "localhost"},
- "action": {"return": 200},
- }
- ],
+
+def test_routes_pass_invalid():
+ assert 'error' in client.conf(
+ {"pass": "routes/blah"}, 'listeners/*:7080'
+ ), 'routes invalid'
+
+
+def test_route_empty():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes/main"}},
+ "routes": {"main": []},
+ "applications": {},
+ }
+ ), 'route empty configure'
+
+ assert client.get()['status'] == 404, 'route empty'
+
+
+def test_routes_route_empty():
+ assert 'success' in client.conf(
+ {}, 'listeners'
+ ), 'routes empty listeners configure'
+
+ assert 'success' in client.conf({}, 'routes'), 'routes empty configure'
+
+
+def test_routes_route_match_absent():
+ assert 'success' in client.conf(
+ [{"action": {"return": 200}}], 'routes'
+ ), 'route match absent configure'
+
+ assert client.get()['status'] == 200, 'route match absent'
+
+
+def test_routes_route_action_absent(skip_alert):
+ skip_alert(r'failed to apply new conf')
+
+ assert 'error' in client.conf(
+ [{"match": {"method": "GET"}}], 'routes'
+ ), 'route pass absent configure'
+
+
+def test_routes_route_pass():
+ assert 'success' in client.conf(
+ {
+ "applications": {
+ "app": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ }
+ },
+ "upstreams": {
+ "one": {
+ "servers": {
+ "127.0.0.1:7081": {},
+ "127.0.0.1:7082": {},
+ },
},
- "applications": {},
- }
- ), 'routes two configure'
+ "two": {
+ "servers": {
+ "127.0.0.1:7081": {},
+ "127.0.0.1:7082": {},
+ },
+ },
+ },
+ }
+ )
+
+ assert 'success' in client.conf([{"action": {"pass": "routes"}}], 'routes')
+ assert 'success' in client.conf(
+ [{"action": {"pass": "applications/app"}}], 'routes'
+ )
+ assert 'success' in client.conf(
+ [{"action": {"pass": "upstreams/one"}}], 'routes'
+ )
+
+
+def test_routes_route_pass_absent():
+ assert 'error' in client.conf(
+ [{"match": {"method": "GET"}, "action": {}}], 'routes'
+ ), 'route pass absent configure'
+
+
+def test_routes_route_pass_invalid():
+ assert 'success' in client.conf(
+ {
+ "applications": {
+ "app": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ }
+ },
+ "upstreams": {
+ "one": {
+ "servers": {
+ "127.0.0.1:7081": {},
+ "127.0.0.1:7082": {},
+ },
+ },
+ "two": {
+ "servers": {
+ "127.0.0.1:7081": {},
+ "127.0.0.1:7082": {},
+ },
+ },
+ },
+ }
+ )
+
+ assert 'error' in client.conf(
+ [{"action": {"pass": "blah"}}], 'routes'
+ ), 'route pass invalid'
+ assert 'error' in client.conf(
+ [{"action": {"pass": "routes/blah"}}], 'routes'
+ ), 'route pass routes invalid'
+ assert 'error' in client.conf(
+ [{"action": {"pass": "applications/blah"}}], 'routes'
+ ), 'route pass applications invalid'
+ assert 'error' in client.conf(
+ [{"action": {"pass": "upstreams/blah"}}], 'routes'
+ ), 'route pass upstreams invalid'
+
+
+def test_routes_action_unique(temp_dir):
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {"pass": "applications/app"},
+ },
+ "routes": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
+ "applications": {
+ "app": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": "/app",
+ "module": "wsgi",
+ }
+ },
+ }
+ )
+
+ assert 'error' in client.conf(
+ {"proxy": "http://127.0.0.1:7081", "share": temp_dir},
+ 'routes/0/action',
+ ), 'proxy share'
+ assert 'error' in client.conf(
+ {
+ "proxy": "http://127.0.0.1:7081",
+ "pass": "applications/app",
+ },
+ 'routes/0/action',
+ ), 'proxy pass'
+ assert 'error' in client.conf(
+ {"share": temp_dir, "pass": "applications/app"},
+ 'routes/0/action',
+ ), 'share pass'
+
+
+def test_routes_rules_two():
+ assert 'success' in client.conf(
+ [
+ {"match": {"method": "GET"}, "action": {"return": 200}},
+ {"match": {"method": "POST"}, "action": {"return": 201}},
+ ],
+ 'routes',
+ ), 'rules two configure'
+
+ assert client.get()['status'] == 200, 'rules two match first'
+ assert client.post()['status'] == 201, 'rules two match second'
+
+
+def test_routes_two():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes/first"}},
+ "routes": {
+ "first": [
+ {
+ "match": {"method": "GET"},
+ "action": {"pass": "routes/second"},
+ }
+ ],
+ "second": [
+ {
+ "match": {"host": "localhost"},
+ "action": {"return": 200},
+ }
+ ],
+ },
+ "applications": {},
+ }
+ ), 'routes two configure'
- assert self.get()['status'] == 200, 'routes two'
+ assert client.get()['status'] == 200, 'routes two'
- def test_routes_match_host_positive(self):
- self.route_match({"host": "localhost"})
- assert self.get()['status'] == 200, 'localhost'
- self.host('localhost.', 200)
- self.host('localhost.', 200)
- self.host('.localhost', 404)
- self.host('www.localhost', 404)
- self.host('localhost1', 404)
+def test_routes_match_host_positive():
+ route_match({"host": "localhost"})
- @pytest.mark.skip('not yet')
- def test_routes_match_host_absent(self):
- self.route_match({"host": "localhost"})
+ assert client.get()['status'] == 200, 'localhost'
+ host('localhost.', 200)
+ host('localhost.', 200)
+ host('.localhost', 404)
+ host('www.localhost', 404)
+ host('localhost1', 404)
- assert (
- self.get(headers={'Connection': 'close'})['status'] == 400
- ), 'match host absent'
- def test_routes_match_host_ipv4(self):
- self.route_match({"host": "127.0.0.1"})
+@pytest.mark.skip('not yet')
+def test_routes_match_host_absent():
+ route_match({"host": "localhost"})
- self.host('127.0.0.1', 200)
- self.host('127.0.0.1:7080', 200)
+ assert (
+ client.get(headers={'Connection': 'close'})['status'] == 400
+ ), 'match host absent'
- def test_routes_match_host_ipv6(self):
- self.route_match({"host": "[::1]"})
- self.host('[::1]', 200)
- self.host('[::1]:7080', 200)
+def test_routes_match_host_ipv4():
+ route_match({"host": "127.0.0.1"})
- def test_routes_match_host_positive_many(self):
- self.route_match({"host": ["localhost", "example.com"]})
+ host('127.0.0.1', 200)
+ host('127.0.0.1:7080', 200)
- assert self.get()['status'] == 200, 'localhost'
- self.host('example.com', 200)
- def test_routes_match_host_positive_and_negative(self):
- self.route_match({"host": ["*example.com", "!www.example.com"]})
+def test_routes_match_host_ipv6():
+ route_match({"host": "[::1]"})
- assert self.get()['status'] == 404, 'localhost'
- self.host('example.com', 200)
- self.host('www.example.com', 404)
- self.host('!www.example.com', 200)
+ host('[::1]', 200)
+ host('[::1]:7080', 200)
- def test_routes_match_host_positive_and_negative_wildcard(self):
- self.route_match({"host": ["*example*", "!www.example*"]})
- self.host('example.com', 200)
- self.host('www.example.com', 404)
+def test_routes_match_host_positive_many():
+ route_match({"host": ["localhost", "example.com"]})
- def test_routes_match_host_case_insensitive(self):
- self.route_match({"host": "Example.com"})
+ assert client.get()['status'] == 200, 'localhost'
+ host('example.com', 200)
- self.host('example.com', 200)
- self.host('EXAMPLE.COM', 200)
- def test_routes_match_host_port(self):
- self.route_match({"host": "example.com"})
+def test_routes_match_host_positive_and_negative():
+ route_match({"host": ["*example.com", "!www.example.com"]})
- self.host('example.com:7080', 200)
+ assert client.get()['status'] == 404, 'localhost'
+ host('example.com', 200)
+ host('www.example.com', 404)
+ host('!www.example.com', 200)
- def test_routes_match_host_empty(self):
- self.route_match({"host": ""})
- self.host('', 200)
- assert (
- self.get(http_10=True, headers={})['status'] == 200
- ), 'match host empty 2'
- assert self.get()['status'] == 404, 'match host empty 3'
-
- def test_routes_match_uri_positive(self):
- self.route_match({"uri": ["/blah", "/slash/"]})
-
- assert self.get()['status'] == 404, '/'
- assert self.get(url='/blah')['status'] == 200, '/blah'
- assert self.get(url='/blah#foo')['status'] == 200, '/blah#foo'
- assert self.get(url='/blah?var')['status'] == 200, '/blah?var'
- assert self.get(url='//blah')['status'] == 200, '//blah'
- assert self.get(url='/slash/foo/../')['status'] == 200, 'relative'
- assert self.get(url='/slash/./')['status'] == 200, '/slash/./'
- assert self.get(url='/slash//.//')['status'] == 200, 'adjacent slashes'
- assert self.get(url='/%')['status'] == 400, 'percent'
- assert self.get(url='/%1')['status'] == 400, 'percent digit'
- assert self.get(url='/%A')['status'] == 400, 'percent letter'
- assert self.get(url='/slash/.?args')['status'] == 200, 'dot args'
- assert self.get(url='/slash/.#frag')['status'] == 200, 'dot frag'
- assert (
- self.get(url='/slash/foo/..?args')['status'] == 200
- ), 'dot dot args'
- assert (
- self.get(url='/slash/foo/..#frag')['status'] == 200
- ), 'dot dot frag'
- assert self.get(url='/slash/.')['status'] == 200, 'trailing dot'
- assert (
- self.get(url='/slash/foo/..')['status'] == 200
- ), 'trailing dot dot'
+def test_routes_match_host_positive_and_negative_wildcard():
+ route_match({"host": ["*example*", "!www.example*"]})
- def test_routes_match_uri_case_sensitive(self):
- self.route_match({"uri": "/BLAH"})
+ host('example.com', 200)
+ host('www.example.com', 404)
- assert self.get(url='/blah')['status'] == 404, '/blah'
- assert self.get(url='/BlaH')['status'] == 404, '/BlaH'
- assert self.get(url='/BLAH')['status'] == 200, '/BLAH'
- def test_routes_match_uri_normalize(self):
- self.route_match({"uri": "/blah"})
+def test_routes_match_host_case_insensitive():
+ route_match({"host": "Example.com"})
- assert self.get(url='/%62%6c%61%68')['status'] == 200, 'normalize'
+ host('example.com', 200)
+ host('EXAMPLE.COM', 200)
- def test_routes_match_empty_array(self):
- self.route_match({"uri": []})
- assert self.get(url='/blah')['status'] == 200, 'empty array'
+def test_routes_match_host_port():
+ route_match({"host": "example.com"})
- def test_routes_reconfigure(self):
- assert 'success' in self.conf([], 'routes'), 'redefine'
- assert self.get()['status'] == 404, 'redefine request'
+ host('example.com:7080', 200)
- assert 'success' in self.conf(
- [{"action": {"return": 200}}], 'routes'
- ), 'redefine 2'
- assert self.get()['status'] == 200, 'redefine request 2'
- assert 'success' in self.conf([], 'routes'), 'redefine 3'
- assert self.get()['status'] == 404, 'redefine request 3'
+def test_routes_match_host_empty():
+ route_match({"host": ""})
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes/main"}},
- "routes": {"main": [{"action": {"return": 200}}]},
- "applications": {},
- }
- ), 'redefine 4'
- assert self.get()['status'] == 200, 'redefine request 4'
-
- assert 'success' in self.conf_delete('routes/main/0'), 'redefine 5'
- assert self.get()['status'] == 404, 'redefine request 5'
-
- assert 'success' in self.conf_post(
- {"action": {"return": 200}}, 'routes/main'
- ), 'redefine 6'
- assert self.get()['status'] == 200, 'redefine request 6'
-
- assert 'error' in self.conf(
- {"action": {"return": 200}}, 'routes/main/2'
- ), 'redefine 7'
- assert 'success' in self.conf(
- {"action": {"return": 201}}, 'routes/main/1'
- ), 'redefine 8'
-
- assert len(self.conf_get('routes/main')) == 2, 'redefine conf 8'
- assert self.get()['status'] == 200, 'redefine request 8'
-
- def test_routes_edit(self):
- self.route_match({"method": "GET"})
-
- assert self.get()['status'] == 200, 'routes edit GET'
- assert self.post()['status'] == 404, 'routes edit POST'
-
- assert 'success' in self.conf_post(
- {"match": {"method": "POST"}, "action": {"return": 200}},
- 'routes',
- ), 'routes edit configure 2'
- assert 'GET' == self.conf_get(
- 'routes/0/match/method'
- ), 'routes edit configure 2 check'
- assert 'POST' == self.conf_get(
- 'routes/1/match/method'
- ), 'routes edit configure 2 check 2'
-
- assert self.get()['status'] == 200, 'routes edit GET 2'
- assert self.post()['status'] == 200, 'routes edit POST 2'
-
- assert 'success' in self.conf_delete(
- 'routes/0'
- ), 'routes edit configure 3'
-
- assert self.get()['status'] == 404, 'routes edit GET 3'
- assert self.post()['status'] == 200, 'routes edit POST 3'
-
- assert 'error' in self.conf_delete(
- 'routes/1'
- ), 'routes edit configure invalid'
- assert 'error' in self.conf_delete(
- 'routes/-1'
- ), 'routes edit configure invalid 2'
- assert 'error' in self.conf_delete(
- 'routes/blah'
- ), 'routes edit configure invalid 3'
-
- assert self.get()['status'] == 404, 'routes edit GET 4'
- assert self.post()['status'] == 200, 'routes edit POST 4'
-
- assert 'success' in self.conf_delete(
- 'routes/0'
- ), 'routes edit configure 5'
-
- assert self.get()['status'] == 404, 'routes edit GET 5'
- assert self.post()['status'] == 404, 'routes edit POST 5'
-
- assert 'success' in self.conf_post(
- {"match": {"method": "POST"}, "action": {"return": 200}},
- 'routes',
- ), 'routes edit configure 6'
-
- assert self.get()['status'] == 404, 'routes edit GET 6'
- assert self.post()['status'] == 200, 'routes edit POST 6'
-
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes/main"}},
- "routes": {"main": [{"action": {"return": 200}}]},
- "applications": {},
- }
- ), 'route edit configure 7'
-
- assert 'error' in self.conf_delete(
- 'routes/0'
- ), 'routes edit configure invalid 4'
- assert 'error' in self.conf_delete(
- 'routes/main'
- ), 'routes edit configure invalid 5'
-
- assert self.get()['status'] == 200, 'routes edit GET 7'
-
- assert 'success' in self.conf_delete(
- 'listeners/*:7080'
- ), 'route edit configure 8'
- assert 'success' in self.conf_delete(
- 'routes/main'
- ), 'route edit configure 9'
-
- def test_match_edit(self, skip_alert):
- skip_alert(r'failed to apply new conf')
-
- self.route_match({"method": ["GET", "POST"]})
-
- assert self.get()['status'] == 200, 'match edit GET'
- assert self.post()['status'] == 200, 'match edit POST'
- assert self.put()['status'] == 404, 'match edit PUT'
-
- assert 'success' in self.conf_post(
- '\"PUT\"', 'routes/0/match/method'
- ), 'match edit configure 2'
- assert ['GET', 'POST', 'PUT'] == self.conf_get(
- 'routes/0/match/method'
- ), 'match edit configure 2 check'
-
- assert self.get()['status'] == 200, 'match edit GET 2'
- assert self.post()['status'] == 200, 'match edit POST 2'
- assert self.put()['status'] == 200, 'match edit PUT 2'
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/method/1'
- ), 'match edit configure 3'
- assert ['GET', 'PUT'] == self.conf_get(
- 'routes/0/match/method'
- ), 'match edit configure 3 check'
-
- assert self.get()['status'] == 200, 'match edit GET 3'
- assert self.post()['status'] == 404, 'match edit POST 3'
- assert self.put()['status'] == 200, 'match edit PUT 3'
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/method/1'
- ), 'match edit configure 4'
- assert ['GET'] == self.conf_get(
- 'routes/0/match/method'
- ), 'match edit configure 4 check'
-
- assert self.get()['status'] == 200, 'match edit GET 4'
- assert self.post()['status'] == 404, 'match edit POST 4'
- assert self.put()['status'] == 404, 'match edit PUT 4'
-
- assert 'error' in self.conf_delete(
- 'routes/0/match/method/1'
- ), 'match edit configure invalid'
- assert 'error' in self.conf_delete(
- 'routes/0/match/method/-1'
- ), 'match edit configure invalid 2'
- assert 'error' in self.conf_delete(
- 'routes/0/match/method/blah'
- ), 'match edit configure invalid 3'
- assert ['GET'] == self.conf_get(
- 'routes/0/match/method'
- ), 'match edit configure 5 check'
-
- assert self.get()['status'] == 200, 'match edit GET 5'
- assert self.post()['status'] == 404, 'match edit POST 5'
- assert self.put()['status'] == 404, 'match edit PUT 5'
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/method/0'
- ), 'match edit configure 6'
- assert [] == self.conf_get(
- 'routes/0/match/method'
- ), 'match edit configure 6 check'
-
- assert self.get()['status'] == 200, 'match edit GET 6'
- assert self.post()['status'] == 200, 'match edit POST 6'
- assert self.put()['status'] == 200, 'match edit PUT 6'
-
- assert 'success' in self.conf(
- '"GET"', 'routes/0/match/method'
- ), 'match edit configure 7'
-
- assert self.get()['status'] == 200, 'match edit GET 7'
- assert self.post()['status'] == 404, 'match edit POST 7'
- assert self.put()['status'] == 404, 'match edit PUT 7'
-
- assert 'error' in self.conf_delete(
- 'routes/0/match/method/0'
- ), 'match edit configure invalid 5'
- assert 'error' in self.conf(
- {}, 'routes/0/action'
- ), 'match edit configure invalid 6'
-
- assert 'success' in self.conf(
- {}, 'routes/0/match'
- ), 'match edit configure 8'
-
- assert self.get()['status'] == 200, 'match edit GET 8'
-
- def test_routes_match_rules(self):
- self.route_match({"method": "GET", "host": "localhost", "uri": "/"})
-
- assert self.get()['status'] == 200, 'routes match rules'
-
- def test_routes_loop(self):
- assert 'success' in self.route(
- {"match": {"uri": "/"}, "action": {"pass": "routes"}}
- ), 'routes loop configure'
-
- assert self.get()['status'] == 500, 'routes loop'
-
- def test_routes_match_headers(self):
- self.route_match({"headers": {"host": "localhost"}})
-
- assert self.get()['status'] == 200, 'match headers'
- self.host('Localhost', 200)
- self.host('localhost.com', 404)
- self.host('llocalhost', 404)
- self.host('host', 404)
-
- def test_routes_match_headers_multiple(self):
- self.route_match({"headers": {"host": "localhost", "x-blah": "test"}})
-
- assert self.get()['status'] == 404, 'match headers multiple'
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": "test",
- "Connection": "close",
- }
- )['status']
- == 200
- ), 'match headers multiple 2'
+ host('', 200)
+ assert (
+ client.get(http_10=True, headers={})['status'] == 200
+ ), 'match host empty 2'
+ assert client.get()['status'] == 404, 'match host empty 3'
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": "",
- "Connection": "close",
- }
- )['status']
- == 404
- ), 'match headers multiple 3'
- def test_routes_match_headers_multiple_values(self):
- self.route_match({"headers": {"x-blah": "test"}})
+def test_routes_match_uri_positive():
+ route_match({"uri": ["/blah", "/slash/"]})
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": ["test", "test", "test"],
- "Connection": "close",
- }
- )['status']
- == 200
- ), 'match headers multiple values'
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": ["test", "blah", "test"],
- "Connection": "close",
- }
- )['status']
- == 404
- ), 'match headers multiple values 2'
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": ["test", "", "test"],
- "Connection": "close",
- }
- )['status']
- == 404
- ), 'match headers multiple values 3'
+ assert client.get()['status'] == 404, '/'
+ assert client.get(url='/blah')['status'] == 200, '/blah'
+ assert client.get(url='/blah#foo')['status'] == 200, '/blah#foo'
+ assert client.get(url='/blah?var')['status'] == 200, '/blah?var'
+ assert client.get(url='//blah')['status'] == 200, '//blah'
+ assert client.get(url='/slash/foo/../')['status'] == 200, 'relative'
+ assert client.get(url='/slash/./')['status'] == 200, '/slash/./'
+ assert client.get(url='/slash//.//')['status'] == 200, 'adjacent slashes'
+ assert client.get(url='/%')['status'] == 400, 'percent'
+ assert client.get(url='/%1')['status'] == 400, 'percent digit'
+ assert client.get(url='/%A')['status'] == 400, 'percent letter'
+ assert client.get(url='/slash/.?args')['status'] == 200, 'dot args'
+ assert client.get(url='/slash/.#frag')['status'] == 200, 'dot frag'
+ assert client.get(url='/slash/foo/..?args')['status'] == 200, 'dot dot args'
+ assert client.get(url='/slash/foo/..#frag')['status'] == 200, 'dot dot frag'
+ assert client.get(url='/slash/.')['status'] == 200, 'trailing dot'
+ assert client.get(url='/slash/foo/..')['status'] == 200, 'trailing dot dot'
- def test_routes_match_headers_multiple_rules(self):
- self.route_match({"headers": {"x-blah": ["test", "blah"]}})
- assert self.get()['status'] == 404, 'match headers multiple rules'
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": "test",
- "Connection": "close",
- }
- )['status']
- == 200
- ), 'match headers multiple rules 2'
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": "blah",
- "Connection": "close",
- }
- )['status']
- == 200
- ), 'match headers multiple rules 3'
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": ["test", "blah", "test"],
- "Connection": "close",
- }
- )['status']
- == 200
- ), 'match headers multiple rules 4'
+def test_routes_match_uri_case_sensitive():
+ route_match({"uri": "/BLAH"})
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "X-blah": ["blah", ""],
- "Connection": "close",
- }
- )['status']
- == 404
- ), 'match headers multiple rules 5'
+ assert client.get(url='/blah')['status'] == 404, '/blah'
+ assert client.get(url='/BlaH')['status'] == 404, '/BlaH'
+ assert client.get(url='/BLAH')['status'] == 200, '/BLAH'
- def test_routes_match_headers_case_insensitive(self):
- self.route_match({"headers": {"X-BLAH": "TEST"}})
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "x-blah": "test",
- "Connection": "close",
- }
- )['status']
- == 200
- ), 'match headers case insensitive'
+def test_routes_match_uri_normalize():
+ route_match({"uri": "/blah"})
- def test_routes_match_headers_invalid(self):
- self.route_match_invalid({"headers": ["blah"]})
- self.route_match_invalid({"headers": {"foo": ["bar", {}]}})
- self.route_match_invalid({"headers": {"": "blah"}})
+ assert client.get(url='/%62%6c%61%68')['status'] == 200, 'normalize'
- def test_routes_match_headers_empty_rule(self):
- self.route_match({"headers": {"host": ""}})
- assert self.get()['status'] == 404, 'localhost'
- self.host('', 200)
+def test_routes_match_empty_array():
+ route_match({"uri": []})
- def test_routes_match_headers_empty(self):
- self.route_match({"headers": {}})
- assert self.get()['status'] == 200, 'empty'
+ assert client.get(url='/blah')['status'] == 200, 'empty array'
- self.route_match({"headers": []})
- assert self.get()['status'] == 200, 'empty 2'
- def test_routes_match_headers_rule_array_empty(self):
- self.route_match({"headers": {"blah": []}})
+def test_routes_reconfigure():
+ assert 'success' in client.conf([], 'routes'), 'redefine'
+ assert client.get()['status'] == 404, 'redefine request'
- assert self.get()['status'] == 404, 'array empty'
- assert (
- self.get(
- headers={
- "Host": "localhost",
- "blah": "foo",
- "Connection": "close",
- }
- )['status']
- == 200
- ), 'match headers rule array empty 2'
+ assert 'success' in client.conf(
+ [{"action": {"return": 200}}], 'routes'
+ ), 'redefine 2'
+ assert client.get()['status'] == 200, 'redefine request 2'
- def test_routes_match_headers_array(self):
- self.route_match(
- {
- "headers": [
- {"x-header1": "foo*"},
- {"x-header2": "bar"},
- {"x-header3": ["foo", "bar"]},
- {"x-header1": "bar", "x-header4": "foo"},
- ]
- }
- )
+ assert 'success' in client.conf([], 'routes'), 'redefine 3'
+ assert client.get()['status'] == 404, 'redefine request 3'
- def check_headers(hds):
- hds = dict({"Host": "localhost", "Connection": "close"}, **hds)
- assert self.get(headers=hds)['status'] == 200, 'headers array match'
-
- def check_headers_404(hds):
- hds = dict({"Host": "localhost", "Connection": "close"}, **hds)
- assert (
- self.get(headers=hds)['status'] == 404
- ), 'headers array no match'
-
- assert self.get()['status'] == 404, 'match headers array'
- check_headers({"x-header1": "foo123"})
- check_headers({"x-header2": "bar"})
- check_headers({"x-header3": "bar"})
- check_headers_404({"x-header1": "bar"})
- check_headers({"x-header1": "bar", "x-header4": "foo"})
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/headers/1'
- ), 'match headers array configure 2'
-
- check_headers_404({"x-header2": "bar"})
- check_headers({"x-header3": "foo"})
-
- def test_routes_match_arguments(self):
- self.route_match({"arguments": {"foo": "bar"}})
-
- assert self.get()['status'] == 404, 'args'
- assert self.get(url='/?foo=bar')['status'] == 200, 'args 2'
- assert self.get(url='/?foo=bar1')['status'] == 404, 'args 3'
- assert self.get(url='/?1foo=bar')['status'] == 404, 'args 4'
- assert self.get(url='/?Foo=bar')['status'] == 404, 'case'
- assert self.get(url='/?foo=Bar')['status'] == 404, 'case 2'
-
- def test_routes_match_arguments_chars(self):
- chars = (
- " !\"%23$%25%26'()*%2B,-./0123456789:;<%3D>?@"
- "ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
- )
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes/main"}},
+ "routes": {"main": [{"action": {"return": 200}}]},
+ "applications": {},
+ }
+ ), 'redefine 4'
+ assert client.get()['status'] == 200, 'redefine request 4'
- chars_enc = ""
- for h1 in ["2", "3", "4", "5", "6", "7"]:
- for h2 in [
- "0",
- "1",
- "2",
- "3",
- "4",
- "5",
- "6",
- "7",
- "8",
- "9",
- "A",
- "B",
- "C",
- "D",
- "E",
- "F",
- ]:
- chars_enc += f'%{h1}{h2}'
- chars_enc = chars_enc[:-3]
-
- def check_args(args, query):
- self.route_match({"arguments": args})
- assert self.get(url=f'/?{query}')['status'] == 200
-
- check_args({chars: chars}, f'{chars}={chars}')
- check_args({chars: chars}, f'{chars}={chars_enc}')
- check_args({chars: chars}, f'{chars_enc}={chars}')
- check_args({chars: chars}, f'{chars_enc}={chars_enc}')
- check_args({chars_enc: chars_enc}, f'{chars}={chars}')
- check_args({chars_enc: chars_enc}, f'{chars}={chars_enc}')
- check_args({chars_enc: chars_enc}, f'{chars_enc}={chars}')
- check_args({chars_enc: chars_enc}, f'{chars_enc}={chars_enc}')
-
- def test_routes_match_arguments_empty(self):
- self.route_match({"arguments": {}})
- assert self.get()['status'] == 200, 'arguments empty'
-
- self.route_match({"arguments": []})
- assert self.get()['status'] == 200, 'arguments empty 2'
-
- def test_routes_match_arguments_space(self):
- self.route_match({"arguments": {"+fo o%20": "%20b+a r"}})
- assert self.get(url='/? fo o = b a r&')['status'] == 200
- assert self.get(url='/?+fo+o+=+b+a+r&')['status'] == 200
- assert self.get(url='/?%20fo%20o%20=%20b%20a%20r&')['status'] == 200
-
- self.route_match({"arguments": {"%20foo": " bar"}})
- assert self.get(url='/? foo= bar')['status'] == 200
- assert self.get(url='/?+foo=+bar')['status'] == 200
- assert self.get(url='/?%20foo=%20bar')['status'] == 200
- assert self.get(url='/?+foo= bar')['status'] == 200
- assert self.get(url='/?%20foo=+bar')['status'] == 200
-
- def test_routes_match_arguments_equal(self):
- self.route_match({"arguments": {"=": "="}})
- assert self.get(url='/?%3D=%3D')['status'] == 200
- assert self.get(url='/?%3D==')['status'] == 200
- assert self.get(url='/?===')['status'] == 404
- assert self.get(url='/?%3D%3D%3D')['status'] == 404
- assert self.get(url='/?==%3D')['status'] == 404
-
- def test_routes_match_arguments_enc(self):
- self.route_match({"arguments": {"Ю": "н"}})
- assert self.get(url='/?%D0%AE=%D0%BD')['status'] == 200
- assert self.get(url='/?%d0%ae=%d0%Bd')['status'] == 200
-
- def test_routes_match_arguments_hash(self):
- self.route_match({"arguments": {"#": "#"}})
- assert self.get(url='/?%23=%23')['status'] == 200
- assert self.get(url='/?%23=%23#')['status'] == 200
- assert self.get(url='/?#=#')['status'] == 404
- assert self.get(url='/?%23=#')['status'] == 404
-
- def test_routes_match_arguments_wildcard(self):
- self.route_match({"arguments": {"foo": "*"}})
- assert self.get(url='/?foo')['status'] == 200
- assert self.get(url='/?foo=')['status'] == 200
- assert self.get(url='/?foo=blah')['status'] == 200
- assert self.get(url='/?blah=foo')['status'] == 404
-
- self.route_match({"arguments": {"foo": "%25*"}})
- assert self.get(url='/?foo=%xx')['status'] == 200
-
- self.route_match({"arguments": {"foo": "%2A*"}})
- assert self.get(url='/?foo=*xx')['status'] == 200
- assert self.get(url='/?foo=xx')['status'] == 404
-
- self.route_match({"arguments": {"foo": "*%2A"}})
- assert self.get(url='/?foo=xx*')['status'] == 200
- assert self.get(url='/?foo=xx*x')['status'] == 404
-
- self.route_match({"arguments": {"foo": "1*2"}})
- assert self.get(url='/?foo=12')['status'] == 200
- assert self.get(url='/?foo=1blah2')['status'] == 200
- assert self.get(url='/?foo=1%2A2')['status'] == 200
- assert self.get(url='/?foo=x12')['status'] == 404
-
- self.route_match({"arguments": {"foo": "bar*", "%25": "%25"}})
- assert self.get(url='/?foo=barxx&%=%')['status'] == 200
- assert self.get(url='/?foo=barxx&x%=%')['status'] == 404
-
- def test_routes_match_arguments_negative(self):
- self.route_match({"arguments": {"foo": "!"}})
- assert self.get(url='/?bar')['status'] == 404
- assert self.get(url='/?foo')['status'] == 404
- assert self.get(url='/?foo=')['status'] == 404
- assert self.get(url='/?foo=%25')['status'] == 200
-
- self.route_match({"arguments": {"foo": "!*"}})
- assert self.get(url='/?bar')['status'] == 404
- assert self.get(url='/?foo')['status'] == 404
- assert self.get(url='/?foo=')['status'] == 404
- assert self.get(url='/?foo=blah')['status'] == 404
-
- self.route_match({"arguments": {"foo": "!%25"}})
- assert self.get(url='/?foo=blah')['status'] == 200
- assert self.get(url='/?foo=%')['status'] == 404
-
- self.route_match({"arguments": {"foo": "%21blah"}})
- assert self.get(url='/?foo=%21blah')['status'] == 200
- assert self.get(url='/?foo=!blah')['status'] == 200
- assert self.get(url='/?foo=bar')['status'] == 404
-
- self.route_match({"arguments": {"foo": "!!%21*a"}})
- assert self.get(url='/?foo=blah')['status'] == 200
- assert self.get(url='/?foo=!blah')['status'] == 200
- assert self.get(url='/?foo=!!a')['status'] == 404
- assert self.get(url='/?foo=!!bla')['status'] == 404
-
- def test_routes_match_arguments_percent(self):
- self.route_match({"arguments": {"%25": "%25"}})
- assert self.get(url='/?%=%')['status'] == 200
- assert self.get(url='/?%25=%25')['status'] == 200
- assert self.get(url='/?%25=%')['status'] == 200
-
- self.route_match({"arguments": {"%251": "%252"}})
- assert self.get(url='/?%1=%2')['status'] == 200
- assert self.get(url='/?%251=%252')['status'] == 200
- assert self.get(url='/?%251=%2')['status'] == 200
-
- self.route_match({"arguments": {"%25%21%251": "%25%24%252"}})
- assert self.get(url='/?%!%1=%$%2')['status'] == 200
- assert self.get(url='/?%25!%251=%25$%252')['status'] == 200
- assert self.get(url='/?%25!%1=%$%2')['status'] == 200
-
- def test_routes_match_arguments_ampersand(self):
- self.route_match({"arguments": {"foo": "&"}})
- assert self.get(url='/?foo=%26')['status'] == 200
- assert self.get(url='/?foo=%26&')['status'] == 200
- assert self.get(url='/?foo=%26%26')['status'] == 404
- assert self.get(url='/?foo=&')['status'] == 404
-
- self.route_match({"arguments": {"&": ""}})
- assert self.get(url='/?%26=')['status'] == 200
- assert self.get(url='/?%26=&')['status'] == 200
- assert self.get(url='/?%26=%26')['status'] == 404
- assert self.get(url='/?&=')['status'] == 404
-
- def test_routes_match_arguments_complex(self):
- self.route_match({"arguments": {"foo": ""}})
-
- assert self.get(url='/?foo')['status'] == 200, 'complex'
- assert self.get(url='/?blah=blah&foo=')['status'] == 200, 'complex 2'
- assert self.get(url='/?&&&foo&&&')['status'] == 200, 'complex 3'
- assert self.get(url='/?foo&foo=bar&foo')['status'] == 404, 'complex 4'
- assert self.get(url='/?foo=&foo')['status'] == 200, 'complex 5'
- assert self.get(url='/?&=&foo&==&')['status'] == 200, 'complex 6'
- assert self.get(url='/?&=&bar&==&')['status'] == 404, 'complex 7'
-
- def test_routes_match_arguments_multiple(self):
- self.route_match({"arguments": {"foo": "bar", "blah": "test"}})
-
- assert self.get()['status'] == 404, 'multiple'
- assert (
- self.get(url='/?foo=bar&blah=test')['status'] == 200
- ), 'multiple 2'
- assert self.get(url='/?foo=bar&blah')['status'] == 404, 'multiple 3'
- assert self.get(url='/?foo=bar&blah=tes')['status'] == 404, 'multiple 4'
- assert (
- self.get(url='/?foo=b%61r&bl%61h=t%65st')['status'] == 200
- ), 'multiple 5'
+ assert 'success' in client.conf_delete('routes/main/0'), 'redefine 5'
+ assert client.get()['status'] == 404, 'redefine request 5'
- def test_routes_match_arguments_multiple_rules(self):
- self.route_match({"arguments": {"foo": ["bar", "blah"]}})
+ assert 'success' in client.conf_post(
+ {"action": {"return": 200}}, 'routes/main'
+ ), 'redefine 6'
+ assert client.get()['status'] == 200, 'redefine request 6'
- assert self.get()['status'] == 404, 'rules'
- assert self.get(url='/?foo=bar')['status'] == 200, 'rules 2'
- assert self.get(url='/?foo=blah')['status'] == 200, 'rules 3'
- assert (
- self.get(url='/?foo=blah&foo=bar&foo=blah')['status'] == 200
- ), 'rules 4'
- assert (
- self.get(url='/?foo=blah&foo=bar&foo=')['status'] == 404
- ), 'rules 5'
+ assert 'error' in client.conf(
+ {"action": {"return": 200}}, 'routes/main/2'
+ ), 'redefine 7'
+ assert 'success' in client.conf(
+ {"action": {"return": 201}}, 'routes/main/1'
+ ), 'redefine 8'
- def test_routes_match_arguments_array(self):
- self.route_match(
- {
- "arguments": [
- {"var1": "val1*"},
- {"var2": "val2"},
- {"var3": ["foo", "bar"]},
- {"var1": "bar", "var4": "foo"},
- ]
+ assert len(client.conf_get('routes/main')) == 2, 'redefine conf 8'
+ assert client.get()['status'] == 200, 'redefine request 8'
+
+
+def test_routes_edit():
+ route_match({"method": "GET"})
+
+ assert client.get()['status'] == 200, 'routes edit GET'
+ assert client.post()['status'] == 404, 'routes edit POST'
+
+ assert 'success' in client.conf_post(
+ {"match": {"method": "POST"}, "action": {"return": 200}},
+ 'routes',
+ ), 'routes edit configure 2'
+ assert 'GET' == client.conf_get(
+ 'routes/0/match/method'
+ ), 'routes edit configure 2 check'
+ assert 'POST' == client.conf_get(
+ 'routes/1/match/method'
+ ), 'routes edit configure 2 check 2'
+
+ assert client.get()['status'] == 200, 'routes edit GET 2'
+ assert client.post()['status'] == 200, 'routes edit POST 2'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0'
+ ), 'routes edit configure 3'
+
+ assert client.get()['status'] == 404, 'routes edit GET 3'
+ assert client.post()['status'] == 200, 'routes edit POST 3'
+
+ assert 'error' in client.conf_delete(
+ 'routes/1'
+ ), 'routes edit configure invalid'
+ assert 'error' in client.conf_delete(
+ 'routes/-1'
+ ), 'routes edit configure invalid 2'
+ assert 'error' in client.conf_delete(
+ 'routes/blah'
+ ), 'routes edit configure invalid 3'
+
+ assert client.get()['status'] == 404, 'routes edit GET 4'
+ assert client.post()['status'] == 200, 'routes edit POST 4'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0'
+ ), 'routes edit configure 5'
+
+ assert client.get()['status'] == 404, 'routes edit GET 5'
+ assert client.post()['status'] == 404, 'routes edit POST 5'
+
+ assert 'success' in client.conf_post(
+ {"match": {"method": "POST"}, "action": {"return": 200}},
+ 'routes',
+ ), 'routes edit configure 6'
+
+ assert client.get()['status'] == 404, 'routes edit GET 6'
+ assert client.post()['status'] == 200, 'routes edit POST 6'
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes/main"}},
+ "routes": {"main": [{"action": {"return": 200}}]},
+ "applications": {},
+ }
+ ), 'route edit configure 7'
+
+ assert 'error' in client.conf_delete(
+ 'routes/0'
+ ), 'routes edit configure invalid 4'
+ assert 'error' in client.conf_delete(
+ 'routes/main'
+ ), 'routes edit configure invalid 5'
+
+ assert client.get()['status'] == 200, 'routes edit GET 7'
+
+ assert 'success' in client.conf_delete(
+ 'listeners/*:7080'
+ ), 'route edit configure 8'
+ assert 'success' in client.conf_delete(
+ 'routes/main'
+ ), 'route edit configure 9'
+
+
+def test_match_edit(skip_alert):
+ skip_alert(r'failed to apply new conf')
+
+ route_match({"method": ["GET", "POST"]})
+
+ assert client.get()['status'] == 200, 'match edit GET'
+ assert client.post()['status'] == 200, 'match edit POST'
+ assert client.put()['status'] == 404, 'match edit PUT'
+
+ assert 'success' in client.conf_post(
+ '\"PUT\"', 'routes/0/match/method'
+ ), 'match edit configure 2'
+ assert ['GET', 'POST', 'PUT'] == client.conf_get(
+ 'routes/0/match/method'
+ ), 'match edit configure 2 check'
+
+ assert client.get()['status'] == 200, 'match edit GET 2'
+ assert client.post()['status'] == 200, 'match edit POST 2'
+ assert client.put()['status'] == 200, 'match edit PUT 2'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/method/1'
+ ), 'match edit configure 3'
+ assert ['GET', 'PUT'] == client.conf_get(
+ 'routes/0/match/method'
+ ), 'match edit configure 3 check'
+
+ assert client.get()['status'] == 200, 'match edit GET 3'
+ assert client.post()['status'] == 404, 'match edit POST 3'
+ assert client.put()['status'] == 200, 'match edit PUT 3'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/method/1'
+ ), 'match edit configure 4'
+ assert ['GET'] == client.conf_get(
+ 'routes/0/match/method'
+ ), 'match edit configure 4 check'
+
+ assert client.get()['status'] == 200, 'match edit GET 4'
+ assert client.post()['status'] == 404, 'match edit POST 4'
+ assert client.put()['status'] == 404, 'match edit PUT 4'
+
+ assert 'error' in client.conf_delete(
+ 'routes/0/match/method/1'
+ ), 'match edit configure invalid'
+ assert 'error' in client.conf_delete(
+ 'routes/0/match/method/-1'
+ ), 'match edit configure invalid 2'
+ assert 'error' in client.conf_delete(
+ 'routes/0/match/method/blah'
+ ), 'match edit configure invalid 3'
+ assert ['GET'] == client.conf_get(
+ 'routes/0/match/method'
+ ), 'match edit configure 5 check'
+
+ assert client.get()['status'] == 200, 'match edit GET 5'
+ assert client.post()['status'] == 404, 'match edit POST 5'
+ assert client.put()['status'] == 404, 'match edit PUT 5'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/method/0'
+ ), 'match edit configure 6'
+ assert [] == client.conf_get(
+ 'routes/0/match/method'
+ ), 'match edit configure 6 check'
+
+ assert client.get()['status'] == 200, 'match edit GET 6'
+ assert client.post()['status'] == 200, 'match edit POST 6'
+ assert client.put()['status'] == 200, 'match edit PUT 6'
+
+ assert 'success' in client.conf(
+ '"GET"', 'routes/0/match/method'
+ ), 'match edit configure 7'
+
+ assert client.get()['status'] == 200, 'match edit GET 7'
+ assert client.post()['status'] == 404, 'match edit POST 7'
+ assert client.put()['status'] == 404, 'match edit PUT 7'
+
+ assert 'error' in client.conf_delete(
+ 'routes/0/match/method/0'
+ ), 'match edit configure invalid 5'
+ assert 'error' in client.conf(
+ {}, 'routes/0/action'
+ ), 'match edit configure invalid 6'
+
+ assert 'success' in client.conf(
+ {}, 'routes/0/match'
+ ), 'match edit configure 8'
+
+ assert client.get()['status'] == 200, 'match edit GET 8'
+
+
+def test_routes_match_rules():
+ route_match({"method": "GET", "host": "localhost", "uri": "/"})
+
+ assert client.get()['status'] == 200, 'routes match rules'
+
+
+def test_routes_loop():
+ assert 'success' in route(
+ {"match": {"uri": "/"}, "action": {"pass": "routes"}}
+ ), 'routes loop configure'
+
+ assert client.get()['status'] == 500, 'routes loop'
+
+
+def test_routes_match_headers():
+ route_match({"headers": {"host": "localhost"}})
+
+ assert client.get()['status'] == 200, 'match headers'
+ host('Localhost', 200)
+ host('localhost.com', 404)
+ host('llocalhost', 404)
+ host('host', 404)
+
+
+def test_routes_match_headers_multiple():
+ route_match({"headers": {"host": "localhost", "x-blah": "test"}})
+
+ assert client.get()['status'] == 404, 'match headers multiple'
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": "test",
+ "Connection": "close",
}
- )
+ )['status']
+ == 200
+ ), 'match headers multiple 2'
+
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": "",
+ "Connection": "close",
+ }
+ )['status']
+ == 404
+ ), 'match headers multiple 3'
- assert self.get()['status'] == 404, 'arr'
- assert self.get(url='/?var1=val123')['status'] == 200, 'arr 2'
- assert self.get(url='/?var2=val2')['status'] == 200, 'arr 3'
- assert self.get(url='/?var3=bar')['status'] == 200, 'arr 4'
- assert self.get(url='/?var1=bar')['status'] == 404, 'arr 5'
- assert self.get(url='/?var1=bar&var4=foo')['status'] == 200, 'arr 6'
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/arguments/1'
- ), 'match arguments array configure 2'
-
- assert self.get(url='/?var2=val2')['status'] == 404, 'arr 7'
- assert self.get(url='/?var3=foo')['status'] == 200, 'arr 8'
-
- def test_routes_match_arguments_invalid(self):
- self.route_match_invalid({"arguments": ["var"]})
- self.route_match_invalid({"arguments": [{"var1": {}}]})
- self.route_match_invalid({"arguments": {"": "bar"}})
- self.route_match_invalid({"arguments": {"foo": "%"}})
- self.route_match_invalid({"arguments": {"foo": "%1G"}})
- self.route_match_invalid({"arguments": {"%": "bar"}})
- self.route_match_invalid({"arguments": {"foo": "%0"}})
- self.route_match_invalid({"arguments": {"foo": "%%1F"}})
- self.route_match_invalid({"arguments": {"%%1F": ""}})
- self.route_match_invalid({"arguments": {"%7%F": ""}})
-
- def test_routes_match_query(self):
- self.route_match({"query": "!"})
- assert self.get(url='/')['status'] == 404
- assert self.get(url='/?')['status'] == 404
- assert self.get(url='/?foo')['status'] == 200
- assert self.get(url='/?foo=')['status'] == 200
- assert self.get(url='/?foo=baz')['status'] == 200
-
- self.route_match({"query": "foo=%26"})
- assert self.get(url='/?foo=&')['status'] == 200
-
- self.route_match({"query": "a=b&c=d"})
- assert self.get(url='/?a=b&c=d')['status'] == 200
-
- self.route_match({"query": "a=b%26c%3Dd"})
- assert self.get(url='/?a=b%26c%3Dd')['status'] == 200
- assert self.get(url='/?a=b&c=d')['status'] == 200
-
- self.route_match({"query": "a=b%26c%3Dd+e"})
- assert self.get(url='/?a=b&c=d e')['status'] == 200
-
- def test_routes_match_query_array(self):
- self.route_match({"query": ["foo", "bar"]})
-
- assert self.get()['status'] == 404, 'no args'
- assert self.get(url='/?foo')['status'] == 200, 'arg first'
- assert self.get(url='/?bar')['status'] == 200, 'arg second'
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/query/1'
- ), 'query array remove second'
-
- assert self.get(url='/?foo')['status'] == 200, 'still arg first'
- assert self.get(url='/?bar')['status'] == 404, 'no arg second'
-
- self.route_match({"query": ["!f", "foo"]})
-
- assert self.get(url='/?f')['status'] == 404, 'negative arg'
- assert self.get(url='/?fo')['status'] == 404, 'negative arg 2'
- assert self.get(url='/?foo')['status'] == 200, 'negative arg 3'
-
- self.route_match({"query": []})
- assert self.get()['status'] == 200, 'empty array'
-
- def test_routes_match_query_invalid(self):
- self.route_match_invalid({"query": [1]})
- self.route_match_invalid({"query": "%"})
- self.route_match_invalid({"query": "%1G"})
- self.route_match_invalid({"query": "%0"})
- self.route_match_invalid({"query": "%%1F"})
- self.route_match_invalid({"query": ["foo", "%3D", "%%1F"]})
-
- def test_routes_match_cookies(self):
- self.route_match({"cookies": {"foO": "bar"}})
-
- assert self.get()['status'] == 404, 'cookie'
- self.cookie('foO=bar', 200)
- self.cookie('foO=bar;1', 200)
- self.cookie(['foO=bar', 'blah=blah'], 200)
- self.cookie('foO=bar; blah=blah', 200)
- self.cookie('Foo=bar', 404)
- self.cookie('foO=Bar', 404)
- self.cookie('foO=bar1', 404)
- self.cookie('1foO=bar;', 404)
-
- def test_routes_match_cookies_empty(self):
- self.route_match({"cookies": {}})
- assert self.get()['status'] == 200, 'cookies empty'
-
- self.route_match({"cookies": []})
- assert self.get()['status'] == 200, 'cookies empty 2'
-
- def test_routes_match_cookies_invalid(self):
- self.route_match_invalid({"cookies": ["var"]})
- self.route_match_invalid({"cookies": [{"foo": {}}]})
-
- def test_routes_match_cookies_complex(self):
- self.route_match({"cookies": {"foo": "bar=baz"}})
- self.cookie('foo=bar=baz', 200)
- self.cookie(' foo=bar=baz ', 200)
- self.cookie('=foo=bar=baz', 404)
-
- self.route_match({"cookies": {"foo": ""}})
- self.cookie('foo=', 200)
- self.cookie('foo=;', 200)
- self.cookie(' foo=;', 200)
- self.cookie('foo', 404)
- self.cookie('', 404)
- self.cookie('=', 404)
-
- def test_routes_match_cookies_multiple(self):
- self.route_match({"cookies": {"foo": "bar", "blah": "blah"}})
-
- assert self.get()['status'] == 404, 'multiple'
- self.cookie('foo=bar; blah=blah', 200)
- self.cookie(['foo=bar', 'blah=blah'], 200)
- self.cookie(['foo=bar; blah', 'blah'], 404)
- self.cookie(['foo=bar; blah=test', 'blah=blah'], 404)
-
- def test_routes_match_cookies_multiple_values(self):
- self.route_match({"cookies": {"blah": "blah"}})
-
- self.cookie(['blah=blah', 'blah=blah', 'blah=blah'], 200)
- self.cookie(['blah=blah', 'blah=test', 'blah=blah'], 404)
- self.cookie(['blah=blah; blah=', 'blah=blah'], 404)
-
- def test_routes_match_cookies_multiple_rules(self):
- self.route_match({"cookies": {"blah": ["test", "blah"]}})
-
- assert self.get()['status'] == 404, 'multiple rules'
- self.cookie('blah=test', 200)
- self.cookie('blah=blah', 200)
- self.cookie(['blah=blah', 'blah=test', 'blah=blah'], 200)
- self.cookie(['blah=blah; blah=test', 'blah=blah'], 200)
- self.cookie(['blah=blah', 'blah'], 200) # invalid cookie
-
- def test_routes_match_cookies_array(self):
- self.route_match(
- {
- "cookies": [
- {"var1": "val1*"},
- {"var2": "val2"},
- {"var3": ["foo", "bar"]},
- {"var1": "bar", "var4": "foo"},
- ]
+
+def test_routes_match_headers_multiple_values():
+ route_match({"headers": {"x-blah": "test"}})
+
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": ["test", "test", "test"],
+ "Connection": "close",
}
- )
+ )['status']
+ == 200
+ ), 'match headers multiple values'
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": ["test", "blah", "test"],
+ "Connection": "close",
+ }
+ )['status']
+ == 404
+ ), 'match headers multiple values 2'
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": ["test", "", "test"],
+ "Connection": "close",
+ }
+ )['status']
+ == 404
+ ), 'match headers multiple values 3'
- assert self.get()['status'] == 404, 'cookies array'
- self.cookie('var1=val123', 200)
- self.cookie('var2=val2', 200)
- self.cookie(' var2=val2 ', 200)
- self.cookie('var3=bar', 200)
- self.cookie('var3=bar;', 200)
- self.cookie('var1=bar', 404)
- self.cookie('var1=bar; var4=foo;', 200)
- self.cookie(['var1=bar', 'var4=foo'], 200)
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/cookies/1'
- ), 'match cookies array configure 2'
-
- self.cookie('var2=val2', 404)
- self.cookie('var3=foo', 200)
-
- def test_routes_match_scheme(self):
- self.route_match({"scheme": "http"})
- self.route_match({"scheme": "https"})
- self.route_match({"scheme": "HtTp"})
- self.route_match({"scheme": "HtTpS"})
-
- def test_routes_match_scheme_invalid(self):
- self.route_match_invalid({"scheme": ["http"]})
- self.route_match_invalid({"scheme": "ftp"})
- self.route_match_invalid({"scheme": "ws"})
- self.route_match_invalid({"scheme": "*"})
- self.route_match_invalid({"scheme": ""})
-
- def test_routes_source_port(self):
- def sock_port():
- sock = self.http(b'', raw=True, no_recv=True)
- port = sock.getsockname()[1]
- return (sock, port)
-
- sock, port = sock_port()
- sock2, port2 = sock_port()
-
- self.route_match({"source": f'127.0.0.1:{port}'})
- assert self.get(sock=sock)['status'] == 200, 'exact'
- assert self.get(sock=sock2)['status'] == 404, 'exact 2'
-
- sock, port = sock_port()
- sock2, port2 = sock_port()
-
- self.route_match({"source": f'!127.0.0.1:{port}'})
- assert self.get(sock=sock)['status'] == 404, 'negative'
- assert self.get(sock=sock2)['status'] == 200, 'negative 2'
-
- sock, port = sock_port()
- sock2, port2 = sock_port()
-
- self.route_match({"source": [f'*:{port}', "!127.0.0.1"]})
- assert self.get(sock=sock)['status'] == 404, 'negative 3'
- assert self.get(sock=sock2)['status'] == 404, 'negative 4'
-
- sock, port = sock_port()
- sock2, port2 = sock_port()
-
- self.route_match({"source": f'127.0.0.1:{port}-{port}'})
- assert self.get(sock=sock)['status'] == 200, 'range single'
- assert self.get(sock=sock2)['status'] == 404, 'range single 2'
-
- socks = [
- sock_port(),
- sock_port(),
- sock_port(),
- sock_port(),
- sock_port(),
- ]
- socks.sort(key=lambda sock: sock[1])
-
- self.route_match({"source": f'127.0.0.1:{socks[1][1]}-{socks[3][1]}'})
- assert self.get(sock=socks[0][0])['status'] == 404, 'range'
- assert self.get(sock=socks[1][0])['status'] == 200, 'range 2'
- assert self.get(sock=socks[2][0])['status'] == 200, 'range 3'
- assert self.get(sock=socks[3][0])['status'] == 200, 'range 4'
- assert self.get(sock=socks[4][0])['status'] == 404, 'range 5'
-
- socks = [
- sock_port(),
- sock_port(),
- sock_port(),
- ]
- socks.sort(key=lambda sock: sock[1])
-
- self.route_match(
- {
- "source": [
- f'127.0.0.1:{socks[0][1]}',
- f'127.0.0.1:{socks[2][1]}',
- ]
+
+def test_routes_match_headers_multiple_rules():
+ route_match({"headers": {"x-blah": ["test", "blah"]}})
+
+ assert client.get()['status'] == 404, 'match headers multiple rules'
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": "test",
+ "Connection": "close",
}
- )
- assert self.get(sock=socks[0][0])['status'] == 200, 'array'
- assert self.get(sock=socks[1][0])['status'] == 404, 'array 2'
- assert self.get(sock=socks[2][0])['status'] == 200, 'array 3'
+ )['status']
+ == 200
+ ), 'match headers multiple rules 2'
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": "blah",
+ "Connection": "close",
+ }
+ )['status']
+ == 200
+ ), 'match headers multiple rules 3'
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": ["test", "blah", "test"],
+ "Connection": "close",
+ }
+ )['status']
+ == 200
+ ), 'match headers multiple rules 4'
+
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "X-blah": ["blah", ""],
+ "Connection": "close",
+ }
+ )['status']
+ == 404
+ ), 'match headers multiple rules 5'
- def test_routes_source_addr(self):
- assert 'success' in self.conf(
- {
- "*:7080": {"pass": "routes"},
- "[::1]:7081": {"pass": "routes"},
- },
- 'listeners',
- ), 'source listeners configure'
- def get_ipv6():
- return self.get(sock_type='ipv6', port=7081)
+def test_routes_match_headers_case_insensitive():
+ route_match({"headers": {"X-BLAH": "TEST"}})
- self.route_match({"source": "127.0.0.1"})
- assert self.get()['status'] == 200, 'exact'
- assert get_ipv6()['status'] == 404, 'exact ipv6'
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "x-blah": "test",
+ "Connection": "close",
+ }
+ )['status']
+ == 200
+ ), 'match headers case insensitive'
- self.route_match({"source": ["127.0.0.1"]})
- assert self.get()['status'] == 200, 'exact 2'
- assert get_ipv6()['status'] == 404, 'exact 2 ipv6'
- self.route_match({"source": "!127.0.0.1"})
- assert self.get()['status'] == 404, 'exact neg'
- assert get_ipv6()['status'] == 200, 'exact neg ipv6'
+def test_routes_match_headers_invalid():
+ route_match_invalid({"headers": ["blah"]})
+ route_match_invalid({"headers": {"foo": ["bar", {}]}})
+ route_match_invalid({"headers": {"": "blah"}})
- self.route_match({"source": "127.0.0.2"})
- assert self.get()['status'] == 404, 'exact 3'
- assert get_ipv6()['status'] == 404, 'exact 3 ipv6'
- self.route_match({"source": "127.0.0.1-127.0.0.1"})
- assert self.get()['status'] == 200, 'range single'
- assert get_ipv6()['status'] == 404, 'range single ipv6'
+def test_routes_match_headers_empty_rule():
+ route_match({"headers": {"host": ""}})
- self.route_match({"source": "127.0.0.2-127.0.0.2"})
- assert self.get()['status'] == 404, 'range single 2'
- assert get_ipv6()['status'] == 404, 'range single 2 ipv6'
+ assert client.get()['status'] == 404, 'localhost'
+ host('', 200)
- self.route_match({"source": "127.0.0.2-127.0.0.3"})
- assert self.get()['status'] == 404, 'range'
- assert get_ipv6()['status'] == 404, 'range ipv6'
- self.route_match({"source": "127.0.0.1-127.0.0.2"})
- assert self.get()['status'] == 200, 'range 2'
- assert get_ipv6()['status'] == 404, 'range 2 ipv6'
+def test_routes_match_headers_empty():
+ route_match({"headers": {}})
+ assert client.get()['status'] == 200, 'empty'
- self.route_match({"source": "127.0.0.0-127.0.0.2"})
- assert self.get()['status'] == 200, 'range 3'
- assert get_ipv6()['status'] == 404, 'range 3 ipv6'
+ route_match({"headers": []})
+ assert client.get()['status'] == 200, 'empty 2'
- self.route_match({"source": "127.0.0.0-127.0.0.1"})
- assert self.get()['status'] == 200, 'range 4'
- assert get_ipv6()['status'] == 404, 'range 4 ipv6'
- self.route_match({"source": "126.0.0.0-127.0.0.0"})
- assert self.get()['status'] == 404, 'range 5'
- assert get_ipv6()['status'] == 404, 'range 5 ipv6'
+def test_routes_match_headers_rule_array_empty():
+ route_match({"headers": {"blah": []}})
- self.route_match({"source": "126.126.126.126-127.0.0.2"})
- assert self.get()['status'] == 200, 'range 6'
- assert get_ipv6()['status'] == 404, 'range 6 ipv6'
+ assert client.get()['status'] == 404, 'array empty'
+ assert (
+ client.get(
+ headers={
+ "Host": "localhost",
+ "blah": "foo",
+ "Connection": "close",
+ }
+ )['status']
+ == 200
+ ), 'match headers rule array empty 2'
+
+
+def test_routes_match_headers_array():
+ route_match(
+ {
+ "headers": [
+ {"x-header1": "foo*"},
+ {"x-header2": "bar"},
+ {"x-header3": ["foo", "bar"]},
+ {"x-header1": "bar", "x-header4": "foo"},
+ ]
+ }
+ )
+
+ def check_headers(hds):
+ hds = dict({"Host": "localhost", "Connection": "close"}, **hds)
+ assert client.get(headers=hds)['status'] == 200, 'headers array match'
+
+ def check_headers_404(hds):
+ hds = dict({"Host": "localhost", "Connection": "close"}, **hds)
+ assert (
+ client.get(headers=hds)['status'] == 404
+ ), 'headers array no match'
+
+ assert client.get()['status'] == 404, 'match headers array'
+ check_headers({"x-header1": "foo123"})
+ check_headers({"x-header2": "bar"})
+ check_headers({"x-header3": "bar"})
+ check_headers_404({"x-header1": "bar"})
+ check_headers({"x-header1": "bar", "x-header4": "foo"})
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/headers/1'
+ ), 'match headers array configure 2'
+
+ check_headers_404({"x-header2": "bar"})
+ check_headers({"x-header3": "foo"})
+
+
+def test_routes_match_arguments():
+ route_match({"arguments": {"foo": "bar"}})
+
+ assert client.get()['status'] == 404, 'args'
+ assert client.get(url='/?foo=bar')['status'] == 200, 'args 2'
+ assert client.get(url='/?foo=bar1')['status'] == 404, 'args 3'
+ assert client.get(url='/?1foo=bar')['status'] == 404, 'args 4'
+ assert client.get(url='/?Foo=bar')['status'] == 404, 'case'
+ assert client.get(url='/?foo=Bar')['status'] == 404, 'case 2'
+
+
+def test_routes_match_arguments_chars():
+ chars = (
+ " !\"%23$%25%26'()*%2B,-./0123456789:;<%3D>?@"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
+ )
+
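+ # build chars_enc: the same printable ASCII set as above, but with every character percent-encoded (%20..%7E)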
+ chars_enc = ""
+ for h1 in ["2", "3", "4", "5", "6", "7"]:
+ for h2 in [
+ "0",
+ "1",
+ "2",
+ "3",
+ "4",
+ "5",
+ "6",
+ "7",
+ "8",
+ "9",
+ "A",
+ "B",
+ "C",
+ "D",
+ "E",
+ "F",
+ ]:
+ chars_enc += f'%{h1}{h2}'
+ chars_enc = chars_enc[:-3]
+
+ def check_args(args, query):
+ route_match({"arguments": args})
+ assert client.get(url=f'/?{query}')['status'] == 200
+
+ check_args({chars: chars}, f'{chars}={chars}')
+ check_args({chars: chars}, f'{chars}={chars_enc}')
+ check_args({chars: chars}, f'{chars_enc}={chars}')
+ check_args({chars: chars}, f'{chars_enc}={chars_enc}')
+ check_args({chars_enc: chars_enc}, f'{chars}={chars}')
+ check_args({chars_enc: chars_enc}, f'{chars}={chars_enc}')
+ check_args({chars_enc: chars_enc}, f'{chars_enc}={chars}')
+ check_args({chars_enc: chars_enc}, f'{chars_enc}={chars_enc}')
+
+
+def test_routes_match_arguments_empty():
+ route_match({"arguments": {}})
+ assert client.get()['status'] == 200, 'arguments empty'
+
+ route_match({"arguments": []})
+ assert client.get()['status'] == 200, 'arguments empty 2'
+
+
+def test_routes_match_arguments_space():
+ route_match({"arguments": {"+fo o%20": "%20b+a r"}})
+ assert client.get(url='/? fo o = b a r&')['status'] == 200
+ assert client.get(url='/?+fo+o+=+b+a+r&')['status'] == 200
+ assert client.get(url='/?%20fo%20o%20=%20b%20a%20r&')['status'] == 200
+
+ route_match({"arguments": {"%20foo": " bar"}})
+ assert client.get(url='/? foo= bar')['status'] == 200
+ assert client.get(url='/?+foo=+bar')['status'] == 200
+ assert client.get(url='/?%20foo=%20bar')['status'] == 200
+ assert client.get(url='/?+foo= bar')['status'] == 200
+ assert client.get(url='/?%20foo=+bar')['status'] == 200
+
+
+def test_routes_match_arguments_equal():
+ route_match({"arguments": {"=": "="}})
+ assert client.get(url='/?%3D=%3D')['status'] == 200
+ assert client.get(url='/?%3D==')['status'] == 200
+ assert client.get(url='/?===')['status'] == 404
+ assert client.get(url='/?%3D%3D%3D')['status'] == 404
+ assert client.get(url='/?==%3D')['status'] == 404
+
+
+def test_routes_match_arguments_enc():
+ route_match({"arguments": {"Ю": "н"}})
+ assert client.get(url='/?%D0%AE=%D0%BD')['status'] == 200
+ assert client.get(url='/?%d0%ae=%d0%Bd')['status'] == 200
+
+
+def test_routes_match_arguments_hash():
+ route_match({"arguments": {"#": "#"}})
+ assert client.get(url='/?%23=%23')['status'] == 200
+ assert client.get(url='/?%23=%23#')['status'] == 200
+ assert client.get(url='/?#=#')['status'] == 404
+ assert client.get(url='/?%23=#')['status'] == 404
+
+
+def test_routes_match_arguments_wildcard():
+ route_match({"arguments": {"foo": "*"}})
+ assert client.get(url='/?foo')['status'] == 200
+ assert client.get(url='/?foo=')['status'] == 200
+ assert client.get(url='/?foo=blah')['status'] == 200
+ assert client.get(url='/?blah=foo')['status'] == 404
+
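+ # percent-encoded characters in a pattern are literals (%25 -> '%', %2A -> '*'); only a bare '*' acts as the wildcard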
+ route_match({"arguments": {"foo": "%25*"}})
+ assert client.get(url='/?foo=%xx')['status'] == 200
+
+ route_match({"arguments": {"foo": "%2A*"}})
+ assert client.get(url='/?foo=*xx')['status'] == 200
+ assert client.get(url='/?foo=xx')['status'] == 404
+
+ route_match({"arguments": {"foo": "*%2A"}})
+ assert client.get(url='/?foo=xx*')['status'] == 200
+ assert client.get(url='/?foo=xx*x')['status'] == 404
+
+ route_match({"arguments": {"foo": "1*2"}})
+ assert client.get(url='/?foo=12')['status'] == 200
+ assert client.get(url='/?foo=1blah2')['status'] == 200
+ assert client.get(url='/?foo=1%2A2')['status'] == 200
+ assert client.get(url='/?foo=x12')['status'] == 404
+
+ route_match({"arguments": {"foo": "bar*", "%25": "%25"}})
+ assert client.get(url='/?foo=barxx&%=%')['status'] == 200
+ assert client.get(url='/?foo=barxx&x%=%')['status'] == 404
+
+
+def test_routes_match_arguments_negative():
+ route_match({"arguments": {"foo": "!"}})
+ assert client.get(url='/?bar')['status'] == 404
+ assert client.get(url='/?foo')['status'] == 404
+ assert client.get(url='/?foo=')['status'] == 404
+ assert client.get(url='/?foo=%25')['status'] == 200
+
+ route_match({"arguments": {"foo": "!*"}})
+ assert client.get(url='/?bar')['status'] == 404
+ assert client.get(url='/?foo')['status'] == 404
+ assert client.get(url='/?foo=')['status'] == 404
+ assert client.get(url='/?foo=blah')['status'] == 404
+
+ route_match({"arguments": {"foo": "!%25"}})
+ assert client.get(url='/?foo=blah')['status'] == 200
+ assert client.get(url='/?foo=%')['status'] == 404
+
+ route_match({"arguments": {"foo": "%21blah"}})
+ assert client.get(url='/?foo=%21blah')['status'] == 200
+ assert client.get(url='/?foo=!blah')['status'] == 200
+ assert client.get(url='/?foo=bar')['status'] == 404
+
+ route_match({"arguments": {"foo": "!!%21*a"}})
+ assert client.get(url='/?foo=blah')['status'] == 200
+ assert client.get(url='/?foo=!blah')['status'] == 200
+ assert client.get(url='/?foo=!!a')['status'] == 404
+ assert client.get(url='/?foo=!!bla')['status'] == 404
+
+
+def test_routes_match_arguments_percent():
+ route_match({"arguments": {"%25": "%25"}})
+ assert client.get(url='/?%=%')['status'] == 200
+ assert client.get(url='/?%25=%25')['status'] == 200
+ assert client.get(url='/?%25=%')['status'] == 200
+
+ route_match({"arguments": {"%251": "%252"}})
+ assert client.get(url='/?%1=%2')['status'] == 200
+ assert client.get(url='/?%251=%252')['status'] == 200
+ assert client.get(url='/?%251=%2')['status'] == 200
+
+ route_match({"arguments": {"%25%21%251": "%25%24%252"}})
+ assert client.get(url='/?%!%1=%$%2')['status'] == 200
+ assert client.get(url='/?%25!%251=%25$%252')['status'] == 200
+ assert client.get(url='/?%25!%1=%$%2')['status'] == 200
+
+
+def test_routes_match_arguments_ampersand():
+ route_match({"arguments": {"foo": "&"}})
+ assert client.get(url='/?foo=%26')['status'] == 200
+ assert client.get(url='/?foo=%26&')['status'] == 200
+ assert client.get(url='/?foo=%26%26')['status'] == 404
+ assert client.get(url='/?foo=&')['status'] == 404
+
+ route_match({"arguments": {"&": ""}})
+ assert client.get(url='/?%26=')['status'] == 200
+ assert client.get(url='/?%26=&')['status'] == 200
+ assert client.get(url='/?%26=%26')['status'] == 404
+ assert client.get(url='/?&=')['status'] == 404
+
+
+def test_routes_match_arguments_complex():
+ route_match({"arguments": {"foo": ""}})
+
+ assert client.get(url='/?foo')['status'] == 200, 'complex'
+ assert client.get(url='/?blah=blah&foo=')['status'] == 200, 'complex 2'
+ assert client.get(url='/?&&&foo&&&')['status'] == 200, 'complex 3'
+ assert client.get(url='/?foo&foo=bar&foo')['status'] == 404, 'complex 4'
+ assert client.get(url='/?foo=&foo')['status'] == 200, 'complex 5'
+ assert client.get(url='/?&=&foo&==&')['status'] == 200, 'complex 6'
+ assert client.get(url='/?&=&bar&==&')['status'] == 404, 'complex 7'
+
+
+def test_routes_match_arguments_multiple():
+ route_match({"arguments": {"foo": "bar", "blah": "test"}})
+
+ assert client.get()['status'] == 404, 'multiple'
+ assert client.get(url='/?foo=bar&blah=test')['status'] == 200, 'multiple 2'
+ assert client.get(url='/?foo=bar&blah')['status'] == 404, 'multiple 3'
+ assert client.get(url='/?foo=bar&blah=tes')['status'] == 404, 'multiple 4'
+ assert (
+ client.get(url='/?foo=b%61r&bl%61h=t%65st')['status'] == 200
+ ), 'multiple 5'
+
+
+def test_routes_match_arguments_multiple_rules():
+ route_match({"arguments": {"foo": ["bar", "blah"]}})
+
+ assert client.get()['status'] == 404, 'rules'
+ assert client.get(url='/?foo=bar')['status'] == 200, 'rules 2'
+ assert client.get(url='/?foo=blah')['status'] == 200, 'rules 3'
+ assert (
+ client.get(url='/?foo=blah&foo=bar&foo=blah')['status'] == 200
+ ), 'rules 4'
+ assert client.get(url='/?foo=blah&foo=bar&foo=')['status'] == 404, 'rules 5'
+
+
+def test_routes_match_arguments_array():
+ route_match(
+ {
+ "arguments": [
+ {"var1": "val1*"},
+ {"var2": "val2"},
+ {"var3": ["foo", "bar"]},
+ {"var1": "bar", "var4": "foo"},
+ ]
+ }
+ )
+
+ assert client.get()['status'] == 404, 'arr'
+ assert client.get(url='/?var1=val123')['status'] == 200, 'arr 2'
+ assert client.get(url='/?var2=val2')['status'] == 200, 'arr 3'
+ assert client.get(url='/?var3=bar')['status'] == 200, 'arr 4'
+ assert client.get(url='/?var1=bar')['status'] == 404, 'arr 5'
+ assert client.get(url='/?var1=bar&var4=foo')['status'] == 200, 'arr 6'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/arguments/1'
+ ), 'match arguments array configure 2'
- def test_routes_source_ipv6(self):
- assert 'success' in self.conf(
- {
- "[::1]:7080": {"pass": "routes"},
- "127.0.0.1:7081": {"pass": "routes"},
- },
- 'listeners',
- ), 'source listeners configure'
+ assert client.get(url='/?var2=val2')['status'] == 404, 'arr 7'
+ assert client.get(url='/?var3=foo')['status'] == 200, 'arr 8'
- self.route_match({"source": "::1"})
- assert self.get(sock_type='ipv6')['status'] == 200, 'exact'
- assert self.get(port=7081)['status'] == 404, 'exact ipv4'
- self.route_match({"source": ["::1"]})
- assert self.get(sock_type='ipv6')['status'] == 200, 'exact 2'
- assert self.get(port=7081)['status'] == 404, 'exact 2 ipv4'
+def test_routes_match_arguments_invalid():
+ route_match_invalid({"arguments": ["var"]})
+ route_match_invalid({"arguments": [{"var1": {}}]})
+ route_match_invalid({"arguments": {"": "bar"}})
+ route_match_invalid({"arguments": {"foo": "%"}})
+ route_match_invalid({"arguments": {"foo": "%1G"}})
+ route_match_invalid({"arguments": {"%": "bar"}})
+ route_match_invalid({"arguments": {"foo": "%0"}})
+ route_match_invalid({"arguments": {"foo": "%%1F"}})
+ route_match_invalid({"arguments": {"%%1F": ""}})
+ route_match_invalid({"arguments": {"%7%F": ""}})
+
- self.route_match({"source": "!::1"})
- assert self.get(sock_type='ipv6')['status'] == 404, 'exact neg'
- assert self.get(port=7081)['status'] == 200, 'exact neg ipv4'
+def test_routes_match_query():
+ route_match({"query": "!"})
+ assert client.get(url='/')['status'] == 404
+ assert client.get(url='/?')['status'] == 404
+ assert client.get(url='/?foo')['status'] == 200
+ assert client.get(url='/?foo=')['status'] == 200
+ assert client.get(url='/?foo=baz')['status'] == 200
- self.route_match({"source": "::2"})
- assert self.get(sock_type='ipv6')['status'] == 404, 'exact 3'
- assert self.get(port=7081)['status'] == 404, 'exact 3 ipv4'
+ route_match({"query": "foo=%26"})
+ assert client.get(url='/?foo=&')['status'] == 200
- self.route_match({"source": "::1-::1"})
- assert self.get(sock_type='ipv6')['status'] == 200, 'range'
- assert self.get(port=7081)['status'] == 404, 'range ipv4'
+ route_match({"query": "a=b&c=d"})
+ assert client.get(url='/?a=b&c=d')['status'] == 200
- self.route_match({"source": "::2-::2"})
- assert self.get(sock_type='ipv6')['status'] == 404, 'range 2'
- assert self.get(port=7081)['status'] == 404, 'range 2 ipv4'
+ route_match({"query": "a=b%26c%3Dd"})
+ assert client.get(url='/?a=b%26c%3Dd')['status'] == 200
+ assert client.get(url='/?a=b&c=d')['status'] == 200
- self.route_match({"source": "::2-::3"})
- assert self.get(sock_type='ipv6')['status'] == 404, 'range 3'
- assert self.get(port=7081)['status'] == 404, 'range 3 ipv4'
+ route_match({"query": "a=b%26c%3Dd+e"})
+ assert client.get(url='/?a=b&c=d e')['status'] == 200
- self.route_match({"source": "::1-::2"})
- assert self.get(sock_type='ipv6')['status'] == 200, 'range 4'
- assert self.get(port=7081)['status'] == 404, 'range 4 ipv4'
- self.route_match({"source": "::0-::2"})
- assert self.get(sock_type='ipv6')['status'] == 200, 'range 5'
- assert self.get(port=7081)['status'] == 404, 'range 5 ipv4'
+def test_routes_match_query_array():
+ route_match({"query": ["foo", "bar"]})
- self.route_match({"source": "::0-::1"})
- assert self.get(sock_type='ipv6')['status'] == 200, 'range 6'
- assert self.get(port=7081)['status'] == 404, 'range 6 ipv4'
+ assert client.get()['status'] == 404, 'no args'
+ assert client.get(url='/?foo')['status'] == 200, 'arg first'
+ assert client.get(url='/?bar')['status'] == 200, 'arg second'
- def test_routes_source_cidr(self):
- assert 'success' in self.conf(
- {
- "*:7080": {"pass": "routes"},
- "[::1]:7081": {"pass": "routes"},
- },
- 'listeners',
- ), 'source listeners configure'
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/query/1'
+ ), 'query array remove second'
- def get_ipv6():
- return self.get(sock_type='ipv6', port=7081)
+ assert client.get(url='/?foo')['status'] == 200, 'still arg first'
+ assert client.get(url='/?bar')['status'] == 404, 'no arg second'
- self.route_match({"source": "127.0.0.1/32"})
- assert self.get()['status'] == 200, '32'
- assert get_ipv6()['status'] == 404, '32 ipv6'
+ route_match({"query": ["!f", "foo"]})
- self.route_match({"source": "127.0.0.0/32"})
- assert self.get()['status'] == 404, '32 2'
- assert get_ipv6()['status'] == 404, '32 2 ipv6'
+ assert client.get(url='/?f')['status'] == 404, 'negative arg'
+ assert client.get(url='/?fo')['status'] == 404, 'negative arg 2'
+ assert client.get(url='/?foo')['status'] == 200, 'negative arg 3'
+
+ route_match({"query": []})
+ assert client.get()['status'] == 200, 'empty array'
+
+
+def test_routes_match_query_invalid():
+ route_match_invalid({"query": [1]})
+ route_match_invalid({"query": "%"})
+ route_match_invalid({"query": "%1G"})
+ route_match_invalid({"query": "%0"})
+ route_match_invalid({"query": "%%1F"})
+ route_match_invalid({"query": ["foo", "%3D", "%%1F"]})
+
+
+def test_routes_match_cookies():
+ route_match({"cookies": {"foO": "bar"}})
- self.route_match({"source": "127.0.0.0/31"})
- assert self.get()['status'] == 200, '31'
- assert get_ipv6()['status'] == 404, '31 ipv6'
+ assert client.get()['status'] == 404, 'cookie'
+ cookie('foO=bar', 200)
+ cookie('foO=bar;1', 200)
+ cookie(['foO=bar', 'blah=blah'], 200)
+ cookie('foO=bar; blah=blah', 200)
+ cookie('Foo=bar', 404)
+ cookie('foO=Bar', 404)
+ cookie('foO=bar1', 404)
+ cookie('1foO=bar;', 404)
- self.route_match({"source": "0.0.0.0/1"})
- assert self.get()['status'] == 200, '1'
- assert get_ipv6()['status'] == 404, '1 ipv6'
- self.route_match({"source": "0.0.0.0/0"})
- assert self.get()['status'] == 200, '0'
- assert get_ipv6()['status'] == 404, '0 ipv6'
+def test_routes_match_cookies_empty():
+ route_match({"cookies": {}})
+ assert client.get()['status'] == 200, 'cookies empty'
- def test_routes_source_cidr_ipv6(self):
- assert 'success' in self.conf(
- {
- "[::1]:7080": {"pass": "routes"},
- "127.0.0.1:7081": {"pass": "routes"},
- },
- 'listeners',
- ), 'source listeners configure'
+ route_match({"cookies": []})
+ assert client.get()['status'] == 200, 'cookies empty 2'
- self.route_match({"source": "::1/128"})
- assert self.get(sock_type='ipv6')['status'] == 200, '128'
- assert self.get(port=7081)['status'] == 404, '128 ipv4'
- self.route_match({"source": "::0/128"})
- assert self.get(sock_type='ipv6')['status'] == 404, '128 2'
- assert self.get(port=7081)['status'] == 404, '128 ipv4'
+def test_routes_match_cookies_invalid():
+ route_match_invalid({"cookies": ["var"]})
+ route_match_invalid({"cookies": [{"foo": {}}]})
- self.route_match({"source": "::0/127"})
- assert self.get(sock_type='ipv6')['status'] == 200, '127'
- assert self.get(port=7081)['status'] == 404, '127 ipv4'
- self.route_match({"source": "::0/32"})
- assert self.get(sock_type='ipv6')['status'] == 200, '32'
- assert self.get(port=7081)['status'] == 404, '32 ipv4'
+def test_routes_match_cookies_complex():
+ route_match({"cookies": {"foo": "bar=baz"}})
+ cookie('foo=bar=baz', 200)
+ cookie(' foo=bar=baz ', 200)
+ cookie('=foo=bar=baz', 404)
- self.route_match({"source": "::0/1"})
- assert self.get(sock_type='ipv6')['status'] == 200, '1'
- assert self.get(port=7081)['status'] == 404, '1 ipv4'
+ route_match({"cookies": {"foo": ""}})
+ cookie('foo=', 200)
+ cookie('foo=;', 200)
+ cookie(' foo=;', 200)
+ cookie('foo', 404)
+ cookie('', 404)
+ cookie('=', 404)
- self.route_match({"source": "::/0"})
- assert self.get(sock_type='ipv6')['status'] == 200, '0'
- assert self.get(port=7081)['status'] == 404, '0 ipv4'
- def test_routes_source_unix(self, temp_dir):
- addr = f'{temp_dir}/sock'
+def test_routes_match_cookies_multiple():
+ route_match({"cookies": {"foo": "bar", "blah": "blah"}})
- assert 'success' in self.conf(
- {
- "127.0.0.1:7081": {"pass": "routes"},
- f'unix:{addr}': {"pass": "routes"},
- },
- 'listeners',
- ), 'source listeners configure'
+ assert client.get()['status'] == 404, 'multiple'
+ cookie('foo=bar; blah=blah', 200)
+ cookie(['foo=bar', 'blah=blah'], 200)
+ cookie(['foo=bar; blah', 'blah'], 404)
+ cookie(['foo=bar; blah=test', 'blah=blah'], 404)
- self.route_match({"source": "!0.0.0.0/0"})
- assert (
- self.get(sock_type='unix', addr=addr)['status'] == 200
- ), 'unix ipv4 neg'
- self.route_match({"source": "!::/0"})
- assert (
- self.get(sock_type='unix', addr=addr)['status'] == 200
- ), 'unix ipv6 neg'
+def test_routes_match_cookies_multiple_values():
+ route_match({"cookies": {"blah": "blah"}})
- self.route_match({"source": "unix"})
- assert self.get(port=7081)['status'] == 404, 'unix ipv4'
- assert self.get(sock_type='unix', addr=addr)['status'] == 200, 'unix'
+ cookie(['blah=blah', 'blah=blah', 'blah=blah'], 200)
+ cookie(['blah=blah', 'blah=test', 'blah=blah'], 404)
+ cookie(['blah=blah; blah=', 'blah=blah'], 404)
- def test_routes_match_source(self):
- self.route_match({"source": "::"})
- self.route_match(
- {
- "source": [
- "127.0.0.1",
- "192.168.0.10:8080",
- "192.168.0.11:8080-8090",
- ]
- }
- )
- self.route_match(
- {
- "source": [
- "10.0.0.0/8",
- "10.0.0.0/7:1000",
- "10.0.0.0/32:8080-8090",
- ]
- }
- )
- self.route_match(
- {
- "source": [
- "10.0.0.0-10.0.0.1",
- "10.0.0.0-11.0.0.0:1000",
- "127.0.0.0-127.0.0.255:8080-8090",
- ]
- }
- )
- self.route_match(
- {"source": ["2001::", "[2002::]:8000", "[2003::]:8080-8090"]}
- )
- self.route_match(
- {
- "source": [
- "2001::-200f:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
- "[fe08::-feff::]:8000",
- "[fff0::-fff0::10]:8080-8090",
- ]
- }
- )
- self.route_match(
- {
- "source": [
- "2001::/16",
- "[0ff::/64]:8000",
- "[fff0:abcd:ffff:ffff:ffff::/128]:8080-8090",
- ]
- }
- )
- self.route_match({"source": "*:0-65535"})
- assert self.get()['status'] == 200, 'source any'
-
- def test_routes_match_source_invalid(self):
- self.route_match_invalid({"source": "127"})
- self.route_match_invalid({"source": "256.0.0.1"})
- self.route_match_invalid({"source": "127.0.0."})
- self.route_match_invalid({"source": " 127.0.0.1"})
- self.route_match_invalid({"source": "127.0.0.1:"})
- self.route_match_invalid({"source": "127.0.0.1/"})
- self.route_match_invalid({"source": "11.0.0.0/33"})
- self.route_match_invalid({"source": "11.0.0.0/65536"})
- self.route_match_invalid({"source": "11.0.0.0-10.0.0.0"})
- self.route_match_invalid({"source": "11.0.0.0:3000-2000"})
- self.route_match_invalid({"source": ["11.0.0.0:3000-2000"]})
- self.route_match_invalid({"source": "[2001::]:3000-2000"})
- self.route_match_invalid({"source": "2001::-2000::"})
- self.route_match_invalid({"source": "2001::/129"})
- self.route_match_invalid({"source": "::FFFFF"})
- self.route_match_invalid({"source": "[::1]:"})
- self.route_match_invalid({"source": "[:::]:7080"})
- self.route_match_invalid({"source": "*:"})
- self.route_match_invalid({"source": "*:1-a"})
- self.route_match_invalid({"source": "*:65536"})
-
- def test_routes_match_source_none(self):
- self.route_match({"source": []})
- assert self.get()['status'] == 404, 'source none'
-
- def test_routes_match_destination(self):
- assert 'success' in self.conf(
- {"*:7080": {"pass": "routes"}, "*:7081": {"pass": "routes"}},
- 'listeners',
- ), 'listeners configure'
-
- self.route_match({"destination": "*:7080"})
- assert self.get()['status'] == 200, 'dest'
- assert self.get(port=7081)['status'] == 404, 'dest 2'
-
- self.route_match({"destination": ["127.0.0.1:7080"]})
- assert self.get()['status'] == 200, 'dest 3'
- assert self.get(port=7081)['status'] == 404, 'dest 4'
-
- self.route_match({"destination": "!*:7080"})
- assert self.get()['status'] == 404, 'dest neg'
- assert self.get(port=7081)['status'] == 200, 'dest neg 2'
-
- self.route_match({"destination": ['!*:7080', '!*:7081']})
- assert self.get()['status'] == 404, 'dest neg 3'
- assert self.get(port=7081)['status'] == 404, 'dest neg 4'
-
- self.route_match({"destination": ['!*:7081', '!*:7082']})
- assert self.get()['status'] == 200, 'dest neg 5'
-
- self.route_match({"destination": ['*:7080', '!*:7080']})
- assert self.get()['status'] == 404, 'dest neg 6'
-
- self.route_match(
- {"destination": ['127.0.0.1:7080', '*:7081', '!*:7080']}
- )
- assert self.get()['status'] == 404, 'dest neg 7'
- assert self.get(port=7081)['status'] == 200, 'dest neg 8'
- self.route_match({"destination": ['!*:7081', '!*:7082', '*:7083']})
- assert self.get()['status'] == 404, 'dest neg 9'
+def test_routes_match_cookies_multiple_rules():
+ route_match({"cookies": {"blah": ["test", "blah"]}})
- self.route_match(
- {"destination": ['*:7081', '!127.0.0.1:7080', '*:7080']}
- )
- assert self.get()['status'] == 404, 'dest neg 10'
- assert self.get(port=7081)['status'] == 200, 'dest neg 11'
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/destination/0'
- ), 'remove destination rule'
- assert self.get()['status'] == 404, 'dest neg 12'
- assert self.get(port=7081)['status'] == 404, 'dest neg 13'
-
- assert 'success' in self.conf_delete(
- 'routes/0/match/destination/0'
- ), 'remove destination rule 2'
- assert self.get()['status'] == 200, 'dest neg 14'
- assert self.get(port=7081)['status'] == 404, 'dest neg 15'
-
- assert 'success' in self.conf_post(
- "\"!127.0.0.1\"", 'routes/0/match/destination'
- ), 'add destination rule'
- assert self.get()['status'] == 404, 'dest neg 16'
- assert self.get(port=7081)['status'] == 404, 'dest neg 17'
-
- def test_routes_match_destination_proxy(self):
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes/first"},
- "*:7081": {"pass": "routes/second"},
- },
- "routes": {
- "first": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
- "second": [
- {
- "match": {"destination": ["127.0.0.1:7081"]},
- "action": {"return": 200},
- }
- ],
- },
- "applications": {},
- }
- ), 'proxy configure'
+ assert client.get()['status'] == 404, 'multiple rules'
+ cookie('blah=test', 200)
+ cookie('blah=blah', 200)
+ cookie(['blah=blah', 'blah=test', 'blah=blah'], 200)
+ cookie(['blah=blah; blah=test', 'blah=blah'], 200)
+ cookie(['blah=blah', 'blah'], 200) # invalid cookie
+
+
+def test_routes_match_cookies_array():
+ route_match(
+ {
+ "cookies": [
+ {"var1": "val1*"},
+ {"var2": "val2"},
+ {"var3": ["foo", "bar"]},
+ {"var1": "bar", "var4": "foo"},
+ ]
+ }
+ )
+
+ assert client.get()['status'] == 404, 'cookies array'
+ cookie('var1=val123', 200)
+ cookie('var2=val2', 200)
+ cookie(' var2=val2 ', 200)
+ cookie('var3=bar', 200)
+ cookie('var3=bar;', 200)
+ cookie('var1=bar', 404)
+ cookie('var1=bar; var4=foo;', 200)
+ cookie(['var1=bar', 'var4=foo'], 200)
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/cookies/1'
+ ), 'match cookies array configure 2'
+
+ cookie('var2=val2', 404)
+ cookie('var3=foo', 200)
+
+
+def test_routes_match_scheme():
+ route_match({"scheme": "http"})
+ route_match({"scheme": "https"})
+ route_match({"scheme": "HtTp"})
+ route_match({"scheme": "HtTpS"})
+
+
+def test_routes_match_scheme_invalid():
+ route_match_invalid({"scheme": ["http"]})
+ route_match_invalid({"scheme": "ftp"})
+ route_match_invalid({"scheme": "ws"})
+ route_match_invalid({"scheme": "*"})
+ route_match_invalid({"scheme": ""})
+
+
+def test_routes_source_port():
+ def sock_port():
+ sock = client.http(b'', raw=True, no_recv=True)
+ port = sock.getsockname()[1]
+ return (sock, port)
+
+ sock, port = sock_port()
+ sock2, _ = sock_port()
+
+ route_match({"source": f'127.0.0.1:{port}'})
+ assert client.get(sock=sock)['status'] == 200, 'exact'
+ assert client.get(sock=sock2)['status'] == 404, 'exact 2'
+
+ sock, port = sock_port()
+ sock2, _ = sock_port()
+
+ route_match({"source": f'!127.0.0.1:{port}'})
+ assert client.get(sock=sock)['status'] == 404, 'negative'
+ assert client.get(sock=sock2)['status'] == 200, 'negative 2'
+
+ sock, port = sock_port()
+ sock2, _ = sock_port()
+
+ route_match({"source": [f'*:{port}', "!127.0.0.1"]})
+ assert client.get(sock=sock)['status'] == 404, 'negative 3'
+ assert client.get(sock=sock2)['status'] == 404, 'negative 4'
+
+ sock, port = sock_port()
+ sock2, _ = sock_port()
+
+ route_match({"source": f'127.0.0.1:{port}-{port}'})
+ assert client.get(sock=sock)['status'] == 200, 'range single'
+ assert client.get(sock=sock2)['status'] == 404, 'range single 2'
+
+ socks = [
+ sock_port(),
+ sock_port(),
+ sock_port(),
+ sock_port(),
+ sock_port(),
+ ]
+ socks.sort(key=lambda sock: sock[1])
+
+ route_match({"source": f'127.0.0.1:{socks[1][1]}-{socks[3][1]}'})
+ assert client.get(sock=socks[0][0])['status'] == 404, 'range'
+ assert client.get(sock=socks[1][0])['status'] == 200, 'range 2'
+ assert client.get(sock=socks[2][0])['status'] == 200, 'range 3'
+ assert client.get(sock=socks[3][0])['status'] == 200, 'range 4'
+ assert client.get(sock=socks[4][0])['status'] == 404, 'range 5'
+
+ socks = [
+ sock_port(),
+ sock_port(),
+ sock_port(),
+ ]
+ socks.sort(key=lambda sock: sock[1])
+
+ route_match(
+ {
+ "source": [
+ f'127.0.0.1:{socks[0][1]}',
+ f'127.0.0.1:{socks[2][1]}',
+ ]
+ }
+ )
+ assert client.get(sock=socks[0][0])['status'] == 200, 'array'
+ assert client.get(sock=socks[1][0])['status'] == 404, 'array 2'
+ assert client.get(sock=socks[2][0])['status'] == 200, 'array 3'
+
+
+def test_routes_source_addr():
+ assert 'success' in client.conf(
+ {
+ "*:7080": {"pass": "routes"},
+ "[::1]:7081": {"pass": "routes"},
+ },
+ 'listeners',
+ ), 'source listeners configure'
+
+ def get_ipv6():
+ return client.get(sock_type='ipv6', port=7081)
+
+ route_match({"source": "127.0.0.1"})
+ assert client.get()['status'] == 200, 'exact'
+ assert get_ipv6()['status'] == 404, 'exact ipv6'
+
+ route_match({"source": ["127.0.0.1"]})
+ assert client.get()['status'] == 200, 'exact 2'
+ assert get_ipv6()['status'] == 404, 'exact 2 ipv6'
+
+ route_match({"source": "!127.0.0.1"})
+ assert client.get()['status'] == 404, 'exact neg'
+ assert get_ipv6()['status'] == 200, 'exact neg ipv6'
+
+ route_match({"source": "127.0.0.2"})
+ assert client.get()['status'] == 404, 'exact 3'
+ assert get_ipv6()['status'] == 404, 'exact 3 ipv6'
+
+ route_match({"source": "127.0.0.1-127.0.0.1"})
+ assert client.get()['status'] == 200, 'range single'
+ assert get_ipv6()['status'] == 404, 'range single ipv6'
+
+ route_match({"source": "127.0.0.2-127.0.0.2"})
+ assert client.get()['status'] == 404, 'range single 2'
+ assert get_ipv6()['status'] == 404, 'range single 2 ipv6'
+
+ route_match({"source": "127.0.0.2-127.0.0.3"})
+ assert client.get()['status'] == 404, 'range'
+ assert get_ipv6()['status'] == 404, 'range ipv6'
+
+ route_match({"source": "127.0.0.1-127.0.0.2"})
+ assert client.get()['status'] == 200, 'range 2'
+ assert get_ipv6()['status'] == 404, 'range 2 ipv6'
+
+ route_match({"source": "127.0.0.0-127.0.0.2"})
+ assert client.get()['status'] == 200, 'range 3'
+ assert get_ipv6()['status'] == 404, 'range 3 ipv6'
+
+ route_match({"source": "127.0.0.0-127.0.0.1"})
+ assert client.get()['status'] == 200, 'range 4'
+ assert get_ipv6()['status'] == 404, 'range 4 ipv6'
+
+ route_match({"source": "126.0.0.0-127.0.0.0"})
+ assert client.get()['status'] == 404, 'range 5'
+ assert get_ipv6()['status'] == 404, 'range 5 ipv6'
+
+ route_match({"source": "126.126.126.126-127.0.0.2"})
+ assert client.get()['status'] == 200, 'range 6'
+ assert get_ipv6()['status'] == 404, 'range 6 ipv6'
+
+
+def test_routes_source_ipv6():
+ assert 'success' in client.conf(
+ {
+ "[::1]:7080": {"pass": "routes"},
+ "127.0.0.1:7081": {"pass": "routes"},
+ },
+ 'listeners',
+ ), 'source listeners configure'
+
+ route_match({"source": "::1"})
+ assert client.get(sock_type='ipv6')['status'] == 200, 'exact'
+ assert client.get(port=7081)['status'] == 404, 'exact ipv4'
+
+ route_match({"source": ["::1"]})
+ assert client.get(sock_type='ipv6')['status'] == 200, 'exact 2'
+ assert client.get(port=7081)['status'] == 404, 'exact 2 ipv4'
+
+ route_match({"source": "!::1"})
+ assert client.get(sock_type='ipv6')['status'] == 404, 'exact neg'
+ assert client.get(port=7081)['status'] == 200, 'exact neg ipv4'
+
+ route_match({"source": "::2"})
+ assert client.get(sock_type='ipv6')['status'] == 404, 'exact 3'
+ assert client.get(port=7081)['status'] == 404, 'exact 3 ipv4'
+
+ route_match({"source": "::1-::1"})
+ assert client.get(sock_type='ipv6')['status'] == 200, 'range'
+ assert client.get(port=7081)['status'] == 404, 'range ipv4'
+
+ route_match({"source": "::2-::2"})
+ assert client.get(sock_type='ipv6')['status'] == 404, 'range 2'
+ assert client.get(port=7081)['status'] == 404, 'range 2 ipv4'
+
+ route_match({"source": "::2-::3"})
+ assert client.get(sock_type='ipv6')['status'] == 404, 'range 3'
+ assert client.get(port=7081)['status'] == 404, 'range 3 ipv4'
+
+ route_match({"source": "::1-::2"})
+ assert client.get(sock_type='ipv6')['status'] == 200, 'range 4'
+ assert client.get(port=7081)['status'] == 404, 'range 4 ipv4'
+
+ route_match({"source": "::0-::2"})
+ assert client.get(sock_type='ipv6')['status'] == 200, 'range 5'
+ assert client.get(port=7081)['status'] == 404, 'range 5 ipv4'
+
+ route_match({"source": "::0-::1"})
+ assert client.get(sock_type='ipv6')['status'] == 200, 'range 6'
+ assert client.get(port=7081)['status'] == 404, 'range 6 ipv4'
+
+
+def test_routes_source_cidr():
+ assert 'success' in client.conf(
+ {
+ "*:7080": {"pass": "routes"},
+ "[::1]:7081": {"pass": "routes"},
+ },
+ 'listeners',
+ ), 'source listeners configure'
+
+ def get_ipv6():
+ return client.get(sock_type='ipv6', port=7081)
+
+ route_match({"source": "127.0.0.1/32"})
+ assert client.get()['status'] == 200, '32'
+ assert get_ipv6()['status'] == 404, '32 ipv6'
+
+ route_match({"source": "127.0.0.0/32"})
+ assert client.get()['status'] == 404, '32 2'
+ assert get_ipv6()['status'] == 404, '32 2 ipv6'
+
+ route_match({"source": "127.0.0.0/31"})
+ assert client.get()['status'] == 200, '31'
+ assert get_ipv6()['status'] == 404, '31 ipv6'
+
+ route_match({"source": "0.0.0.0/1"})
+ assert client.get()['status'] == 200, '1'
+ assert get_ipv6()['status'] == 404, '1 ipv6'
+
+ route_match({"source": "0.0.0.0/0"})
+ assert client.get()['status'] == 200, '0'
+ assert get_ipv6()['status'] == 404, '0 ipv6'
+
+
+def test_routes_source_cidr_ipv6():
+ assert 'success' in client.conf(
+ {
+ "[::1]:7080": {"pass": "routes"},
+ "127.0.0.1:7081": {"pass": "routes"},
+ },
+ 'listeners',
+ ), 'source listeners configure'
+
+ route_match({"source": "::1/128"})
+ assert client.get(sock_type='ipv6')['status'] == 200, '128'
+ assert client.get(port=7081)['status'] == 404, '128 ipv4'
+
+ route_match({"source": "::0/128"})
+ assert client.get(sock_type='ipv6')['status'] == 404, '128 2'
+ assert client.get(port=7081)['status'] == 404, '128 ipv4'
+
+ route_match({"source": "::0/127"})
+ assert client.get(sock_type='ipv6')['status'] == 200, '127'
+ assert client.get(port=7081)['status'] == 404, '127 ipv4'
+
+ route_match({"source": "::0/32"})
+ assert client.get(sock_type='ipv6')['status'] == 200, '32'
+ assert client.get(port=7081)['status'] == 404, '32 ipv4'
+
+ route_match({"source": "::0/1"})
+ assert client.get(sock_type='ipv6')['status'] == 200, '1'
+ assert client.get(port=7081)['status'] == 404, '1 ipv4'
+
+ route_match({"source": "::/0"})
+ assert client.get(sock_type='ipv6')['status'] == 200, '0'
+ assert client.get(port=7081)['status'] == 404, '0 ipv4'
+
+
+def test_routes_source_unix(temp_dir):
+ addr = f'{temp_dir}/sock'
+
+ assert 'success' in client.conf(
+ {
+ "127.0.0.1:7081": {"pass": "routes"},
+ f'unix:{addr}': {"pass": "routes"},
+ },
+ 'listeners',
+ ), 'source listeners configure'
+
+ route_match({"source": "!0.0.0.0/0"})
+ assert (
+ client.get(sock_type='unix', addr=addr)['status'] == 200
+ ), 'unix ipv4 neg'
+
+ route_match({"source": "!::/0"})
+ assert (
+ client.get(sock_type='unix', addr=addr)['status'] == 200
+ ), 'unix ipv6 neg'
+
+ route_match({"source": "unix"})
+ assert client.get(port=7081)['status'] == 404, 'unix ipv4'
+ assert client.get(sock_type='unix', addr=addr)['status'] == 200, 'unix'
+
+
+def test_routes_match_source():
+ route_match({"source": "::"})
+ route_match(
+ {
+ "source": [
+ "127.0.0.1",
+ "192.168.0.10:8080",
+ "192.168.0.11:8080-8090",
+ ]
+ }
+ )
+ route_match(
+ {
+ "source": [
+ "10.0.0.0/8",
+ "10.0.0.0/7:1000",
+ "10.0.0.0/32:8080-8090",
+ ]
+ }
+ )
+ route_match(
+ {
+ "source": [
+ "10.0.0.0-10.0.0.1",
+ "10.0.0.0-11.0.0.0:1000",
+ "127.0.0.0-127.0.0.255:8080-8090",
+ ]
+ }
+ )
+ route_match({"source": ["2001::", "[2002::]:8000", "[2003::]:8080-8090"]})
+ route_match(
+ {
+ "source": [
+ "2001::-200f:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
+ "[fe08::-feff::]:8000",
+ "[fff0::-fff0::10]:8080-8090",
+ ]
+ }
+ )
+ route_match(
+ {
+ "source": [
+ "2001::/16",
+ "[0ff::/64]:8000",
+ "[fff0:abcd:ffff:ffff:ffff::/128]:8080-8090",
+ ]
+ }
+ )
+ route_match({"source": "*:0-65535"})
+ assert client.get()['status'] == 200, 'source any'
+
+
+def test_routes_match_source_invalid():
+ route_match_invalid({"source": "127"})
+ route_match_invalid({"source": "256.0.0.1"})
+ route_match_invalid({"source": "127.0.0."})
+ route_match_invalid({"source": " 127.0.0.1"})
+ route_match_invalid({"source": "127.0.0.1:"})
+ route_match_invalid({"source": "127.0.0.1/"})
+ route_match_invalid({"source": "11.0.0.0/33"})
+ route_match_invalid({"source": "11.0.0.0/65536"})
+ route_match_invalid({"source": "11.0.0.0-10.0.0.0"})
+ route_match_invalid({"source": "11.0.0.0:3000-2000"})
+ route_match_invalid({"source": ["11.0.0.0:3000-2000"]})
+ route_match_invalid({"source": "[2001::]:3000-2000"})
+ route_match_invalid({"source": "2001::-2000::"})
+ route_match_invalid({"source": "2001::/129"})
+ route_match_invalid({"source": "::FFFFF"})
+ route_match_invalid({"source": "[::1]:"})
+ route_match_invalid({"source": "[:::]:7080"})
+ route_match_invalid({"source": "*:"})
+ route_match_invalid({"source": "*:1-a"})
+ route_match_invalid({"source": "*:65536"})
+
+
+def test_routes_match_source_none():
+ route_match({"source": []})
+ assert client.get()['status'] == 404, 'source none'
+
+
+def test_routes_match_destination():
+ assert 'success' in client.conf(
+ {"*:7080": {"pass": "routes"}, "*:7081": {"pass": "routes"}},
+ 'listeners',
+ ), 'listeners configure'
+
+ route_match({"destination": "*:7080"})
+ assert client.get()['status'] == 200, 'dest'
+ assert client.get(port=7081)['status'] == 404, 'dest 2'
+
+ route_match({"destination": ["127.0.0.1:7080"]})
+ assert client.get()['status'] == 200, 'dest 3'
+ assert client.get(port=7081)['status'] == 404, 'dest 4'
+
+ route_match({"destination": "!*:7080"})
+ assert client.get()['status'] == 404, 'dest neg'
+ assert client.get(port=7081)['status'] == 200, 'dest neg 2'
+
+ route_match({"destination": ['!*:7080', '!*:7081']})
+ assert client.get()['status'] == 404, 'dest neg 3'
+ assert client.get(port=7081)['status'] == 404, 'dest neg 4'
+
+ route_match({"destination": ['!*:7081', '!*:7082']})
+ assert client.get()['status'] == 200, 'dest neg 5'
+
+ route_match({"destination": ['*:7080', '!*:7080']})
+ assert client.get()['status'] == 404, 'dest neg 6'
+
+ route_match({"destination": ['127.0.0.1:7080', '*:7081', '!*:7080']})
+ assert client.get()['status'] == 404, 'dest neg 7'
+ assert client.get(port=7081)['status'] == 200, 'dest neg 8'
+
+ route_match({"destination": ['!*:7081', '!*:7082', '*:7083']})
+ assert client.get()['status'] == 404, 'dest neg 9'
+
+ route_match({"destination": ['*:7081', '!127.0.0.1:7080', '*:7080']})
+ assert client.get()['status'] == 404, 'dest neg 10'
+ assert client.get(port=7081)['status'] == 200, 'dest neg 11'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/destination/0'
+ ), 'remove destination rule'
+ assert client.get()['status'] == 404, 'dest neg 12'
+ assert client.get(port=7081)['status'] == 404, 'dest neg 13'
+
+ assert 'success' in client.conf_delete(
+ 'routes/0/match/destination/0'
+ ), 'remove destination rule 2'
+ assert client.get()['status'] == 200, 'dest neg 14'
+ assert client.get(port=7081)['status'] == 404, 'dest neg 15'
+
+ assert 'success' in client.conf_post(
+ "\"!127.0.0.1\"", 'routes/0/match/destination'
+ ), 'add destination rule'
+ assert client.get()['status'] == 404, 'dest neg 16'
+ assert client.get(port=7081)['status'] == 404, 'dest neg 17'
+
+
+def test_routes_match_destination_proxy():
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes/first"},
+ "*:7081": {"pass": "routes/second"},
+ },
+ "routes": {
+ "first": [{"action": {"proxy": "http://127.0.0.1:7081"}}],
+ "second": [
+ {
+ "match": {"destination": ["127.0.0.1:7081"]},
+ "action": {"return": 200},
+ }
+ ],
+ },
+ "applications": {},
+ }
+ ), 'proxy configure'
- assert self.get()['status'] == 200, 'proxy'
+ assert client.get()['status'] == 200, 'proxy'
diff --git a/test/test_routing_tls.py b/test/test_routing_tls.py
index 76cfb485..4a97c8e4 100644
--- a/test/test_routing_tls.py
+++ b/test/test_routing_tls.py
@@ -1,28 +1,29 @@
-from unit.applications.tls import TestApplicationTLS
+from unit.applications.tls import ApplicationTLS
+prerequisites = {'modules': {'openssl': 'any'}}
-class TestRoutingTLS(TestApplicationTLS):
- prerequisites = {'modules': {'openssl': 'any'}}
+client = ApplicationTLS()
- def test_routes_match_scheme_tls(self):
- self.certificate()
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {
- "pass": "routes",
- "tls": {"certificate": 'default'},
- },
+def test_routes_match_scheme_tls():
+ client.certificate()
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {
+ "pass": "routes",
+ "tls": {"certificate": 'default'},
},
- "routes": [
- {"match": {"scheme": "http"}, "action": {"return": 200}},
- {"match": {"scheme": "https"}, "action": {"return": 201}},
- ],
- "applications": {},
- }
- ), 'scheme configure'
+ },
+ "routes": [
+ {"match": {"scheme": "http"}, "action": {"return": 200}},
+ {"match": {"scheme": "https"}, "action": {"return": 201}},
+ ],
+ "applications": {},
+ }
+ ), 'scheme configure'
- assert self.get()['status'] == 200, 'http'
- assert self.get_ssl(port=7081)['status'] == 201, 'https'
+ assert client.get()['status'] == 200, 'http'
+ assert client.get_ssl(port=7081)['status'] == 201, 'https'
diff --git a/test/test_ruby_application.py b/test/test_ruby_application.py
index 068b587b..6f533b70 100644
--- a/test/test_ruby_application.py
+++ b/test/test_ruby_application.py
@@ -2,418 +2,444 @@ import re
import subprocess
import pytest
-from unit.applications.lang.ruby import TestApplicationRuby
+from unit.applications.lang.ruby import ApplicationRuby
+prerequisites = {'modules': {'ruby': 'all'}}
-class TestRubyApplication(TestApplicationRuby):
- prerequisites = {'modules': {'ruby': 'all'}}
+client = ApplicationRuby()
- def test_ruby_application(self):
- self.load('variables')
- body = 'Test body string.'
+def test_ruby_application(date_to_sec_epoch, sec_epoch):
+ client.load('variables')
- resp = self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'Custom-Header': 'blah',
- 'Connection': 'close',
- },
- body=body,
- )
+ body = 'Test body string.'
- assert resp['status'] == 200, 'status'
- headers = resp['headers']
- header_server = headers.pop('Server')
- assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
- assert (
- headers.pop('Server-Software') == header_server
- ), 'server software header'
-
- date = headers.pop('Date')
- assert date[-4:] == ' GMT', 'date header timezone'
- assert (
- abs(self.date_to_sec_epoch(date) - self.sec_epoch()) < 5
- ), 'date header'
-
- assert headers == {
- 'Connection': 'close',
- 'Content-Length': str(len(body)),
+ resp = client.post(
+ headers={
+ 'Host': 'localhost',
'Content-Type': 'text/html',
- 'Request-Method': 'POST',
- 'Request-Uri': '/',
- 'Http-Host': 'localhost',
- 'Script-Name': '',
- 'Server-Protocol': 'HTTP/1.1',
'Custom-Header': 'blah',
- 'Rack-Version': '13',
- 'Rack-Url-Scheme': 'http',
- 'Rack-Multithread': 'false',
- 'Rack-Multiprocess': 'true',
- 'Rack-Run-Once': 'false',
- 'Rack-Hijack-Q': 'false',
- 'Rack-Hijack': '',
- 'Rack-Hijack-IO': '',
- }, 'headers'
- assert resp['body'] == body, 'body'
+ 'Connection': 'close',
+ },
+ body=body,
+ )
- def test_ruby_application_query_string(self):
- self.load('query_string')
+ assert resp['status'] == 200, 'status'
+ headers = resp['headers']
+ header_server = headers.pop('Server')
+ assert re.search(r'Unit/[\d\.]+', header_server), 'server header'
+ assert (
+ headers.pop('Server-Software') == header_server
+ ), 'server software header'
- resp = self.get(url='/?var1=val1&var2=val2')
+ date = headers.pop('Date')
+ assert date[-4:] == ' GMT', 'date header timezone'
+ assert abs(date_to_sec_epoch(date) - sec_epoch) < 5, 'date header'
- assert (
- resp['headers']['Query-String'] == 'var1=val1&var2=val2'
- ), 'Query-String header'
+ assert headers == {
+ 'Connection': 'close',
+ 'Content-Length': str(len(body)),
+ 'Content-Type': 'text/html',
+ 'Request-Method': 'POST',
+ 'Request-Uri': '/',
+ 'Http-Host': 'localhost',
+ 'Script-Name': '',
+ 'Server-Protocol': 'HTTP/1.1',
+ 'Custom-Header': 'blah',
+ 'Rack-Version': '13',
+ 'Rack-Url-Scheme': 'http',
+ 'Rack-Multithread': 'false',
+ 'Rack-Multiprocess': 'true',
+ 'Rack-Run-Once': 'false',
+ 'Rack-Hijack-Q': 'false',
+ 'Rack-Hijack': '',
+ 'Rack-Hijack-IO': '',
+ }, 'headers'
+ assert resp['body'] == body, 'body'
- def test_ruby_application_query_string_empty(self):
- self.load('query_string')
- resp = self.get(url='/?')
+def test_ruby_application_query_string():
+ client.load('query_string')
- assert resp['status'] == 200, 'query string empty status'
- assert resp['headers']['Query-String'] == '', 'query string empty'
+ resp = client.get(url='/?var1=val1&var2=val2')
- def test_ruby_application_query_string_absent(self):
- self.load('query_string')
+ assert (
+ resp['headers']['Query-String'] == 'var1=val1&var2=val2'
+ ), 'Query-String header'
- resp = self.get()
- assert resp['status'] == 200, 'query string absent status'
- assert resp['headers']['Query-String'] == '', 'query string absent'
+def test_ruby_application_query_string_empty():
+ client.load('query_string')
- @pytest.mark.skip('not yet')
- def test_ruby_application_server_port(self):
- self.load('server_port')
+ resp = client.get(url='/?')
- assert (
- self.get()['headers']['Server-Port'] == '7080'
- ), 'Server-Port header'
+ assert resp['status'] == 200, 'query string empty status'
+ assert resp['headers']['Query-String'] == '', 'query string empty'
- def test_ruby_application_status_int(self):
- self.load('status_int')
- assert self.get()['status'] == 200, 'status int'
+def test_ruby_application_query_string_absent():
+ client.load('query_string')
- def test_ruby_application_input_read_empty(self):
- self.load('input_read_empty')
+ resp = client.get()
- assert self.get()['body'] == '', 'read empty'
+ assert resp['status'] == 200, 'query string absent status'
+ assert resp['headers']['Query-String'] == '', 'query string absent'
- def test_ruby_application_input_read_parts(self):
- self.load('input_read_parts')
- assert (
- self.post(body='0123456789')['body'] == '012345678'
- ), 'input read parts'
+@pytest.mark.skip('not yet')
+def test_ruby_application_server_port():
+ client.load('server_port')
- def test_ruby_application_input_read_buffer(self):
- self.load('input_read_buffer')
+ assert (
+ client.get()['headers']['Server-Port'] == '7080'
+ ), 'Server-Port header'
- assert (
- self.post(body='0123456789')['body'] == '0123456789'
- ), 'input read buffer'
- def test_ruby_application_input_read_buffer_not_empty(self):
- self.load('input_read_buffer_not_empty')
+def test_ruby_application_status_int():
+ client.load('status_int')
- assert (
- self.post(body='0123456789')['body'] == '0123456789'
- ), 'input read buffer not empty'
+ assert client.get()['status'] == 200, 'status int'
- def test_ruby_application_input_gets(self):
- self.load('input_gets')
- body = '0123456789'
+def test_ruby_application_input_read_empty():
+ client.load('input_read_empty')
- assert self.post(body=body)['body'] == body, 'input gets'
+ assert client.get()['body'] == '', 'read empty'
- def test_ruby_application_input_gets_2(self):
- self.load('input_gets')
- assert (
- self.post(body='01234\n56789\n')['body'] == '01234\n'
- ), 'input gets 2'
+def test_ruby_application_input_read_parts():
+ client.load('input_read_parts')
- def test_ruby_application_input_gets_all(self):
- self.load('input_gets_all')
+ assert (
+ client.post(body='0123456789')['body'] == '012345678'
+ ), 'input read parts'
- body = '\n01234\n56789\n\n'
- assert self.post(body=body)['body'] == body, 'input gets all'
+def test_ruby_application_input_read_buffer():
+ client.load('input_read_buffer')
- def test_ruby_application_input_each(self):
- self.load('input_each')
+ assert (
+ client.post(body='0123456789')['body'] == '0123456789'
+ ), 'input read buffer'
- body = '\n01234\n56789\n\n'
- assert self.post(body=body)['body'] == body, 'input each'
+def test_ruby_application_input_read_buffer_not_empty():
+ client.load('input_read_buffer_not_empty')
- @pytest.mark.skip('not yet')
- def test_ruby_application_input_rewind(self):
- self.load('input_rewind')
+ assert (
+ client.post(body='0123456789')['body'] == '0123456789'
+ ), 'input read buffer not empty'
- body = '0123456789'
- assert self.post(body=body)['body'] == body, 'input rewind'
+def test_ruby_application_input_gets():
+ client.load('input_gets')
- @pytest.mark.skip('not yet')
- def test_ruby_application_syntax_error(self, skip_alert):
- skip_alert(
- r'Failed to parse rack script',
- r'syntax error',
- r'new_from_string',
- r'parse_file',
- )
- self.load('syntax_error')
+ body = '0123456789'
- assert self.get()['status'] == 500, 'syntax error'
+ assert client.post(body=body)['body'] == body, 'input gets'
- def test_ruby_application_errors_puts(self):
- self.load('errors_puts')
- assert self.get()['status'] == 200
+def test_ruby_application_input_gets_2():
+ client.load('input_gets')
- assert (
- self.wait_for_record(r'\[error\].+Error in application') is not None
- ), 'errors puts'
+ assert (
+ client.post(body='01234\n56789\n')['body'] == '01234\n'
+ ), 'input gets 2'
- def test_ruby_application_errors_puts_int(self):
- self.load('errors_puts_int')
- assert self.get()['status'] == 200
+def test_ruby_application_input_gets_all():
+ client.load('input_gets_all')
- assert (
- self.wait_for_record(r'\[error\].+1234567890') is not None
- ), 'errors puts int'
+ body = '\n01234\n56789\n\n'
- def test_ruby_application_errors_write(self):
- self.load('errors_write')
+ assert client.post(body=body)['body'] == body, 'input gets all'
- assert self.get()['status'] == 200
- assert (
- self.wait_for_record(r'\[error\].+Error in application') is not None
- ), 'errors write'
- def test_ruby_application_errors_write_to_s_custom(self):
- self.load('errors_write_to_s_custom')
+def test_ruby_application_input_each():
+ client.load('input_each')
- assert self.get()['status'] == 200, 'errors write to_s custom'
+ body = '\n01234\n56789\n\n'
- def test_ruby_application_errors_write_int(self):
- self.load('errors_write_int')
+ assert client.post(body=body)['body'] == body, 'input each'
- assert self.get()['status'] == 200
- assert (
- self.wait_for_record(r'\[error\].+1234567890') is not None
- ), 'errors write int'
- def test_ruby_application_at_exit(self):
- self.load('at_exit')
+@pytest.mark.skip('not yet')
+def test_ruby_application_input_rewind():
+ client.load('input_rewind')
- assert self.get()['status'] == 200
+ body = '0123456789'
- assert 'success' in self.conf({"listeners": {}, "applications": {}})
+ assert client.post(body=body)['body'] == body, 'input rewind'
- assert (
- self.wait_for_record(r'\[error\].+At exit called\.') is not None
- ), 'at exit'
- def test_ruby_application_encoding(self):
- self.load('encoding')
+@pytest.mark.skip('not yet')
+def test_ruby_application_syntax_error(skip_alert):
+ skip_alert(
+ r'Failed to parse rack script',
+ r'syntax error',
+ r'new_from_string',
+ r'parse_file',
+ )
+ client.load('syntax_error')
- try:
- locales = (
- subprocess.check_output(
- ['locale', '-a'],
- stderr=subprocess.STDOUT,
- )
- .decode()
- .split('\n')
- )
+ assert client.get()['status'] == 500, 'syntax error'
- except (FileNotFoundError, subprocess.CalledProcessError):
- pytest.skip('require locale')
-
- def get_locale(pattern):
- return next(
- (
- l
- for l in locales
- if re.match(pattern, l.upper()) is not None
- ),
- None,
- )
- utf8 = get_locale(r'.*UTF[-_]?8')
- iso88591 = get_locale(r'.*ISO[-_]?8859[-_]?1')
+def test_ruby_application_errors_puts(wait_for_record):
+ client.load('errors_puts')
+
+ assert client.get()['status'] == 200
+
+ assert (
+ wait_for_record(r'\[error\].+Error in application') is not None
+ ), 'errors puts'
+
+
+def test_ruby_application_errors_puts_int(wait_for_record):
+ client.load('errors_puts_int')
+
+ assert client.get()['status'] == 200
+
+ assert (
+ wait_for_record(r'\[error\].+1234567890') is not None
+ ), 'errors puts int'
- def check_locale(enc):
- assert 'success' in self.conf(
- {"LC_CTYPE": enc, "LC_ALL": ""},
- '/config/applications/encoding/environment',
- )
- resp = self.get()
- assert resp['status'] == 200, 'status'
+def test_ruby_application_errors_write(wait_for_record):
+ client.load('errors_write')
- enc_default = re.sub(r'[-_]', '', resp['headers']['X-Enc']).upper()
- assert (
- enc_default == re.sub(r'[-_]', '', enc.split('.')[-1]).upper()
+ assert client.get()['status'] == 200
+ assert (
+ wait_for_record(r'\[error\].+Error in application') is not None
+ ), 'errors write'
+
+
+def test_ruby_application_errors_write_to_s_custom():
+ client.load('errors_write_to_s_custom')
+
+ assert client.get()['status'] == 200, 'errors write to_s custom'
+
+
+def test_ruby_application_errors_write_int(wait_for_record):
+ client.load('errors_write_int')
+
+ assert client.get()['status'] == 200
+ assert (
+ wait_for_record(r'\[error\].+1234567890') is not None
+ ), 'errors write int'
+
+
+def test_ruby_application_at_exit(wait_for_record):
+ client.load('at_exit')
+
+ assert client.get()['status'] == 200
+
+ assert 'success' in client.conf({"listeners": {}, "applications": {}})
+
+ assert (
+ wait_for_record(r'\[error\].+At exit called\.') is not None
+ ), 'at exit'
+
+
+def test_ruby_application_encoding():
+ client.load('encoding')
+
+ try:
+ locales = (
+ subprocess.check_output(
+ ['locale', '-a'],
+ stderr=subprocess.STDOUT,
)
+ .decode()
+ .split('\n')
+ )
+
+ except (FileNotFoundError, subprocess.CalledProcessError):
+ pytest.skip('require locale')
+
+ def get_locale(pattern):
+ return next(
+ (l for l in locales if re.match(pattern, l.upper()) is not None),
+ None,
+ )
- if utf8:
- check_locale(utf8)
+ utf8 = get_locale(r'.*UTF[-_]?8')
+ iso88591 = get_locale(r'.*ISO[-_]?8859[-_]?1')
- if iso88591:
- check_locale(iso88591)
+ def check_locale(enc):
+ assert 'success' in client.conf(
+ {"LC_CTYPE": enc, "LC_ALL": ""},
+ '/config/applications/encoding/environment',
+ )
- if not utf8 and not iso88591:
- pytest.skip('no available locales')
+ resp = client.get()
+ assert resp['status'] == 200, 'status'
- def test_ruby_application_header_custom(self):
- self.load('header_custom')
+ enc_default = re.sub(r'[-_]', '', resp['headers']['X-Enc']).upper()
+ assert enc_default == re.sub(r'[-_]', '', enc.split('.')[-1]).upper()
- resp = self.post(body="\ntc=one,two\ntc=three,four,\n\n")
+ if utf8:
+ check_locale(utf8)
- assert resp['headers']['Custom-Header'] == [
- '',
- 'tc=one,two',
- 'tc=three,four,',
- '',
- '',
- ], 'header custom'
+ if iso88591:
+ check_locale(iso88591)
- @pytest.mark.skip('not yet')
- def test_ruby_application_header_custom_non_printable(self):
- self.load('header_custom')
+ if not utf8 and not iso88591:
+ pytest.skip('no available locales')
- assert (
- self.post(body='\b')['status'] == 500
- ), 'header custom non printable'
- def test_ruby_application_header_status(self):
- self.load('header_status')
+def test_ruby_application_header_custom():
+ client.load('header_custom')
- assert self.get()['status'] == 200, 'header status'
+ resp = client.post(body="\ntc=one,two\ntc=three,four,\n\n")
- @pytest.mark.skip('not yet')
- def test_ruby_application_header_rack(self):
- self.load('header_rack')
+ assert resp['headers']['Custom-Header'] == [
+ '',
+ 'tc=one,two',
+ 'tc=three,four,',
+ '',
+ '',
+ ], 'header custom'
- assert self.get()['status'] == 500, 'header rack'
- def test_ruby_application_body_empty(self):
- self.load('body_empty')
+@pytest.mark.skip('not yet')
+def test_ruby_application_header_custom_non_printable():
+ client.load('header_custom')
- assert self.get()['body'] == '', 'body empty'
+ assert (
+ client.post(body='\b')['status'] == 500
+ ), 'header custom non printable'
- def test_ruby_application_body_array(self):
- self.load('body_array')
- assert self.get()['body'] == '0123456789', 'body array'
+def test_ruby_application_header_status():
+ client.load('header_status')
- def test_ruby_application_body_large(self):
- self.load('mirror')
+ assert client.get()['status'] == 200, 'header status'
- body = '0123456789' * 1000
- assert self.post(body=body)['body'] == body, 'body large'
+@pytest.mark.skip('not yet')
+def test_ruby_application_header_rack():
+ client.load('header_rack')
- @pytest.mark.skip('not yet')
- def test_ruby_application_body_each_error(self):
- self.load('body_each_error')
+ assert client.get()['status'] == 500, 'header rack'
- assert self.get()['status'] == 500, 'body each error status'
- assert (
- self.wait_for_record(r'\[error\].+Failed to run ruby script')
- is not None
- ), 'body each error'
+def test_ruby_application_body_empty():
+ client.load('body_empty')
- def test_ruby_application_body_file(self):
- self.load('body_file')
+ assert client.get()['body'] == '', 'body empty'
- assert self.get()['body'] == 'body\n', 'body file'
- def test_ruby_keepalive_body(self):
- self.load('mirror')
+def test_ruby_application_body_array():
+ client.load('body_array')
- assert self.get()['status'] == 200, 'init'
+ assert client.get()['body'] == '0123456789', 'body array'
- body = '0123456789' * 500
- (resp, sock) = self.post(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body=body,
- read_timeout=1,
- )
- assert resp['body'] == body, 'keep-alive 1'
+def test_ruby_application_body_large():
+ client.load('mirror')
- body = '0123456789'
- resp = self.post(sock=sock, body=body)
+ body = '0123456789' * 1000
- assert resp['body'] == body, 'keep-alive 2'
+ assert client.post(body=body)['body'] == body, 'body large'
- def test_ruby_application_constants(self):
- self.load('constants')
- resp = self.get()
+@pytest.mark.skip('not yet')
+def test_ruby_application_body_each_error(wait_for_record):
+ client.load('body_each_error')
- assert resp['status'] == 200, 'status'
+ assert client.get()['status'] == 500, 'body each error status'
- headers = resp['headers']
- assert len(headers['X-Copyright']) > 0, 'RUBY_COPYRIGHT'
- assert len(headers['X-Description']) > 0, 'RUBY_DESCRIPTION'
- assert len(headers['X-Engine']) > 0, 'RUBY_ENGINE'
- assert len(headers['X-Engine-Version']) > 0, 'RUBY_ENGINE_VERSION'
- assert len(headers['X-Patchlevel']) > 0, 'RUBY_PATCHLEVEL'
- assert len(headers['X-Platform']) > 0, 'RUBY_PLATFORM'
- assert len(headers['X-Release-Date']) > 0, 'RUBY_RELEASE_DATE'
- assert len(headers['X-Revision']) > 0, 'RUBY_REVISION'
- assert len(headers['X-Version']) > 0, 'RUBY_VERSION'
-
- def test_ruby_application_threads(self):
- self.load('threads')
-
- assert 'success' in self.conf(
- '4', 'applications/threads/threads'
- ), 'configure 4 threads'
-
- socks = []
-
- for i in range(4):
- sock = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Delay': '2',
- 'Connection': 'close',
- },
- no_recv=True,
- )
+ assert (
+ wait_for_record(r'\[error\].+Failed to run ruby script') is not None
+ ), 'body each error'
+
+
+def test_ruby_application_body_file():
+ client.load('body_file')
+
+ assert client.get()['body'] == 'body\n', 'body file'
+
+
+def test_ruby_keepalive_body():
+ client.load('mirror')
+
+ assert client.get()['status'] == 200, 'init'
+
+ body = '0123456789' * 500
+ (resp, sock) = client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body=body,
+ read_timeout=1,
+ )
+
+ assert resp['body'] == body, 'keep-alive 1'
+
+ body = '0123456789'
+ resp = client.post(sock=sock, body=body)
- socks.append(sock)
+ assert resp['body'] == body, 'keep-alive 2'
- threads = set()
- for sock in socks:
- resp = self.recvall(sock).decode('utf-8')
+def test_ruby_application_constants():
+ client.load('constants')
- self.log_in(resp)
+ resp = client.get()
- resp = self._resp_to_dict(resp)
+ assert resp['status'] == 200, 'status'
- assert resp['status'] == 200, 'status'
+ headers = resp['headers']
+ assert len(headers['X-Copyright']) > 0, 'RUBY_COPYRIGHT'
+ assert len(headers['X-Description']) > 0, 'RUBY_DESCRIPTION'
+ assert len(headers['X-Engine']) > 0, 'RUBY_ENGINE'
+ assert len(headers['X-Engine-Version']) > 0, 'RUBY_ENGINE_VERSION'
+ assert len(headers['X-Patchlevel']) > 0, 'RUBY_PATCHLEVEL'
+ assert len(headers['X-Platform']) > 0, 'RUBY_PLATFORM'
+ assert len(headers['X-Release-Date']) > 0, 'RUBY_RELEASE_DATE'
+ assert len(headers['X-Revision']) > 0, 'RUBY_REVISION'
+ assert len(headers['X-Version']) > 0, 'RUBY_VERSION'
+
+
+def test_ruby_application_threads():
+ client.load('threads')
+
+ assert 'success' in client.conf(
+ '4', 'applications/threads/threads'
+ ), 'configure 4 threads'
+
+ socks = []
+
+ for _ in range(4):
+ sock = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Delay': '2',
+ 'Connection': 'close',
+ },
+ no_recv=True,
+ )
+
+ socks.append(sock)
+
+ threads = set()
+
+ for sock in socks:
+ resp = client.recvall(sock).decode('utf-8')
+
+ client.log_in(resp)
+
+ resp = client._resp_to_dict(resp)
+
+ assert resp['status'] == 200, 'status'
- threads.add(resp['headers']['X-Thread'])
+ threads.add(resp['headers']['X-Thread'])
- assert resp['headers']['Rack-Multithread'] == 'true', 'multithread'
+ assert resp['headers']['Rack-Multithread'] == 'true', 'multithread'
- sock.close()
+ sock.close()
- assert len(socks) == len(threads), 'threads differs'
+ assert len(socks) == len(threads), 'threads differs'
diff --git a/test/test_ruby_hooks.py b/test/test_ruby_hooks.py
index 078e5723..38893e47 100644
--- a/test/test_ruby_hooks.py
+++ b/test/test_ruby_hooks.py
@@ -1,94 +1,99 @@
-from unit.applications.lang.ruby import TestApplicationRuby
+from unit.applications.lang.ruby import ApplicationRuby
from unit.option import option
from unit.utils import waitforglob
+prerequisites = {'modules': {'ruby': 'all'}}
-class TestRubyHooks(TestApplicationRuby):
- prerequisites = {'modules': {'ruby': 'all'}}
+client = ApplicationRuby()
- def _wait_cookie(self, pattern, count):
- return waitforglob(
- f'{option.temp_dir}/ruby/hooks/cookie_{pattern}', count
- )
- def test_ruby_hooks_eval(self):
- processes = 2
+def wait_cookie(pattern, count):
+ return waitforglob(f'{option.temp_dir}/ruby/hooks/cookie_{pattern}', count)
- self.load('hooks', processes=processes, hooks='eval.rb')
- hooked = self._wait_cookie('eval.*', processes)
+def test_ruby_hooks_eval():
+ processes = 2
- assert hooked, 'hooks evaluated'
+ client.load('hooks', processes=processes, hooks='eval.rb')
- def test_ruby_hooks_on_worker_boot(self):
- processes = 2
+ hooked = wait_cookie('eval.*', processes)
- self.load('hooks', processes=processes, hooks='on_worker_boot.rb')
+ assert hooked, 'hooks evaluated'
- hooked = self._wait_cookie('worker_boot.*', processes)
- assert hooked, 'on_worker_boot called'
+def test_ruby_hooks_on_worker_boot():
+ processes = 2
- def test_ruby_hooks_on_worker_shutdown(self):
- processes = 2
+ client.load('hooks', processes=processes, hooks='on_worker_boot.rb')
- self.load('hooks', processes=processes, hooks='on_worker_shutdown.rb')
+ hooked = wait_cookie('worker_boot.*', processes)
- assert self.get()['status'] == 200, 'app response'
+ assert hooked, 'on_worker_boot called'
- self.load('empty')
- hooked = self._wait_cookie('worker_shutdown.*', processes)
+def test_ruby_hooks_on_worker_shutdown():
+ processes = 2
- assert hooked, 'on_worker_shutdown called'
+ client.load('hooks', processes=processes, hooks='on_worker_shutdown.rb')
- def test_ruby_hooks_on_thread_boot(self):
- processes = 1
- threads = 2
+ assert client.get()['status'] == 200, 'app response'
- self.load(
- 'hooks',
- processes=processes,
- threads=threads,
- hooks='on_thread_boot.rb',
- )
+ client.load('empty')
- hooked = self._wait_cookie('thread_boot.*', processes * threads)
+ hooked = wait_cookie('worker_shutdown.*', processes)
- assert hooked, 'on_thread_boot called'
+ assert hooked, 'on_worker_shutdown called'
- def test_ruby_hooks_on_thread_shutdown(self):
- processes = 1
- threads = 2
- self.load(
- 'hooks',
- processes=processes,
- threads=threads,
- hooks='on_thread_shutdown.rb',
- )
+def test_ruby_hooks_on_thread_boot():
+ processes = 1
+ threads = 2
- assert self.get()['status'] == 200, 'app response'
+ client.load(
+ 'hooks',
+ processes=processes,
+ threads=threads,
+ hooks='on_thread_boot.rb',
+ )
- self.load('empty')
+ hooked = wait_cookie('thread_boot.*', processes * threads)
- hooked = self._wait_cookie('thread_shutdown.*', processes * threads)
+ assert hooked, 'on_thread_boot called'
- assert hooked, 'on_thread_shutdown called'
- def test_ruby_hooks_multiple(self):
- processes = 1
- threads = 1
+def test_ruby_hooks_on_thread_shutdown():
+ processes = 1
+ threads = 2
- self.load(
- 'hooks',
- processes=processes,
- threads=threads,
- hooks='multiple.rb',
- )
+ client.load(
+ 'hooks',
+ processes=processes,
+ threads=threads,
+ hooks='on_thread_shutdown.rb',
+ )
- hooked = self._wait_cookie('worker_boot.*', processes)
- assert hooked, 'on_worker_boot called'
+ assert client.get()['status'] == 200, 'app response'
- hooked = self._wait_cookie('thread_boot.*', threads)
- assert hooked, 'on_thread_boot called'
+ client.load('empty')
+
+ hooked = wait_cookie('thread_shutdown.*', processes * threads)
+
+ assert hooked, 'on_thread_shutdown called'
+
+
+def test_ruby_hooks_multiple():
+ processes = 1
+ threads = 1
+
+ client.load(
+ 'hooks',
+ processes=processes,
+ threads=threads,
+ hooks='multiple.rb',
+ )
+
+ hooked = wait_cookie('worker_boot.*', processes)
+ assert hooked, 'on_worker_boot called'
+
+ hooked = wait_cookie('thread_boot.*', threads)
+ assert hooked, 'on_thread_boot called'
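
Note: these hook tests only pass once the hook scripts have dropped their cookie files, so they poll the filesystem through wait_cookie()/waitforglob(). The real helper lives in unit/utils and may differ in detail; a rough equivalent of the polling it implies:

import glob
import time

def wait_for_glob(pattern, count, timeout=5.0, interval=0.1):
    """Poll until at least `count` files match `pattern`; False on timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if len(glob.glob(pattern)) >= count:
            return True
        time.sleep(interval)
    return False
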
diff --git a/test/test_ruby_isolation.py b/test/test_ruby_isolation.py
index ea208523..59c0e5f6 100644
--- a/test/test_ruby_isolation.py
+++ b/test/test_ruby_isolation.py
@@ -1,46 +1,43 @@
-import pytest
-from unit.applications.lang.ruby import TestApplicationRuby
-from unit.option import option
+from unit.applications.lang.ruby import ApplicationRuby
+prerequisites = {'modules': {'ruby': 'any'}, 'features': {'isolation': True}}
-class TestRubyIsolation(TestApplicationRuby):
- prerequisites = {'modules': {'ruby': 'any'}, 'features': ['isolation']}
+client = ApplicationRuby()
- def test_ruby_isolation_rootfs(self, is_su):
- isolation_features = option.available['features']['isolation'].keys()
- if not is_su:
- if not 'unprivileged_userns_clone' in isolation_features:
- pytest.skip('requires unprivileged userns or root')
+def test_ruby_isolation_rootfs(is_su, require, temp_dir):
+ isolation = {'rootfs': temp_dir}
- if 'user' not in isolation_features:
- pytest.skip('user namespace is not supported')
-
- if 'mnt' not in isolation_features:
- pytest.skip('mnt namespace is not supported')
-
- if 'pid' not in isolation_features:
- pytest.skip('pid namespace is not supported')
-
- isolation = {'rootfs': option.temp_dir}
-
- if not is_su:
- isolation['namespaces'] = {
- 'mount': True,
- 'credential': True,
- 'pid': True,
+ if not is_su:
+ require(
+ {
+ 'features': {
+ 'isolation': [
+ 'unprivileged_userns_clone',
+ 'user',
+ 'mnt',
+ 'pid',
+ ]
+ }
}
+ )
- self.load('status_int', isolation=isolation)
+ isolation['namespaces'] = {
+ 'mount': True,
+ 'credential': True,
+ 'pid': True,
+ }
- assert 'success' in self.conf(
- '"/ruby/status_int/config.ru"',
- 'applications/status_int/script',
- )
+ client.load('status_int', isolation=isolation)
- assert 'success' in self.conf(
- '"/ruby/status_int"',
- 'applications/status_int/working_directory',
- )
+ assert 'success' in client.conf(
+ '"/ruby/status_int/config.ru"',
+ 'applications/status_int/script',
+ )
+
+ assert 'success' in client.conf(
+ '"/ruby/status_int"',
+ 'applications/status_int/working_directory',
+ )
- assert self.get()['status'] == 200, 'status int'
+ assert client.get()['status'] == 200, 'status int'
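
Note: taken together, client.load('status_int', isolation=...) and the two conf() calls in this test amount to an application object of roughly the following shape being pushed to Unit's /config API (paths are placeholders for the per-run temp_dir; the namespaces block is added only when the test runs unprivileged):

app_config = {
    "type": "ruby",
    "script": "/ruby/status_int/config.ru",
    "working_directory": "/ruby/status_int",
    "isolation": {
        "rootfs": "/path/to/temp_dir",  # app files are resolved inside this root
        "namespaces": {"mount": True, "credential": True, "pid": True},
    },
}
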
diff --git a/test/test_settings.py b/test/test_settings.py
index 21ab22d9..33180046 100644
--- a/test/test_settings.py
+++ b/test/test_settings.py
@@ -1,198 +1,197 @@
import re
import socket
+import subprocess
import time
import pytest
-from unit.applications.lang.python import TestApplicationPython
-from unit.utils import sysctl
+from unit.applications.lang.python import ApplicationPython
+prerequisites = {'modules': {'python': 'any'}}
-class TestSettings(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def test_settings_large_header_buffer_size(self):
- self.load('empty')
- def set_buffer_size(size):
- assert 'success' in self.conf(
- {'http': {'large_header_buffer_size': size}},
- 'settings',
- )
+def sysctl():
+ try:
+ out = subprocess.check_output(
+ ['sysctl', '-a'], stderr=subprocess.STDOUT
+ ).decode()
+ except FileNotFoundError:
+ pytest.skip('requires sysctl')
- def header_value(size, expect=200):
- headers = {'Host': 'a' * (size - 1), 'Connection': 'close'}
- assert self.get(headers=headers)['status'] == expect
+ return out
- set_buffer_size(4096)
- header_value(4096)
- header_value(4097, 431)
- set_buffer_size(16384)
- header_value(16384)
- header_value(16385, 431)
+def test_settings_large_header_buffer_size():
+ client.load('empty')
- def test_settings_large_header_buffers(self):
- self.load('empty')
+ def set_buffer_size(size):
+ assert 'success' in client.conf(
+ {'http': {'large_header_buffer_size': size}},
+ 'settings',
+ )
- def set_buffers(buffers):
- assert 'success' in self.conf(
- {'http': {'large_header_buffers': buffers}},
- 'settings',
- )
+ def header_value(size, expect=200):
+ headers = {'Host': 'a' * (size - 1), 'Connection': 'close'}
+ assert client.get(headers=headers)['status'] == expect
- def big_headers(headers_num, expect=200):
- headers = {'Host': 'localhost', 'Connection': 'close'}
+ set_buffer_size(4096)
+ header_value(4096)
+ header_value(4097, 431)
- for i in range(headers_num):
- headers[f'Custom-header-{i}'] = 'a' * 8000
+ set_buffer_size(16384)
+ header_value(16384)
+ header_value(16385, 431)
- assert self.get(headers=headers)['status'] == expect
- set_buffers(1)
- big_headers(1)
- big_headers(2, 431)
+def test_settings_large_header_buffers():
+ client.load('empty')
- set_buffers(2)
- big_headers(2)
- big_headers(3, 431)
+ def set_buffers(buffers):
+ assert 'success' in client.conf(
+ {'http': {'large_header_buffers': buffers}},
+ 'settings',
+ )
- set_buffers(8)
- big_headers(8)
- big_headers(9, 431)
+ def big_headers(headers_num, expect=200):
+ headers = {'Host': 'localhost', 'Connection': 'close'}
- @pytest.mark.skip('not yet')
- def test_settings_large_header_buffer_invalid(self):
- def check_error(conf):
- assert 'error' in self.conf({'http': conf}, 'settings')
+ for i in range(headers_num):
+ headers[f'Custom-header-{i}'] = 'a' * 8000
- check_error({'large_header_buffer_size': -1})
- check_error({'large_header_buffer_size': 0})
- check_error({'large_header_buffers': -1})
- check_error({'large_header_buffers': 0})
+ assert client.get(headers=headers)['status'] == expect
- def test_settings_header_read_timeout(self):
- self.load('empty')
+ set_buffers(1)
+ big_headers(1)
+ big_headers(2, 431)
- def req():
- (resp, sock) = self.http(
- b"""GET / HTTP/1.1
-""",
- start=True,
- read_timeout=1,
- raw=True,
- )
+ set_buffers(2)
+ big_headers(2)
+ big_headers(3, 431)
- time.sleep(3)
+ set_buffers(8)
+ big_headers(8)
+ big_headers(9, 431)
- return self.http(
- b"""Host: localhost
-Connection: close
- """,
- sock=sock,
- raw=True,
- )
+@pytest.mark.skip('not yet')
+def test_settings_large_header_buffer_invalid():
+ def check_error(conf):
+ assert 'error' in client.conf({'http': conf}, 'settings')
- assert 'success' in self.conf(
- {'http': {'header_read_timeout': 2}}, 'settings'
- )
- assert req()['status'] == 408, 'status header read timeout'
+ check_error({'large_header_buffer_size': -1})
+ check_error({'large_header_buffer_size': 0})
+ check_error({'large_header_buffers': -1})
+ check_error({'large_header_buffers': 0})
- assert 'success' in self.conf(
- {'http': {'header_read_timeout': 7}}, 'settings'
- )
- assert req()['status'] == 200, 'status header read timeout 2'
- def test_settings_header_read_timeout_update(self):
- self.load('empty')
+def test_settings_server_version():
+ client.load('empty')
- assert 'success' in self.conf(
- {'http': {'header_read_timeout': 4}}, 'settings'
- )
+ assert client.get()['headers']['Server'].startswith('Unit/')
+
+ assert 'success' in client.conf(
+ {"http": {"server_version": False}}, 'settings'
+ ), 'remove version'
+ assert client.get()['headers']['Server'] == 'Unit'
+
+ assert 'success' in client.conf(
+ {"http": {"server_version": True}}, 'settings'
+ ), 'add version'
+ assert client.get()['headers']['Server'].startswith('Unit/')
- sock = self.http(
+
+def test_settings_header_read_timeout():
+ client.load('empty')
+
+ def req():
+ (_, sock) = client.http(
b"""GET / HTTP/1.1
""",
+ start=True,
+ read_timeout=1,
raw=True,
- no_recv=True,
)
- time.sleep(2)
+ time.sleep(3)
- sock = self.http(
+ return client.http(
b"""Host: localhost
+Connection: close
+
""",
sock=sock,
raw=True,
- no_recv=True,
)
- time.sleep(2)
+ assert 'success' in client.conf(
+ {'http': {'header_read_timeout': 2}}, 'settings'
+ )
+ assert req()['status'] == 408, 'status header read timeout'
- (resp, sock) = self.http(
- b"""X-Blah: blah
-""",
- start=True,
- sock=sock,
- read_timeout=1,
- raw=True,
- )
+ assert 'success' in client.conf(
+ {'http': {'header_read_timeout': 7}}, 'settings'
+ )
+ assert req()['status'] == 200, 'status header read timeout 2'
- if len(resp) != 0:
- sock.close()
- else:
- time.sleep(2)
+def test_settings_header_read_timeout_update():
+ client.load('empty')
- resp = self.http(
- b"""Connection: close
+ assert 'success' in client.conf(
+ {'http': {'header_read_timeout': 4}}, 'settings'
+ )
+ sock = client.http(
+ b"""GET / HTTP/1.1
""",
- sock=sock,
- raw=True,
- )
+ raw=True,
+ no_recv=True,
+ )
- assert resp['status'] == 408, 'status header read timeout update'
+ time.sleep(2)
- def test_settings_body_read_timeout(self):
- self.load('empty')
+ sock = client.http(
+ b"""Host: localhost
+""",
+ sock=sock,
+ raw=True,
+ no_recv=True,
+ )
- def req():
- (resp, sock) = self.http(
- b"""POST / HTTP/1.1
-Host: localhost
-Content-Length: 10
-Connection: close
+ time.sleep(2)
+ (resp, sock) = client.http(
+ b"""X-Blah: blah
""",
- start=True,
- raw_resp=True,
- read_timeout=1,
- raw=True,
- )
+ start=True,
+ sock=sock,
+ read_timeout=1,
+ raw=True,
+ )
- time.sleep(3)
+ if len(resp) != 0:
+ sock.close()
- return self.http(b"""0123456789""", sock=sock, raw=True)
+ else:
+ time.sleep(2)
- assert 'success' in self.conf(
- {'http': {'body_read_timeout': 2}}, 'settings'
- )
- assert req()['status'] == 408, 'status body read timeout'
+ resp = client.http(
+ b"""Connection: close
- assert 'success' in self.conf(
- {'http': {'body_read_timeout': 7}}, 'settings'
+""",
+ sock=sock,
+ raw=True,
)
- assert req()['status'] == 200, 'status body read timeout 2'
- def test_settings_body_read_timeout_update(self):
- self.load('empty')
+ assert resp['status'] == 408, 'status header read timeout update'
- assert 'success' in self.conf(
- {'http': {'body_read_timeout': 4}}, 'settings'
- )
- (resp, sock) = self.http(
+def test_settings_body_read_timeout():
+ client.load('empty')
+
+ def req():
+ (_, sock) = client.http(
b"""POST / HTTP/1.1
Host: localhost
Content-Length: 10
@@ -200,350 +199,389 @@ Connection: close
""",
start=True,
+ raw_resp=True,
read_timeout=1,
raw=True,
)
- time.sleep(2)
+ time.sleep(3)
- (resp, sock) = self.http(
- b"""012""", start=True, sock=sock, read_timeout=1, raw=True
- )
+ return client.http(b"""0123456789""", sock=sock, raw=True)
- time.sleep(2)
+ assert 'success' in client.conf(
+ {'http': {'body_read_timeout': 2}}, 'settings'
+ )
+ assert req()['status'] == 408, 'status body read timeout'
- (resp, sock) = self.http(
- b"""345""", start=True, sock=sock, read_timeout=1, raw=True
- )
+ assert 'success' in client.conf(
+ {'http': {'body_read_timeout': 7}}, 'settings'
+ )
+ assert req()['status'] == 200, 'status body read timeout 2'
- time.sleep(2)
- resp = self.http(b"""6789""", sock=sock, raw=True)
+def test_settings_body_read_timeout_update():
+ client.load('empty')
+
+ assert 'success' in client.conf(
+ {'http': {'body_read_timeout': 4}}, 'settings'
+ )
+
+ (resp, sock) = client.http(
+ b"""POST / HTTP/1.1
+Host: localhost
+Content-Length: 10
+Connection: close
+
+""",
+ start=True,
+ read_timeout=1,
+ raw=True,
+ )
+
+ time.sleep(2)
+
+ (resp, sock) = client.http(
+ b"""012""", start=True, sock=sock, read_timeout=1, raw=True
+ )
+
+ time.sleep(2)
+
+ (resp, sock) = client.http(
+ b"""345""", start=True, sock=sock, read_timeout=1, raw=True
+ )
+
+ time.sleep(2)
+
+ resp = client.http(b"""6789""", sock=sock, raw=True)
- assert resp['status'] == 200, 'status body read timeout update'
+ assert resp['status'] == 200, 'status body read timeout update'
- def test_settings_send_timeout(self, temp_dir):
- self.load('body_generate')
- def req(addr, data_len):
- sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.connect(addr)
+def test_settings_send_timeout(temp_dir):
+ client.load('body_generate')
- req = f"""GET / HTTP/1.1
+ def req(addr, data_len):
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ sock.connect(addr)
+
+ req = f"""GET / HTTP/1.1
Host: localhost
X-Length: {data_len}
Connection: close
"""
- sock.sendall(req.encode())
+ sock.sendall(req.encode())
- data = sock.recv(16).decode()
+ data = sock.recv(16).decode()
- time.sleep(3)
+ time.sleep(3)
- data += self.recvall(sock).decode()
+ data += client.recvall(sock).decode()
- sock.close()
+ sock.close()
- return data
+ return data
- sysctl_out = sysctl()
- values = re.findall(
- r'net.core.[rw]mem_(?:max|default).*?(\d+)', sysctl_out
- )
- values = [int(v) for v in values]
+ sysctl_out = sysctl()
+ values = re.findall(r'net.core.[rw]mem_(?:max|default).*?(\d+)', sysctl_out)
+ values = [int(v) for v in values]
- data_len = 1048576 if len(values) == 0 else 10 * max(values)
+ data_len = 1048576 if len(values) == 0 else 10 * max(values)
- addr = f'{temp_dir}/sock'
+ addr = f'{temp_dir}/sock'
- assert 'success' in self.conf(
- {f'unix:{addr}': {'application': 'body_generate'}}, 'listeners'
- )
+ assert 'success' in client.conf(
+ {f'unix:{addr}': {'application': 'body_generate'}}, 'listeners'
+ )
- assert 'success' in self.conf({'http': {'send_timeout': 1}}, 'settings')
+ assert 'success' in client.conf({'http': {'send_timeout': 1}}, 'settings')
- data = req(addr, data_len)
- assert re.search(r'200 OK', data), 'send timeout status'
- assert len(data) < data_len, 'send timeout data '
+ data = req(addr, data_len)
+ assert re.search(r'200 OK', data), 'send timeout status'
+ assert len(data) < data_len, 'send timeout data '
- self.conf({'http': {'send_timeout': 7}}, 'settings')
+ client.conf({'http': {'send_timeout': 7}}, 'settings')
- data = req(addr, data_len)
- assert re.search(r'200 OK', data), 'send timeout status 2'
- assert len(data) > data_len, 'send timeout data 2'
+ data = req(addr, data_len)
+ assert re.search(r'200 OK', data), 'send timeout status 2'
+ assert len(data) > data_len, 'send timeout data 2'
- def test_settings_idle_timeout(self):
- self.load('empty')
- def req():
- (resp, sock) = self.get(
- headers={'Host': 'localhost', 'Connection': 'keep-alive'},
- start=True,
- read_timeout=1,
- )
+def test_settings_idle_timeout():
+ client.load('empty')
- time.sleep(3)
+ def req():
+ (_, sock) = client.get(
+ headers={'Host': 'localhost', 'Connection': 'keep-alive'},
+ start=True,
+ read_timeout=1,
+ )
- return self.get(sock=sock)
+ time.sleep(3)
- assert self.get()['status'] == 200, 'init'
+ return client.get(sock=sock)
- assert 'success' in self.conf({'http': {'idle_timeout': 2}}, 'settings')
- assert req()['status'] == 408, 'status idle timeout'
+ assert client.get()['status'] == 200, 'init'
- assert 'success' in self.conf({'http': {'idle_timeout': 7}}, 'settings')
- assert req()['status'] == 200, 'status idle timeout 2'
+ assert 'success' in client.conf({'http': {'idle_timeout': 2}}, 'settings')
+ assert req()['status'] == 408, 'status idle timeout'
- def test_settings_idle_timeout_2(self):
- self.load('empty')
+ assert 'success' in client.conf({'http': {'idle_timeout': 7}}, 'settings')
+ assert req()['status'] == 200, 'status idle timeout 2'
- def req():
- sock = self.http(b'', raw=True, no_recv=True)
- time.sleep(3)
+def test_settings_idle_timeout_2():
+ client.load('empty')
- return self.get(sock=sock)
+ def req():
+ sock = client.http(b'', raw=True, no_recv=True)
- assert self.get()['status'] == 200, 'init'
+ time.sleep(3)
- assert 'success' in self.conf({'http': {'idle_timeout': 1}}, 'settings')
- assert req()['status'] == 408, 'status idle timeout'
+ return client.get(sock=sock)
- assert 'success' in self.conf({'http': {'idle_timeout': 7}}, 'settings')
- assert req()['status'] == 200, 'status idle timeout 2'
+ assert client.get()['status'] == 200, 'init'
- def test_settings_max_body_size(self):
- self.load('empty')
+ assert 'success' in client.conf({'http': {'idle_timeout': 1}}, 'settings')
+ assert req()['status'] == 408, 'status idle timeout'
- assert 'success' in self.conf(
- {'http': {'max_body_size': 5}}, 'settings'
- )
+ assert 'success' in client.conf({'http': {'idle_timeout': 7}}, 'settings')
+ assert req()['status'] == 200, 'status idle timeout 2'
- assert self.post(body='01234')['status'] == 200, 'status size'
- assert self.post(body='012345')['status'] == 413, 'status size max'
- def test_settings_max_body_size_large(self):
- self.load('mirror')
+def test_settings_max_body_size():
+ client.load('empty')
- assert 'success' in self.conf(
- {'http': {'max_body_size': 32 * 1024 * 1024}}, 'settings'
- )
+ assert 'success' in client.conf({'http': {'max_body_size': 5}}, 'settings')
- body = '0123456789abcdef' * 4 * 64 * 1024
- resp = self.post(body=body, read_buffer_size=1024 * 1024)
- assert resp['status'] == 200, 'status size 4'
- assert resp['body'] == body, 'status body 4'
-
- body = '0123456789abcdef' * 8 * 64 * 1024
- resp = self.post(body=body, read_buffer_size=1024 * 1024)
- assert resp['status'] == 200, 'status size 8'
- assert resp['body'] == body, 'status body 8'
-
- body = '0123456789abcdef' * 16 * 64 * 1024
- resp = self.post(body=body, read_buffer_size=1024 * 1024)
- assert resp['status'] == 200, 'status size 16'
- assert resp['body'] == body, 'status body 16'
-
- body = '0123456789abcdef' * 32 * 64 * 1024
- resp = self.post(body=body, read_buffer_size=1024 * 1024)
- assert resp['status'] == 200, 'status size 32'
- assert resp['body'] == body, 'status body 32'
-
- @pytest.mark.skip('not yet')
- def test_settings_negative_value(self):
- assert 'error' in self.conf(
- {'http': {'max_body_size': -1}}, 'settings'
- ), 'settings negative value'
-
- def test_settings_body_buffer_size(self):
- self.load('mirror')
-
- assert 'success' in self.conf(
- {
- 'http': {
- 'max_body_size': 64 * 1024 * 1024,
- 'body_buffer_size': 32 * 1024 * 1024,
- }
- },
- 'settings',
- )
+ assert client.post(body='01234')['status'] == 200, 'status size'
+ assert client.post(body='012345')['status'] == 413, 'status size max'
- body = '0123456789abcdef'
- resp = self.post(body=body)
- assert bool(resp), 'response from application'
- assert resp['status'] == 200, 'status'
- assert resp['body'] == body, 'body'
- body = '0123456789abcdef' * 1024 * 1024
- resp = self.post(body=body, read_buffer_size=1024 * 1024)
- assert bool(resp), 'response from application 2'
- assert resp['status'] == 200, 'status 2'
- assert resp['body'] == body, 'body 2'
+def test_settings_max_body_size_large():
+ client.load('mirror')
- body = '0123456789abcdef' * 2 * 1024 * 1024
- resp = self.post(body=body, read_buffer_size=1024 * 1024)
- assert bool(resp), 'response from application 3'
- assert resp['status'] == 200, 'status 3'
- assert resp['body'] == body, 'body 3'
+ assert 'success' in client.conf(
+ {'http': {'max_body_size': 32 * 1024 * 1024}}, 'settings'
+ )
- body = '0123456789abcdef' * 3 * 1024 * 1024
- resp = self.post(body=body, read_buffer_size=1024 * 1024)
- assert bool(resp), 'response from application 4'
- assert resp['status'] == 200, 'status 4'
- assert resp['body'] == body, 'body 4'
+ body = '0123456789abcdef' * 4 * 64 * 1024
+ resp = client.post(body=body, read_buffer_size=1024 * 1024)
+ assert resp['status'] == 200, 'status size 4'
+ assert resp['body'] == body, 'status body 4'
- def test_settings_log_route(self):
- def count_fallbacks():
- return len(self.findall(r'"fallback" taken'))
+ body = '0123456789abcdef' * 8 * 64 * 1024
+ resp = client.post(body=body, read_buffer_size=1024 * 1024)
+ assert resp['status'] == 200, 'status size 8'
+ assert resp['body'] == body, 'status body 8'
- def check_record(template):
- assert self.search_in_log(template) is not None
+ body = '0123456789abcdef' * 16 * 64 * 1024
+ resp = client.post(body=body, read_buffer_size=1024 * 1024)
+ assert resp['status'] == 200, 'status size 16'
+ assert resp['body'] == body, 'status body 16'
- def check_no_record(template):
- assert self.search_in_log(template) is None
+ body = '0123456789abcdef' * 32 * 64 * 1024
+ resp = client.post(body=body, read_buffer_size=1024 * 1024)
+ assert resp['status'] == 200, 'status size 32'
+ assert resp['body'] == body, 'status body 32'
- def template_req_line(url):
- return rf'\[notice\].*http request line "GET {url} HTTP/1\.1"'
- def template_selected(route):
- return rf'\[notice\].*"{route}" selected'
+@pytest.mark.skip('not yet')
+def test_settings_negative_value():
+ assert 'error' in client.conf(
+ {'http': {'max_body_size': -1}}, 'settings'
+ ), 'settings negative value'
- def template_discarded(route):
- return rf'\[info\].*"{route}" discarded'
- def wait_for_request_log(status, uri, route):
- assert self.get(url=uri)['status'] == status
- assert self.wait_for_record(template_req_line(uri)) is not None
- assert self.wait_for_record(template_selected(route)) is not None
+def test_settings_body_buffer_size():
+ client.load('mirror')
- # routes array
+ assert 'success' in client.conf(
+ {
+ 'http': {
+ 'max_body_size': 64 * 1024 * 1024,
+ 'body_buffer_size': 32 * 1024 * 1024,
+ }
+ },
+ 'settings',
+ )
+
+ body = '0123456789abcdef'
+ resp = client.post(body=body)
+ assert bool(resp), 'response from application'
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == body, 'body'
+
+ body = '0123456789abcdef' * 1024 * 1024
+ resp = client.post(body=body, read_buffer_size=1024 * 1024)
+ assert bool(resp), 'response from application 2'
+ assert resp['status'] == 200, 'status 2'
+ assert resp['body'] == body, 'body 2'
+
+ body = '0123456789abcdef' * 2 * 1024 * 1024
+ resp = client.post(body=body, read_buffer_size=1024 * 1024)
+ assert bool(resp), 'response from application 3'
+ assert resp['status'] == 200, 'status 3'
+ assert resp['body'] == body, 'body 3'
+
+ body = '0123456789abcdef' * 3 * 1024 * 1024
+ resp = client.post(body=body, read_buffer_size=1024 * 1024)
+ assert bool(resp), 'response from application 4'
+ assert resp['status'] == 200, 'status 4'
+ assert resp['body'] == body, 'body 4'
+
+
+def test_settings_log_route(findall, search_in_file, wait_for_record):
+ def count_fallbacks():
+ return len(findall(r'"fallback" taken'))
+
+ def check_record(template):
+ assert search_in_file(template) is not None
+
+ def check_no_record(template):
+ assert search_in_file(template) is None
+
+ def template_req_line(url):
+ return rf'\[notice\].*http request line "GET {url} HTTP/1\.1"'
+
+ def template_selected(route):
+ return rf'\[notice\].*"{route}" selected'
+
+ def template_discarded(route):
+ return rf'\[info\].*"{route}" discarded'
+
+ def wait_for_request_log(status, uri, route):
+ assert client.get(url=uri)['status'] == status
+ assert wait_for_record(template_req_line(uri)) is not None
+ assert wait_for_record(template_selected(route)) is not None
+
+ # routes array
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "match": {
+ "uri": "/zero",
+ },
+ "action": {"return": 200},
+ },
+ {
+ "action": {"return": 201},
+ },
+ ],
+ "applications": {},
+ "settings": {"http": {"log_route": True}},
+ }
+ )
+
+ wait_for_request_log(200, '/zero', 'routes/0')
+ check_no_record(r'discarded')
+
+ wait_for_request_log(201, '/one', 'routes/1')
+ check_record(template_discarded('routes/0'))
+
+ # routes object
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes/main"}},
+ "routes": {
+ "main": [
{
"match": {
- "uri": "/zero",
+ "uri": "/named_route",
},
"action": {"return": 200},
},
{
"action": {"return": 201},
},
- ],
- "applications": {},
- "settings": {"http": {"log_route": True}},
- }
- )
-
- wait_for_request_log(200, '/zero', 'routes/0')
- check_no_record(r'discarded')
-
- wait_for_request_log(201, '/one', 'routes/1')
- check_record(template_discarded('routes/0'))
+ ]
+ },
+ "applications": {},
+ "settings": {"http": {"log_route": True}},
+ }
+ )
- # routes object
+ wait_for_request_log(200, '/named_route', 'routes/main/0')
+ check_no_record(template_discarded('routes/main'))
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes/main"}},
- "routes": {
- "main": [
- {
- "match": {
- "uri": "/named_route",
- },
- "action": {"return": 200},
- },
- {
- "action": {"return": 201},
- },
- ]
- },
- "applications": {},
- "settings": {"http": {"log_route": True}},
- }
- )
+ wait_for_request_log(201, '/unnamed_route', 'routes/main/1')
+ check_record(template_discarded('routes/main/0'))
- wait_for_request_log(200, '/named_route', 'routes/main/0')
- check_no_record(template_discarded('routes/main'))
+ # routes sequence
- wait_for_request_log(201, '/unnamed_route', 'routes/main/1')
- check_record(template_discarded('routes/main/0'))
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes/first"}},
+ "routes": {
+ "first": [
+ {
+ "action": {"pass": "routes/second"},
+ },
+ ],
+ "second": [
+ {
+ "action": {"return": 200},
+ },
+ ],
+ },
+ "applications": {},
+ "settings": {"http": {"log_route": True}},
+ }
+ )
- # routes sequence
+ wait_for_request_log(200, '/sequence', 'routes/second/0')
+ check_record(template_selected('routes/first/0'))
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes/first"}},
- "routes": {
- "first": [
- {
- "action": {"pass": "routes/second"},
- },
- ],
- "second": [
- {
- "action": {"return": 200},
- },
- ],
- },
- "applications": {},
- "settings": {"http": {"log_route": True}},
- }
- )
+ # fallback
- wait_for_request_log(200, '/sequence', 'routes/second/0')
- check_record(template_selected('routes/first/0'))
-
- # fallback
-
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes/fall"}},
- "routes": {
- "fall": [
- {
- "action": {
- "share": "/blah",
- "fallback": {"pass": "routes/fall2"},
- },
- },
- ],
- "fall2": [
- {
- "action": {"return": 200},
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes/fall"}},
+ "routes": {
+ "fall": [
+ {
+ "action": {
+ "share": "/blah",
+ "fallback": {"pass": "routes/fall2"},
},
- ],
- },
- "applications": {},
- "settings": {"http": {"log_route": True}},
- }
- )
+ },
+ ],
+ "fall2": [
+ {
+ "action": {"return": 200},
+ },
+ ],
+ },
+ "applications": {},
+ "settings": {"http": {"log_route": True}},
+ }
+ )
- wait_for_request_log(200, '/', 'routes/fall2/0')
- assert count_fallbacks() == 1
- check_record(template_selected('routes/fall/0'))
+ wait_for_request_log(200, '/', 'routes/fall2/0')
+ assert count_fallbacks() == 1
+ check_record(template_selected('routes/fall/0'))
- assert self.head()['status'] == 200
- assert count_fallbacks() == 2
+ assert client.head()['status'] == 200
+ assert count_fallbacks() == 2
- # disable log
+ # disable log
- assert 'success' in self.conf({"log_route": False}, 'settings/http')
+ assert 'success' in client.conf({"log_route": False}, 'settings/http')
- url = '/disable_logging'
- assert self.get(url=url)['status'] == 200
+ url = '/disable_logging'
+ assert client.get(url=url)['status'] == 200
- time.sleep(1)
+ time.sleep(1)
- check_no_record(template_req_line(url))
+ check_no_record(template_req_line(url))
- # total
+ # total
- assert len(self.findall(r'\[notice\].*http request line')) == 7
- assert len(self.findall(r'\[notice\].*selected')) == 10
- assert len(self.findall(r'\[info\].*discarded')) == 2
+ assert len(findall(r'\[notice\].*http request line')) == 7
+ assert len(findall(r'\[notice\].*selected')) == 10
+ assert len(findall(r'\[info\].*discarded')) == 2
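
Note: the timeout tests above all follow one pattern: send part of a request, idle past (or within) the configured limit, send the rest, and check the status code — 408 when the timer fired, 200 otherwise. A bare-sockets sketch of the header_read_timeout variant, assuming Unit is listening on 127.0.0.1:7080 (the function name is illustrative):

import socket
import time

def probe_header_timeout(delay):
    sock = socket.create_connection(('127.0.0.1', 7080))
    sock.sendall(b'GET / HTTP/1.1\r\n')            # request line only, headers withheld
    time.sleep(delay)                              # idle for `delay` seconds
    try:
        sock.sendall(b'Host: localhost\r\nConnection: close\r\n\r\n')
    except OSError:
        pass                                       # Unit may have dropped the connection already
    data = sock.recv(4096).decode(errors='replace')
    sock.close()
    return data.partition(' ')[2][:3]              # status code, e.g. '408' or '200'
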
diff --git a/test/test_static.py b/test/test_static.py
index f7eade7c..d46247d9 100644
--- a/test/test_static.py
+++ b/test/test_static.py
@@ -2,351 +2,361 @@ import os
import socket
import pytest
-from unit.applications.proto import TestApplicationProto
-from unit.option import option
+from unit.applications.proto import ApplicationProto
from unit.utils import waitforfiles
-class TestStatic(TestApplicationProto):
- prerequisites = {}
-
- def setup_method(self):
- os.makedirs(f'{option.temp_dir}/assets/dir')
- with open(f'{option.temp_dir}/assets/index.html', 'w') as index, open(
- f'{option.temp_dir}/assets/README', 'w'
- ) as readme, open(
- f'{option.temp_dir}/assets/log.log', 'w'
- ) as log, open(
- f'{option.temp_dir}/assets/dir/file', 'w'
- ) as file:
- index.write('0123456789')
- readme.write('readme')
- log.write('[debug]')
- file.write('blah')
-
- self._load_conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {"action": {"share": f'{option.temp_dir}/assets$uri'}}
- ],
- "settings": {
- "http": {
- "static": {
- "mime_types": {"text/plain": [".log", "README"]}
- }
- }
- },
- }
- )
+client = ApplicationProto()
+
+
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ os.makedirs(f'{temp_dir}/assets/dir')
+ assets_dir = f'{temp_dir}/assets'
+
+ with open(f'{assets_dir}/index.html', 'w') as index, open(
+ f'{assets_dir}/README', 'w'
+ ) as readme, open(f'{assets_dir}/log.log', 'w') as log, open(
+ f'{assets_dir}/dir/file', 'w'
+ ) as file:
+ index.write('0123456789')
+ readme.write('readme')
+ log.write('[debug]')
+ file.write('blah')
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"share": f'{assets_dir}$uri'}}],
+ "settings": {
+ "http": {
+ "static": {"mime_types": {"text/plain": [".log", "README"]}}
+ }
+ },
+ }
+ )
- def test_static_index(self, temp_dir):
- def set_index(index):
- assert 'success' in self.conf(
- {"share": f'{temp_dir}/assets$uri', "index": index},
- 'routes/0/action',
- ), 'configure index'
-
- set_index('README')
- assert self.get()['body'] == 'readme', 'index'
-
- self.conf_delete('routes/0/action/index')
- assert self.get()['body'] == '0123456789', 'delete index'
-
- set_index('')
- assert self.get()['status'] == 404, 'index empty'
-
- def test_static_index_default(self):
- assert self.get(url='/index.html')['body'] == '0123456789', 'index'
- assert self.get(url='/')['body'] == '0123456789', 'index 2'
- assert self.get(url='//')['body'] == '0123456789', 'index 3'
- assert self.get(url='/.')['body'] == '0123456789', 'index 4'
- assert self.get(url='/./')['body'] == '0123456789', 'index 5'
- assert self.get(url='/?blah')['body'] == '0123456789', 'index vars'
- assert self.get(url='/#blah')['body'] == '0123456789', 'index anchor'
- assert self.get(url='/dir/')['status'] == 404, 'index not found'
-
- resp = self.get(url='/index.html/')
- assert resp['status'] == 404, 'index not found 2 status'
- assert (
- resp['headers']['Content-Type'] == 'text/html'
- ), 'index not found 2 Content-Type'
- def test_static_index_invalid(self, skip_alert, temp_dir):
- skip_alert(r'failed to apply new conf')
+def test_static_index(temp_dir):
+ def set_index(index):
+ assert 'success' in client.conf(
+ {"share": f'{temp_dir}/assets$uri', "index": index},
+ 'routes/0/action',
+ ), 'configure index'
- def check_index(index):
- assert 'error' in self.conf(
- {"share": f'{temp_dir}/assets$uri', "index": index},
- 'routes/0/action',
- )
+ set_index('README')
+ assert client.get()['body'] == 'readme', 'index'
- check_index({})
- check_index(['index.html', '$blah'])
+ client.conf_delete('routes/0/action/index')
+ assert client.get()['body'] == '0123456789', 'delete index'
- def test_static_large_file(self, temp_dir):
- file_size = 32 * 1024 * 1024
- with open(f'{temp_dir}/assets/large', 'wb') as f:
- f.seek(file_size - 1)
- f.write(b'\0')
+ set_index('')
+ assert client.get()['status'] == 404, 'index empty'
- assert (
- len(self.get(url='/large', read_buffer_size=1024 * 1024)['body'])
- == file_size
- ), 'large file'
- def test_static_etag(self, temp_dir):
- etag = self.get(url='/')['headers']['ETag']
- etag_2 = self.get(url='/README')['headers']['ETag']
+def test_static_index_default():
+ assert client.get(url='/index.html')['body'] == '0123456789', 'index'
+ assert client.get(url='/')['body'] == '0123456789', 'index 2'
+ assert client.get(url='//')['body'] == '0123456789', 'index 3'
+ assert client.get(url='/.')['body'] == '0123456789', 'index 4'
+ assert client.get(url='/./')['body'] == '0123456789', 'index 5'
+ assert client.get(url='/?blah')['body'] == '0123456789', 'index vars'
+ assert client.get(url='/#blah')['body'] == '0123456789', 'index anchor'
+ assert client.get(url='/dir/')['status'] == 404, 'index not found'
- assert etag != etag_2, 'different ETag'
- assert etag == self.get(url='/')['headers']['ETag'], 'same ETag'
+ resp = client.get(url='/index.html/')
+ assert resp['status'] == 404, 'index not found 2 status'
+ assert (
+ resp['headers']['Content-Type'] == 'text/html'
+ ), 'index not found 2 Content-Type'
- with open(f'{temp_dir}/assets/index.html', 'w') as f:
- f.write('blah')
- assert etag != self.get(url='/')['headers']['ETag'], 'new ETag'
+def test_static_index_invalid(skip_alert, temp_dir):
+ skip_alert(r'failed to apply new conf')
- def test_static_redirect(self):
- resp = self.get(url='/dir')
- assert resp['status'] == 301, 'redirect status'
- assert resp['headers']['Location'] == '/dir/', 'redirect Location'
- assert 'Content-Type' not in resp['headers'], 'redirect Content-Type'
+ def check_index(index):
+ assert 'error' in client.conf(
+ {"share": f'{temp_dir}/assets$uri', "index": index},
+ 'routes/0/action',
+ )
- def test_static_space_in_name(self, temp_dir):
- assets_dir = f'{temp_dir}/assets'
+ check_index({})
+ check_index(['index.html', '$blah'])
- os.rename(
- f'{assets_dir}/dir/file',
- f'{assets_dir}/dir/fi le',
- )
- assert waitforfiles(f'{assets_dir}/dir/fi le')
- assert self.get(url='/dir/fi le')['body'] == 'blah', 'file name'
- os.rename(f'{assets_dir}/dir', f'{assets_dir}/di r')
- assert waitforfiles(f'{assets_dir}/di r/fi le')
- assert self.get(url='/di r/fi le')['body'] == 'blah', 'dir name'
+def test_static_large_file(temp_dir):
+ file_size = 32 * 1024 * 1024
+ with open(f'{temp_dir}/assets/large', 'wb') as f:
+ f.seek(file_size - 1)
+ f.write(b'\0')
- os.rename(f'{assets_dir}/di r', f'{assets_dir}/ di r ')
- assert waitforfiles(f'{assets_dir}/ di r /fi le')
- assert (
- self.get(url='/ di r /fi le')['body'] == 'blah'
- ), 'dir name enclosing'
+ assert (
+ len(client.get(url='/large', read_buffer_size=1024 * 1024)['body'])
+ == file_size
+ ), 'large file'
- assert (
- self.get(url='/%20di%20r%20/fi le')['body'] == 'blah'
- ), 'dir encoded'
- assert (
- self.get(url='/ di r %2Ffi le')['body'] == 'blah'
- ), 'slash encoded'
- assert self.get(url='/ di r /fi%20le')['body'] == 'blah', 'file encoded'
- assert (
- self.get(url='/%20di%20r%20%2Ffi%20le')['body'] == 'blah'
- ), 'encoded'
- assert (
- self.get(url='/%20%64%69%20%72%20%2F%66%69%20%6C%65')['body']
- == 'blah'
- ), 'encoded 2'
- os.rename(
- f'{assets_dir}/ di r /fi le',
- f'{assets_dir}/ di r / fi le ',
- )
- assert waitforfiles(f'{assets_dir}/ di r / fi le ')
- assert (
- self.get(url='/%20di%20r%20/%20fi%20le%20')['body'] == 'blah'
- ), 'file name enclosing'
-
- try:
- open(f'{temp_dir}/ф а', 'a').close()
- utf8 = True
-
- except KeyboardInterrupt:
- raise
-
- except:
- utf8 = False
-
- if utf8:
- os.rename(
- f'{assets_dir}/ di r / fi le ',
- f'{assets_dir}/ di r /фа йл',
- )
- assert waitforfiles(f'{assets_dir}/ di r /фа йл')
- assert (
- self.get(url='/ di r /фа йл')['body'] == 'blah'
- ), 'file name 2'
-
- os.rename(
- f'{assets_dir}/ di r ',
- f'{assets_dir}/ди ректория',
- )
- assert waitforfiles(f'{assets_dir}/ди ректория/фа йл')
- assert (
- self.get(url='/ди ректория/фа йл')['body'] == 'blah'
- ), 'dir name 2'
-
- def test_static_unix_socket(self, temp_dir):
- sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.bind(f'{temp_dir}/assets/unix_socket')
-
- assert self.get(url='/unix_socket')['status'] == 404, 'socket'
-
- sock.close()
-
- def test_static_unix_fifo(self, temp_dir):
- os.mkfifo(f'{temp_dir}/assets/fifo')
-
- assert self.get(url='/fifo')['status'] == 404, 'fifo'
-
- def test_static_method(self):
- resp = self.head()
- assert resp['status'] == 200, 'HEAD status'
- assert resp['body'] == '', 'HEAD empty body'
-
- assert self.delete()['status'] == 405, 'DELETE'
- assert self.post()['status'] == 405, 'POST'
- assert self.put()['status'] == 405, 'PUT'
-
- def test_static_path(self):
- assert self.get(url='/dir/../dir/file')['status'] == 200, 'relative'
-
- assert self.get(url='./')['status'] == 400, 'path invalid'
- assert self.get(url='../')['status'] == 400, 'path invalid 2'
- assert self.get(url='/..')['status'] == 400, 'path invalid 3'
- assert self.get(url='../assets/')['status'] == 400, 'path invalid 4'
- assert self.get(url='/../assets/')['status'] == 400, 'path invalid 5'
-
- def test_static_two_clients(self):
- sock = self.get(no_recv=True)
- sock2 = self.get(no_recv=True)
-
- assert sock.recv(1) == b'H', 'client 1'
- assert sock2.recv(1) == b'H', 'client 2'
- assert sock.recv(1) == b'T', 'client 1 again'
- assert sock2.recv(1) == b'T', 'client 2 again'
-
- sock.close()
- sock2.close()
-
- def test_static_mime_types(self):
- assert 'success' in self.conf(
- {
- "text/x-code/x-blah/x-blah": "readme",
- "text/plain": [".html", ".log", "file"],
- },
- 'settings/http/static/mime_types',
- ), 'configure mime_types'
+def test_static_etag(temp_dir):
+ etag = client.get(url='/')['headers']['ETag']
+ etag_2 = client.get(url='/README')['headers']['ETag']
- assert (
- self.get(url='/README')['headers']['Content-Type']
- == 'text/x-code/x-blah/x-blah'
- ), 'mime_types string case insensitive'
- assert (
- self.get(url='/index.html')['headers']['Content-Type']
- == 'text/plain'
- ), 'mime_types html'
- assert (
- self.get(url='/')['headers']['Content-Type'] == 'text/plain'
- ), 'mime_types index default'
- assert (
- self.get(url='/dir/file')['headers']['Content-Type'] == 'text/plain'
- ), 'mime_types file in dir'
+ assert etag != etag_2, 'different ETag'
+ assert etag == client.get(url='/')['headers']['ETag'], 'same ETag'
- def test_static_mime_types_partial_match(self):
- assert 'success' in self.conf(
- {
- "text/x-blah": ["ile", "fil", "f", "e", ".file"],
- },
- 'settings/http/static/mime_types',
- ), 'configure mime_types'
- assert 'Content-Type' not in self.get(url='/dir/file'), 'partial match'
-
- def test_static_mime_types_reconfigure(self):
- assert 'success' in self.conf(
- {
- "text/x-code": "readme",
- "text/plain": [".html", ".log", "file"],
- },
- 'settings/http/static/mime_types',
- ), 'configure mime_types'
+ with open(f'{temp_dir}/assets/index.html', 'w') as f:
+ f.write('blah')
- assert self.conf_get('settings/http/static/mime_types') == {
- 'text/x-code': 'readme',
- 'text/plain': ['.html', '.log', 'file'],
- }, 'mime_types get'
- assert (
- self.conf_get('settings/http/static/mime_types/text%2Fx-code')
- == 'readme'
- ), 'mime_types get string'
- assert self.conf_get(
- 'settings/http/static/mime_types/text%2Fplain'
- ) == ['.html', '.log', 'file'], 'mime_types get array'
- assert (
- self.conf_get('settings/http/static/mime_types/text%2Fplain/1')
- == '.log'
- ), 'mime_types get array element'
+ assert etag != client.get(url='/')['headers']['ETag'], 'new ETag'
- assert 'success' in self.conf_delete(
- 'settings/http/static/mime_types/text%2Fplain/2'
- ), 'mime_types remove array element'
- assert (
- 'Content-Type' not in self.get(url='/dir/file')['headers']
- ), 'mime_types removed'
- assert 'success' in self.conf_post(
- '"file"', 'settings/http/static/mime_types/text%2Fplain'
- ), 'mime_types add array element'
- assert (
- self.get(url='/dir/file')['headers']['Content-Type'] == 'text/plain'
- ), 'mime_types reverted'
+def test_static_redirect():
+ resp = client.get(url='/dir')
+ assert resp['status'] == 301, 'redirect status'
+ assert resp['headers']['Location'] == '/dir/', 'redirect Location'
+ assert 'Content-Type' not in resp['headers'], 'redirect Content-Type'
- assert 'success' in self.conf(
- '"file"', 'settings/http/static/mime_types/text%2Fplain'
- ), 'configure mime_types update'
- assert (
- self.get(url='/dir/file')['headers']['Content-Type'] == 'text/plain'
- ), 'mime_types updated'
- assert (
- 'Content-Type' not in self.get(url='/log.log')['headers']
- ), 'mime_types updated 2'
- assert 'success' in self.conf(
- '".log"', 'settings/http/static/mime_types/text%2Fblahblahblah'
- ), 'configure mime_types create'
+def test_static_space_in_name(temp_dir):
+ assets_dir = f'{temp_dir}/assets'
+
+ os.rename(
+ f'{assets_dir}/dir/file',
+ f'{assets_dir}/dir/fi le',
+ )
+ assert waitforfiles(f'{assets_dir}/dir/fi le')
+ assert client.get(url='/dir/fi le')['body'] == 'blah', 'file name'
+
+ os.rename(f'{assets_dir}/dir', f'{assets_dir}/di r')
+ assert waitforfiles(f'{assets_dir}/di r/fi le')
+ assert client.get(url='/di r/fi le')['body'] == 'blah', 'dir name'
+
+ os.rename(f'{assets_dir}/di r', f'{assets_dir}/ di r ')
+ assert waitforfiles(f'{assets_dir}/ di r /fi le')
+ assert (
+ client.get(url='/ di r /fi le')['body'] == 'blah'
+ ), 'dir name enclosing'
+
+ assert (
+ client.get(url='/%20di%20r%20/fi le')['body'] == 'blah'
+ ), 'dir encoded'
+ assert client.get(url='/ di r %2Ffi le')['body'] == 'blah', 'slash encoded'
+ assert client.get(url='/ di r /fi%20le')['body'] == 'blah', 'file encoded'
+ assert (
+ client.get(url='/%20di%20r%20%2Ffi%20le')['body'] == 'blah'
+ ), 'encoded'
+ assert (
+ client.get(url='/%20%64%69%20%72%20%2F%66%69%20%6C%65')['body']
+ == 'blah'
+ ), 'encoded 2'
+
+ os.rename(
+ f'{assets_dir}/ di r /fi le',
+ f'{assets_dir}/ di r / fi le ',
+ )
+ assert waitforfiles(f'{assets_dir}/ di r / fi le ')
+ assert (
+ client.get(url='/%20di%20r%20/%20fi%20le%20')['body'] == 'blah'
+ ), 'file name enclosing'
+
+ try:
+ open(f'{temp_dir}/ф а', 'a').close()
+ utf8 = True
+
+ except KeyboardInterrupt:
+ raise
+
+ except:
+ utf8 = False
+
+ if utf8:
+ os.rename(
+ f'{assets_dir}/ di r / fi le ',
+ f'{assets_dir}/ di r /фа йл',
+ )
+ assert waitforfiles(f'{assets_dir}/ di r /фа йл')
+ assert client.get(url='/ di r /фа йл')['body'] == 'blah'
+
+ os.rename(
+ f'{assets_dir}/ di r ',
+ f'{assets_dir}/ди ректория',
+ )
+ assert waitforfiles(f'{assets_dir}/ди ректория/фа йл')
assert (
- self.get(url='/log.log')['headers']['Content-Type']
- == 'text/blahblahblah'
- ), 'mime_types create'
-
- def test_static_mime_types_correct(self):
- assert 'error' in self.conf(
- {"text/x-code": "readme", "text/plain": "readme"},
- 'settings/http/static/mime_types',
- ), 'mime_types same extensions'
- assert 'error' in self.conf(
- {"text/x-code": [".h", ".c"], "text/plain": ".c"},
- 'settings/http/static/mime_types',
- ), 'mime_types same extensions array'
- assert 'error' in self.conf(
- {
- "text/x-code": [".h", ".c", "readme"],
- "text/plain": "README",
- },
- 'settings/http/static/mime_types',
- ), 'mime_types same extensions case insensitive'
+ client.get(url='/ди ректория/фа йл')['body'] == 'blah'
+ ), 'dir name 2'
- @pytest.mark.skip('not yet')
- def test_static_mime_types_invalid(self, temp_dir):
- assert 'error' in self.http(
- b"""PUT /config/settings/http/static/mime_types/%0%00% HTTP/1.1\r
+
+def test_static_unix_socket(temp_dir):
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ sock.bind(f'{temp_dir}/assets/unix_socket')
+
+ assert client.get(url='/unix_socket')['status'] == 404, 'socket'
+
+ sock.close()
+
+
+def test_static_unix_fifo(temp_dir):
+ os.mkfifo(f'{temp_dir}/assets/fifo')
+
+ assert client.get(url='/fifo')['status'] == 404, 'fifo'
+
+
+def test_static_method():
+ resp = client.head()
+ assert resp['status'] == 200, 'HEAD status'
+ assert resp['body'] == '', 'HEAD empty body'
+
+ assert client.delete()['status'] == 405, 'DELETE'
+ assert client.post()['status'] == 405, 'POST'
+ assert client.put()['status'] == 405, 'PUT'
+
+
+def test_static_path():
+ assert client.get(url='/dir/../dir/file')['status'] == 200, 'relative'
+
+ assert client.get(url='./')['status'] == 400, 'path invalid'
+ assert client.get(url='../')['status'] == 400, 'path invalid 2'
+ assert client.get(url='/..')['status'] == 400, 'path invalid 3'
+ assert client.get(url='../assets/')['status'] == 400, 'path invalid 4'
+ assert client.get(url='/../assets/')['status'] == 400, 'path invalid 5'
+
+
+def test_static_two_clients():
+ sock = client.get(no_recv=True)
+ sock2 = client.get(no_recv=True)
+
+ assert sock.recv(1) == b'H', 'client 1'
+ assert sock2.recv(1) == b'H', 'client 2'
+ assert sock.recv(1) == b'T', 'client 1 again'
+ assert sock2.recv(1) == b'T', 'client 2 again'
+
+ sock.close()
+ sock2.close()
+
+
+def test_static_mime_types():
+ assert 'success' in client.conf(
+ {
+ "text/x-code/x-blah/x-blah": "readme",
+ "text/plain": [".html", ".log", "file"],
+ },
+ 'settings/http/static/mime_types',
+ ), 'configure mime_types'
+
+ assert (
+ client.get(url='/README')['headers']['Content-Type']
+ == 'text/x-code/x-blah/x-blah'
+ ), 'mime_types string case insensitive'
+ assert (
+ client.get(url='/index.html')['headers']['Content-Type'] == 'text/plain'
+ ), 'mime_types html'
+ assert (
+ client.get(url='/')['headers']['Content-Type'] == 'text/plain'
+ ), 'mime_types index default'
+ assert (
+ client.get(url='/dir/file')['headers']['Content-Type'] == 'text/plain'
+ ), 'mime_types file in dir'
+
+
+def test_static_mime_types_partial_match():
+ assert 'success' in client.conf(
+ {
+ "text/x-blah": ["ile", "fil", "f", "e", ".file"],
+ },
+ 'settings/http/static/mime_types',
+ ), 'configure mime_types'
+ assert 'Content-Type' not in client.get(url='/dir/file'), 'partial match'
+
+
+def test_static_mime_types_reconfigure():
+ assert 'success' in client.conf(
+ {
+ "text/x-code": "readme",
+ "text/plain": [".html", ".log", "file"],
+ },
+ 'settings/http/static/mime_types',
+ ), 'configure mime_types'
+
+ assert client.conf_get('settings/http/static/mime_types') == {
+ 'text/x-code': 'readme',
+ 'text/plain': ['.html', '.log', 'file'],
+ }, 'mime_types get'
+ assert (
+ client.conf_get('settings/http/static/mime_types/text%2Fx-code')
+ == 'readme'
+ ), 'mime_types get string'
+ assert client.conf_get('settings/http/static/mime_types/text%2Fplain') == [
+ '.html',
+ '.log',
+ 'file',
+ ], 'mime_types get array'
+ assert (
+ client.conf_get('settings/http/static/mime_types/text%2Fplain/1')
+ == '.log'
+ ), 'mime_types get array element'
+
+ assert 'success' in client.conf_delete(
+ 'settings/http/static/mime_types/text%2Fplain/2'
+ ), 'mime_types remove array element'
+ assert (
+ 'Content-Type' not in client.get(url='/dir/file')['headers']
+ ), 'mime_types removed'
+
+ assert 'success' in client.conf_post(
+ '"file"', 'settings/http/static/mime_types/text%2Fplain'
+ ), 'mime_types add array element'
+ assert (
+ client.get(url='/dir/file')['headers']['Content-Type'] == 'text/plain'
+ ), 'mime_types reverted'
+
+ assert 'success' in client.conf(
+ '"file"', 'settings/http/static/mime_types/text%2Fplain'
+ ), 'configure mime_types update'
+ assert (
+ client.get(url='/dir/file')['headers']['Content-Type'] == 'text/plain'
+ ), 'mime_types updated'
+ assert (
+ 'Content-Type' not in client.get(url='/log.log')['headers']
+ ), 'mime_types updated 2'
+
+ assert 'success' in client.conf(
+ '".log"', 'settings/http/static/mime_types/text%2Fblahblahblah'
+ ), 'configure mime_types create'
+ assert (
+ client.get(url='/log.log')['headers']['Content-Type']
+ == 'text/blahblahblah'
+ ), 'mime_types create'
+
+
+def test_static_mime_types_correct():
+ assert 'error' in client.conf(
+ {"text/x-code": "readme", "text/plain": "readme"},
+ 'settings/http/static/mime_types',
+ ), 'mime_types same extensions'
+ assert 'error' in client.conf(
+ {"text/x-code": [".h", ".c"], "text/plain": ".c"},
+ 'settings/http/static/mime_types',
+ ), 'mime_types same extensions array'
+ assert 'error' in client.conf(
+ {
+ "text/x-code": [".h", ".c", "readme"],
+ "text/plain": "README",
+ },
+ 'settings/http/static/mime_types',
+ ), 'mime_types same extensions case insensitive'
+
+
+@pytest.mark.skip('not yet')
+def test_static_mime_types_invalid(temp_dir):
+ assert 'error' in client.http(
+ b"""PUT /config/settings/http/static/mime_types/%0%00% HTTP/1.1\r
Host: localhost\r
Connection: close\r
Content-Length: 6\r
\r
\"blah\"""",
- raw_resp=True,
- raw=True,
- sock_type='unix',
- addr=f'{temp_dir}/control.unit.sock',
- ), 'mime_types invalid'
+ raw_resp=True,
+ raw=True,
+ sock_type='unix',
+ addr=f'{temp_dir}/control.unit.sock',
+ ), 'mime_types invalid'
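
Note: the mime_types tests above revolve around one small object under settings/http/static/mime_types — each key is a MIME type, each value either a single suffix/filename or a list of them. In sketch form (the text/markdown entry is illustrative, not taken from the suite):

mime_types = {
    "text/plain": [".log", "README"],   # list form: several suffixes or exact file names
    "text/markdown": ".md",             # string form: a single suffix
}
# applied with the suite's helper, e.g.:
# client.conf(mime_types, 'settings/http/static/mime_types')
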
diff --git a/test/test_static_chroot.py b/test/test_static_chroot.py
index c5a35d82..6b4dd89a 100644
--- a/test/test_static_chroot.py
+++ b/test/test_static_chroot.py
@@ -2,150 +2,162 @@ import os
from pathlib import Path
import pytest
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
+prerequisites = {'features': {'chroot': True}}
-class TestStaticChroot(TestApplicationProto):
- prerequisites = {'features': ['chroot']}
+client = ApplicationProto()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, temp_dir):
- os.makedirs(f'{temp_dir}/assets/dir')
- Path(f'{temp_dir}/assets/index.html').write_text('0123456789')
- Path(f'{temp_dir}/assets/dir/file').write_text('blah')
- self.test_path = f'/{os.path.relpath(Path(__file__))}'
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ os.makedirs(f'{temp_dir}/assets/dir')
+ Path(f'{temp_dir}/assets/index.html').write_text('0123456789')
+ Path(f'{temp_dir}/assets/dir/file').write_text('blah')
- self._load_conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
- }
- )
+ client.test_path = f'/{os.path.relpath(Path(__file__))}'
- def update_action(self, chroot, share=f'{option.temp_dir}/assets$uri'):
- return self.conf(
- {'chroot': chroot, 'share': share},
- 'routes/0/action',
- )
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
+ }
+ )
- def get_custom(self, uri, host):
- return self.get(url=uri, headers={'Host': host, 'Connection': 'close'})[
- 'status'
- ]
- def test_static_chroot(self, temp_dir):
- assert self.get(url='/dir/file')['status'] == 200, 'default chroot'
- assert self.get(url='/index.html')['status'] == 200, 'default chroot 2'
+def update_action(chroot, share=f'{option.temp_dir}/assets$uri'):
+ return client.conf(
+ {'chroot': chroot, 'share': share},
+ 'routes/0/action',
+ )
- assert 'success' in self.update_action(f'{temp_dir}/assets/dir')
-
- assert self.get(url='/dir/file')['status'] == 200, 'chroot'
- assert self.get(url='/index.html')['status'] == 403, 'chroot 403 2'
- assert self.get(url='/file')['status'] == 403, 'chroot 403'
-
- def test_share_chroot_array(self, temp_dir):
- assert 'success' in self.update_action(
- f'{temp_dir}/assets/dir', ["/blah", f'{temp_dir}/assets$uri']
- )
- assert self.get(url='/dir/file')['status'] == 200, 'share array'
-
- assert 'success' in self.update_action(
- f'{temp_dir}/assets/$host',
- ['/blah', f'{temp_dir}/assets$uri'],
- )
- assert self.get_custom('/dir/file', 'dir') == 200, 'array variable'
-
- assert 'success' in self.update_action(
- f'{temp_dir}/assets/dir', ['/blah', '/blah2']
- )
- assert self.get()['status'] != 200, 'share array bad'
-
- def test_static_chroot_permission(self, is_su, temp_dir):
- if is_su:
- pytest.skip("does't work under root")
-
- os.chmod(f'{temp_dir}/assets/dir', 0o100)
-
- assert 'success' in self.update_action(
- f'{temp_dir}/assets/dir'
- ), 'configure chroot'
-
- assert self.get(url='/dir/file')['status'] == 200, 'chroot'
-
- def test_static_chroot_empty(self, temp_dir):
- assert 'success' in self.update_action('')
- assert self.get(url='/dir/file')['status'] == 200, 'empty absolute'
-
- assert 'success' in self.update_action("", ".$uri")
- assert self.get(url=self.test_path)['status'] == 200, 'empty relative'
-
- def test_static_chroot_relative(self, is_su, temp_dir):
- if is_su:
- pytest.skip("Does't work under root.")
-
- assert 'success' in self.update_action('.')
- assert self.get(url='/dir/file')['status'] == 403, 'relative chroot'
-
- assert 'success' in self.conf({"share": ".$uri"}, 'routes/0/action')
- assert self.get(url=self.test_path)['status'] == 200, 'relative share'
-
- assert 'success' in self.update_action(".", ".$uri")
- assert self.get(url=self.test_path)['status'] == 200, 'relative'
-
- def test_static_chroot_variables(self, temp_dir):
- assert 'success' in self.update_action(f'{temp_dir}/assets/$host')
- assert self.get_custom('/dir/file', 'dir') == 200
-
- assert 'success' in self.update_action(f'{temp_dir}/assets/${{host}}')
- assert self.get_custom('/dir/file', 'dir') == 200
-
- def test_static_chroot_variables_buildin_start(self, temp_dir):
- assert 'success' in self.update_action(
- '$uri/assets/dir',
- f'{temp_dir}/assets/dir/$host',
- )
- assert self.get_custom(temp_dir, 'file') == 200
-
- def test_static_chroot_variables_buildin_mid(self, temp_dir):
- assert 'success' in self.update_action(f'{temp_dir}/$host/dir')
- assert self.get_custom('/dir/file', 'assets') == 200
-
- def test_static_chroot_variables_buildin_end(self, temp_dir):
- assert 'success' in self.update_action(f'{temp_dir}/assets/$host')
- assert self.get_custom('/dir/file', 'dir') == 200
-
- def test_static_chroot_slash(self, temp_dir):
- assert 'success' in self.update_action(f'{temp_dir}/assets/dir/')
- assert self.get(url='/dir/file')['status'] == 200, 'slash end'
- assert self.get(url='/dirxfile')['status'] == 403, 'slash end bad'
-
- assert 'success' in self.update_action(f'{temp_dir}/assets/dir')
- assert self.get(url='/dir/file')['status'] == 200, 'no slash end'
-
- assert 'success' in self.update_action(f'{temp_dir}/assets/dir/')
- assert self.get(url='/dir/file')['status'] == 200, 'slash end 2'
- assert self.get(url='/dirxfile')['status'] == 403, 'slash end 2 bad'
-
- assert 'success' in self.update_action(
- f'{temp_dir}//assets////dir///', f'{temp_dir}///assets/////$uri'
- )
- assert self.get(url='/dir/file')['status'] == 200, 'multiple slashes'
-
- def test_static_chroot_invalid(self, temp_dir):
- assert 'error' in self.conf(
- {"share": temp_dir, "chroot": True},
- 'routes/0/action',
- ), 'configure chroot error'
- assert 'error' in self.conf(
- {"share": temp_dir, "symlinks": "True"},
- 'routes/0/action',
- ), 'configure symlink error'
- assert 'error' in self.conf(
- {"share": temp_dir, "mount": "True"},
- 'routes/0/action',
- ), 'configure mount error'
-
- assert 'error' in self.update_action(f'{temp_dir}/assets/d$r$uri')
- assert 'error' in self.update_action(f'{temp_dir}/assets/$$uri')
+
+def get_custom(uri, host):
+ return client.get(url=uri, headers={'Host': host, 'Connection': 'close'})[
+ 'status'
+ ]
+
+
+def test_static_chroot(temp_dir):
+ assert client.get(url='/dir/file')['status'] == 200, 'default chroot'
+ assert client.get(url='/index.html')['status'] == 200, 'default chroot 2'
+
+ assert 'success' in update_action(f'{temp_dir}/assets/dir')
+
+ assert client.get(url='/dir/file')['status'] == 200, 'chroot'
+ assert client.get(url='/index.html')['status'] == 403, 'chroot 403 2'
+ assert client.get(url='/file')['status'] == 403, 'chroot 403'
+
+
+def test_share_chroot_array(temp_dir):
+ assert 'success' in update_action(
+ f'{temp_dir}/assets/dir', ["/blah", f'{temp_dir}/assets$uri']
+ )
+ assert client.get(url='/dir/file')['status'] == 200, 'share array'
+
+ assert 'success' in update_action(
+ f'{temp_dir}/assets/$host',
+ ['/blah', f'{temp_dir}/assets$uri'],
+ )
+ assert get_custom('/dir/file', 'dir') == 200, 'array variable'
+
+ assert 'success' in update_action(
+ f'{temp_dir}/assets/dir', ['/blah', '/blah2']
+ )
+ assert client.get()['status'] != 200, 'share array bad'
+
+
+def test_static_chroot_permission(require, temp_dir):
+ require({'privileged_user': False})
+
+ os.chmod(f'{temp_dir}/assets/dir', 0o100)
+
+ assert 'success' in update_action(
+ f'{temp_dir}/assets/dir'
+ ), 'configure chroot'
+
+ assert client.get(url='/dir/file')['status'] == 200, 'chroot'
+
+
+def test_static_chroot_empty():
+ assert 'success' in update_action('')
+ assert client.get(url='/dir/file')['status'] == 200, 'empty absolute'
+
+ assert 'success' in update_action("", ".$uri")
+ assert client.get(url=client.test_path)['status'] == 200, 'empty relative'
+
+
+def test_static_chroot_relative(require):
+ require({'privileged_user': False})
+
+ assert 'success' in update_action('.')
+ assert client.get(url='/dir/file')['status'] == 403, 'relative chroot'
+
+ assert 'success' in client.conf({"share": ".$uri"}, 'routes/0/action')
+ assert client.get(url=client.test_path)['status'] == 200, 'relative share'
+
+ assert 'success' in update_action(".", ".$uri")
+ assert client.get(url=client.test_path)['status'] == 200, 'relative'
+
+
+def test_static_chroot_variables(temp_dir):
+ assert 'success' in update_action(f'{temp_dir}/assets/$host')
+ assert get_custom('/dir/file', 'dir') == 200
+
+ assert 'success' in update_action(f'{temp_dir}/assets/${{host}}')
+ assert get_custom('/dir/file', 'dir') == 200
+
+
+def test_static_chroot_variables_buildin_start(temp_dir):
+ assert 'success' in update_action(
+ '$uri/assets/dir',
+ f'{temp_dir}/assets/dir/$host',
+ )
+ assert get_custom(temp_dir, 'file') == 200
+
+
+def test_static_chroot_variables_buildin_mid(temp_dir):
+ assert 'success' in update_action(f'{temp_dir}/$host/dir')
+ assert get_custom('/dir/file', 'assets') == 200
+
+
+def test_static_chroot_variables_buildin_end(temp_dir):
+ assert 'success' in update_action(f'{temp_dir}/assets/$host')
+ assert get_custom('/dir/file', 'dir') == 200
+
+
+def test_static_chroot_slash(temp_dir):
+ assert 'success' in update_action(f'{temp_dir}/assets/dir/')
+ assert client.get(url='/dir/file')['status'] == 200, 'slash end'
+ assert client.get(url='/dirxfile')['status'] == 403, 'slash end bad'
+
+ assert 'success' in update_action(f'{temp_dir}/assets/dir')
+ assert client.get(url='/dir/file')['status'] == 200, 'no slash end'
+
+ assert 'success' in update_action(f'{temp_dir}/assets/dir/')
+ assert client.get(url='/dir/file')['status'] == 200, 'slash end 2'
+ assert client.get(url='/dirxfile')['status'] == 403, 'slash end 2 bad'
+
+ assert 'success' in update_action(
+ f'{temp_dir}//assets////dir///', f'{temp_dir}///assets/////$uri'
+ )
+ assert client.get(url='/dir/file')['status'] == 200, 'multiple slashes'
+
+
+def test_static_chroot_invalid(temp_dir):
+ assert 'error' in client.conf(
+ {"share": temp_dir, "chroot": True},
+ 'routes/0/action',
+ ), 'configure chroot error'
+ assert 'error' in client.conf(
+ {"share": temp_dir, "symlinks": "True"},
+ 'routes/0/action',
+ ), 'configure symlink error'
+ assert 'error' in client.conf(
+ {"share": temp_dir, "mount": "True"},
+ 'routes/0/action',
+ ), 'configure mount error'
+
+ assert 'error' in update_action(f'{temp_dir}/assets/d$r$uri')
+ assert 'error' in update_action(f'{temp_dir}/assets/$$uri')
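The chroot tests above drive Unit's static `share` action together with the `chroot` option through the module-level `client = ApplicationProto()` helper. A minimal sketch of that pattern, assuming the test-framework API shown in this diff and purely illustrative paths (the real tests build them from the temp_dir fixture):

    from unit.applications.proto import ApplicationProto

    client = ApplicationProto()

    # Example paths only; "share" is the document root, "chroot" confines lookups.
    action = {
        "share": "/tmp/assets$uri",
        "chroot": "/tmp/assets/dir",
    }

    # Apply the action to the first route; files under the chroot are served,
    # paths outside it are rejected.
    assert 'success' in client.conf(action, 'routes/0/action')
    assert client.get(url='/dir/file')['status'] == 200     # inside the chroot
    assert client.get(url='/index.html')['status'] == 403   # outside the chroot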
diff --git a/test/test_static_fallback.py b/test/test_static_fallback.py
index 75012bbb..ffc888ab 100644
--- a/test/test_static_fallback.py
+++ b/test/test_static_fallback.py
@@ -2,151 +2,156 @@ import os
from pathlib import Path
import pytest
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
+client = ApplicationProto()
-class TestStaticFallback(TestApplicationProto):
- prerequisites = {}
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, temp_dir):
- assets_dir = f'{temp_dir}/assets'
- os.makedirs(f'{assets_dir}/dir')
- Path(f'{assets_dir}/index.html').write_text('0123456789')
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ assets_dir = f'{temp_dir}/assets'
+ os.makedirs(f'{assets_dir}/dir')
+ Path(f'{assets_dir}/index.html').write_text('0123456789')
- os.makedirs(f'{assets_dir}/403')
- os.chmod(f'{assets_dir}/403', 0o000)
+ os.makedirs(f'{assets_dir}/403')
+ os.chmod(f'{assets_dir}/403', 0o000)
- self._load_conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {"pass": "routes"},
- },
- "routes": [{"action": {"share": f'{assets_dir}$uri'}}],
- "applications": {},
- }
- )
-
- yield
-
- try:
- os.chmod(f'{assets_dir}/403', 0o777)
- except FileNotFoundError:
- pass
-
- def action_update(self, conf):
- assert 'success' in self.conf(conf, 'routes/0/action')
-
- def test_static_fallback(self):
- self.action_update({"share": "/blah"})
- assert self.get()['status'] == 404, 'bad path no fallback'
-
- self.action_update({"share": "/blah", "fallback": {"return": 200}})
-
- resp = self.get()
- assert resp['status'] == 200, 'bad path fallback status'
- assert resp['body'] == '', 'bad path fallback'
-
- def test_static_fallback_valid_path(self, temp_dir):
- self.action_update(
- {"share": f"{temp_dir}/assets$uri", "fallback": {"return": 200}}
- )
- resp = self.get()
- assert resp['status'] == 200, 'fallback status'
- assert resp['body'] == '0123456789', 'fallback'
-
- resp = self.get(url='/403/')
- assert resp['status'] == 200, 'fallback status 403'
- assert resp['body'] == '', 'fallback 403'
-
- resp = self.post()
- assert resp['status'] == 200, 'fallback status 405'
- assert resp['body'] == '', 'fallback 405'
-
- assert self.get(url='/dir')['status'] == 301, 'fallback status 301'
-
- def test_static_fallback_nested(self):
- self.action_update(
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {"pass": "routes"},
+ },
+ "routes": [{"action": {"share": f'{assets_dir}$uri'}}],
+ "applications": {},
+ }
+ )
+
+ yield
+
+ try:
+ os.chmod(f'{assets_dir}/403', 0o777)
+ except FileNotFoundError:
+ pass
+
+
+def action_update(conf):
+ assert 'success' in client.conf(conf, 'routes/0/action')
+
+
+def test_static_fallback():
+ action_update({"share": "/blah"})
+ assert client.get()['status'] == 404, 'bad path no fallback'
+
+ action_update({"share": "/blah", "fallback": {"return": 200}})
+
+ resp = client.get()
+ assert resp['status'] == 200, 'bad path fallback status'
+ assert resp['body'] == '', 'bad path fallback'
+
+
+def test_static_fallback_valid_path(temp_dir):
+ action_update(
+ {"share": f"{temp_dir}/assets$uri", "fallback": {"return": 200}}
+ )
+ resp = client.get()
+ assert resp['status'] == 200, 'fallback status'
+ assert resp['body'] == '0123456789', 'fallback'
+
+ resp = client.get(url='/403/')
+ assert resp['status'] == 200, 'fallback status 403'
+ assert resp['body'] == '', 'fallback 403'
+
+ resp = client.post()
+ assert resp['status'] == 200, 'fallback status 405'
+ assert resp['body'] == '', 'fallback 405'
+
+ assert client.get(url='/dir')['status'] == 301, 'fallback status 301'
+
+
+def test_static_fallback_nested():
+ action_update(
+ {
+ "share": "/blah",
+ "fallback": {
+ "share": "/blah/blah",
+ "fallback": {"return": 200},
+ },
+ }
+ )
+
+ resp = client.get()
+ assert resp['status'] == 200, 'fallback nested status'
+ assert resp['body'] == '', 'fallback nested'
+
+
+def test_static_fallback_share(temp_dir):
+ action_update(
+ {
+ "share": "/blah",
+ "fallback": {"share": f"{temp_dir}/assets$uri"},
+ }
+ )
+
+ resp = client.get()
+ assert resp['status'] == 200, 'fallback share status'
+ assert resp['body'] == '0123456789', 'fallback share'
+
+ resp = client.head()
+ assert resp['status'] == 200, 'fallback share status HEAD'
+ assert resp['body'] == '', 'fallback share HEAD'
+
+ assert client.get(url='/dir')['status'] == 301, 'fallback share status 301'
+
+
+def test_static_fallback_proxy():
+ assert 'success' in client.conf(
+ [
{
- "share": "/blah",
- "fallback": {
- "share": "/blah/blah",
- "fallback": {"return": 200},
- },
- }
- )
-
- resp = self.get()
- assert resp['status'] == 200, 'fallback nested status'
- assert resp['body'] == '', 'fallback nested'
-
- def test_static_fallback_share(self, temp_dir):
- self.action_update(
+ "match": {"destination": "*:7081"},
+ "action": {"return": 200},
+ },
{
- "share": "/blah",
- "fallback": {"share": f"{temp_dir}/assets$uri"},
- }
- )
-
- resp = self.get()
- assert resp['status'] == 200, 'fallback share status'
- assert resp['body'] == '0123456789', 'fallback share'
-
- resp = self.head()
- assert resp['status'] == 200, 'fallback share status HEAD'
- assert resp['body'] == '', 'fallback share HEAD'
-
- assert (
- self.get(url='/dir')['status'] == 301
- ), 'fallback share status 301'
-
- def test_static_fallback_proxy(self):
- assert 'success' in self.conf(
- [
- {
- "match": {"destination": "*:7081"},
- "action": {"return": 200},
- },
- {
- "action": {
- "share": "/blah",
- "fallback": {"proxy": "http://127.0.0.1:7081"},
- }
- },
- ],
- 'routes',
- ), 'configure fallback proxy route'
-
- resp = self.get()
- assert resp['status'] == 200, 'fallback proxy status'
- assert resp['body'] == '', 'fallback proxy'
-
- @pytest.mark.skip('not yet')
- def test_static_fallback_proxy_loop(self, skip_alert):
- skip_alert(
- r'open.*/blah/index.html.*failed',
- r'accept.*failed',
- r'socket.*failed',
- r'new connections are not accepted',
- )
-
- self.action_update(
- {"share": "/blah", "fallback": {"proxy": "http://127.0.0.1:7080"}}
- )
- self.get(no_recv=True)
-
- assert 'success' in self.conf_delete('listeners/*:7081')
- self.get(read_timeout=1)
-
- def test_static_fallback_invalid(self):
- def check_error(conf):
- assert 'error' in self.conf(conf, 'routes/0/action')
-
- check_error({"share": "/blah", "fallback": {}})
- check_error({"share": "/blah", "fallback": ""})
- check_error({"return": 200, "fallback": {"share": "/blah"}})
- check_error(
- {"proxy": "http://127.0.0.1:7081", "fallback": {"share": "/blah"}}
- )
- check_error({"fallback": {"share": "/blah"}})
+ "action": {
+ "share": "/blah",
+ "fallback": {"proxy": "http://127.0.0.1:7081"},
+ }
+ },
+ ],
+ 'routes',
+ ), 'configure fallback proxy route'
+
+ resp = client.get()
+ assert resp['status'] == 200, 'fallback proxy status'
+ assert resp['body'] == '', 'fallback proxy'
+
+
+@pytest.mark.skip('not yet')
+def test_static_fallback_proxy_loop(skip_alert):
+ skip_alert(
+ r'open.*/blah/index.html.*failed',
+ r'accept.*failed',
+ r'socket.*failed',
+ r'new connections are not accepted',
+ )
+
+ action_update(
+ {"share": "/blah", "fallback": {"proxy": "http://127.0.0.1:7080"}}
+ )
+ client.get(no_recv=True)
+
+ assert 'success' in client.conf_delete('listeners/*:7081')
+ client.get(read_timeout=1)
+
+
+def test_static_fallback_invalid():
+ def check_error(conf):
+ assert 'error' in client.conf(conf, 'routes/0/action')
+
+ check_error({"share": "/blah", "fallback": {}})
+ check_error({"share": "/blah", "fallback": ""})
+ check_error({"return": 200, "fallback": {"share": "/blah"}})
+ check_error(
+ {"proxy": "http://127.0.0.1:7081", "fallback": {"share": "/blah"}}
+ )
+ check_error({"fallback": {"share": "/blah"}})
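The fallback tests above chain actions: when the `share` target cannot be served, Unit retries the nested `fallback` action, which may be another `share`, a `proxy`, or a `return`. A compact sketch of the nested form these tests configure, assuming the same `client` helper and example paths:

    action = {
        "share": "/blah",                     # intentionally missing
        "fallback": {
            "share": "/blah/blah",            # also missing
            "fallback": {"return": 200},      # final handler
        },
    }
    assert 'success' in client.conf(action, 'routes/0/action')
    assert client.get()['status'] == 200      # served by the innermost fallback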
diff --git a/test/test_static_mount.py b/test/test_static_mount.py
index 406922b1..ccd18919 100644
--- a/test/test_static_mount.py
+++ b/test/test_static_mount.py
@@ -3,133 +3,134 @@ import subprocess
from pathlib import Path
import pytest
-from unit.applications.proto import TestApplicationProto
-
-
-class TestStaticMount(TestApplicationProto):
- prerequisites = {'features': ['chroot']}
-
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, is_su, temp_dir):
- if not is_su:
- pytest.skip('requires root')
-
- os.makedirs(f'{temp_dir}/assets/dir/mount')
- os.makedirs(f'{temp_dir}/assets/dir/dir')
- os.makedirs(f'{temp_dir}/assets/mount')
- Path(f'{temp_dir}/assets/index.html').write_text('index')
- Path(f'{temp_dir}/assets/dir/dir/file').write_text('file')
- Path(f'{temp_dir}/assets/mount/index.html').write_text('mount')
-
- try:
- subprocess.check_output(
- [
- "mount",
- "--bind",
- f'{temp_dir}/assets/mount',
- f'{temp_dir}/assets/dir/mount',
- ],
- stderr=subprocess.STDOUT,
- )
-
- except KeyboardInterrupt:
- raise
-
- except subprocess.CalledProcessError:
- pytest.fail("Can't run mount process.")
-
- self._load_conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"share": f'{temp_dir}/assets/dir$uri'}}],
- }
+from unit.applications.proto import ApplicationProto
+
+prerequisites = {'features': {'chroot': True}, 'privileged_user': True}
+
+client = ApplicationProto()
+
+
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ os.makedirs(f'{temp_dir}/assets/dir/mount')
+ os.makedirs(f'{temp_dir}/assets/dir/dir')
+ os.makedirs(f'{temp_dir}/assets/mount')
+ Path(f'{temp_dir}/assets/index.html').write_text('index')
+ Path(f'{temp_dir}/assets/dir/dir/file').write_text('file')
+ Path(f'{temp_dir}/assets/mount/index.html').write_text('mount')
+
+ try:
+ subprocess.check_output(
+ [
+ "mount",
+ "--bind",
+ f'{temp_dir}/assets/mount',
+ f'{temp_dir}/assets/dir/mount',
+ ],
+ stderr=subprocess.STDOUT,
)
- yield
+ except KeyboardInterrupt:
+ raise
- try:
- subprocess.check_output(
- ["umount", "--lazy", f'{temp_dir}/assets/dir/mount'],
- stderr=subprocess.STDOUT,
- )
+ except subprocess.CalledProcessError:
+ pytest.fail("Can't run mount process.")
- except KeyboardInterrupt:
- raise
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"share": f'{temp_dir}/assets/dir$uri'}}],
+ }
+ )
- except subprocess.CalledProcessError:
- pytest.fail("Can't run umount process.")
+ yield
- def test_static_mount(self, temp_dir, skip_alert):
- skip_alert(r'opening.*failed')
+ try:
+ subprocess.check_output(
+ ["umount", "--lazy", f'{temp_dir}/assets/dir/mount'],
+ stderr=subprocess.STDOUT,
+ )
- resp = self.get(url='/mount/')
- assert resp['status'] == 200
- assert resp['body'] == 'mount'
+ except KeyboardInterrupt:
+ raise
- assert 'success' in self.conf(
- {"share": f'{temp_dir}/assets/dir$uri', "traverse_mounts": False},
- 'routes/0/action',
- ), 'configure mount disable'
+ except subprocess.CalledProcessError:
+ pytest.fail("Can't run umount process.")
- assert self.get(url='/mount/')['status'] == 403
- assert 'success' in self.conf(
- {"share": f'{temp_dir}/assets/dir$uri', "traverse_mounts": True},
- 'routes/0/action',
- ), 'configure mount enable'
+def test_static_mount(temp_dir, skip_alert):
+ skip_alert(r'opening.*failed')
- resp = self.get(url='/mount/')
- assert resp['status'] == 200
- assert resp['body'] == 'mount'
+ resp = client.get(url='/mount/')
+ assert resp['status'] == 200
+ assert resp['body'] == 'mount'
- def test_static_mount_two_blocks(self, temp_dir, skip_alert):
- skip_alert(r'opening.*failed')
+ assert 'success' in client.conf(
+ {"share": f'{temp_dir}/assets/dir$uri', "traverse_mounts": False},
+ 'routes/0/action',
+ ), 'configure mount disable'
- os.symlink(f'{temp_dir}/assets/dir', f'{temp_dir}/assets/link')
+ assert client.get(url='/mount/')['status'] == 403
- assert 'success' in self.conf(
- [
- {
- "match": {"method": "HEAD"},
- "action": {
- "share": f'{temp_dir}/assets/dir$uri',
- "traverse_mounts": False,
- },
- },
- {
- "match": {"method": "GET"},
- "action": {
- "share": f'{temp_dir}/assets/dir$uri',
- "traverse_mounts": True,
- },
- },
- ],
- 'routes',
- ), 'configure two options'
+ assert 'success' in client.conf(
+ {"share": f'{temp_dir}/assets/dir$uri', "traverse_mounts": True},
+ 'routes/0/action',
+ ), 'configure mount enable'
- assert self.get(url='/mount/')['status'] == 200, 'block enabled'
- assert self.head(url='/mount/')['status'] == 403, 'block disabled'
+ resp = client.get(url='/mount/')
+ assert resp['status'] == 200
+ assert resp['body'] == 'mount'
- def test_static_mount_chroot(self, temp_dir, skip_alert):
- skip_alert(r'opening.*failed')
- assert 'success' in self.conf(
- {
- "share": f'{temp_dir}/assets/dir$uri',
- "chroot": f'{temp_dir}/assets',
- },
- 'routes/0/action',
- ), 'configure chroot mount default'
+def test_static_mount_two_blocks(temp_dir, skip_alert):
+ skip_alert(r'opening.*failed')
- assert self.get(url='/mount/')['status'] == 200, 'chroot'
+ os.symlink(f'{temp_dir}/assets/dir', f'{temp_dir}/assets/link')
- assert 'success' in self.conf(
+ assert 'success' in client.conf(
+ [
+ {
+ "match": {"method": "HEAD"},
+ "action": {
+ "share": f'{temp_dir}/assets/dir$uri',
+ "traverse_mounts": False,
+ },
+ },
{
- "share": f'{temp_dir}/assets/dir$uri',
- "chroot": f'{temp_dir}/assets',
- "traverse_mounts": False,
+ "match": {"method": "GET"},
+ "action": {
+ "share": f'{temp_dir}/assets/dir$uri',
+ "traverse_mounts": True,
+ },
},
- 'routes/0/action',
- ), 'configure chroot mount disable'
+ ],
+ 'routes',
+ ), 'configure two options'
+
+ assert client.get(url='/mount/')['status'] == 200, 'block enabled'
+ assert client.head(url='/mount/')['status'] == 403, 'block disabled'
+
+
+def test_static_mount_chroot(temp_dir, skip_alert):
+ skip_alert(r'opening.*failed')
+
+ assert 'success' in client.conf(
+ {
+ "share": f'{temp_dir}/assets/dir$uri',
+ "chroot": f'{temp_dir}/assets',
+ },
+ 'routes/0/action',
+ ), 'configure chroot mount default'
+
+ assert client.get(url='/mount/')['status'] == 200, 'chroot'
+
+ assert 'success' in client.conf(
+ {
+ "share": f'{temp_dir}/assets/dir$uri',
+ "chroot": f'{temp_dir}/assets',
+ "traverse_mounts": False,
+ },
+ 'routes/0/action',
+ ), 'configure chroot mount disable'
- assert self.get(url='/mount/')['status'] == 403, 'chroot mount'
+ assert client.get(url='/mount/')['status'] == 403, 'chroot mount'
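These tests toggle `traverse_mounts` on a share that contains a bind mount; the fixture above creates the mount with mount --bind and removes it with umount --lazy. A sketch of the option itself, assuming the same `client` helper and an example share path:

    # With traversal disabled, requests that cross the mount point return 403.
    assert 'success' in client.conf(
        {"share": "/tmp/assets/dir$uri", "traverse_mounts": False},  # example path
        'routes/0/action',
    )
    assert client.get(url='/mount/')['status'] == 403

    # Re-enabling traversal restores access to the mounted subtree.
    assert 'success' in client.conf(
        {"share": "/tmp/assets/dir$uri", "traverse_mounts": True},
        'routes/0/action',
    )
    assert client.get(url='/mount/')['status'] == 200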
diff --git a/test/test_static_share.py b/test/test_static_share.py
index 0166f1f0..ac5afb50 100644
--- a/test/test_static_share.py
+++ b/test/test_static_share.py
@@ -2,71 +2,72 @@ import os
from pathlib import Path
import pytest
-from unit.applications.proto import TestApplicationProto
-
-
-class TestStaticShare(TestApplicationProto):
- prerequisites = {}
-
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, temp_dir):
- os.makedirs(f'{temp_dir}/assets/dir')
- os.makedirs(f'{temp_dir}/assets/dir2')
-
- Path(f'{temp_dir}/assets/dir/file').write_text('1')
- Path(f'{temp_dir}/assets/dir2/file2').write_text('2')
-
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
- "applications": {},
- }
- )
-
- def action_update(self, conf):
- assert 'success' in self.conf(conf, 'routes/0/action')
-
- def test_share_array(self, temp_dir):
- assert self.get(url='/dir/file')['body'] == '1'
- assert self.get(url='/dir2/file2')['body'] == '2'
-
- self.action_update({"share": [f'{temp_dir}/assets/dir$uri']})
-
- assert self.get(url='/file')['body'] == '1'
- assert self.get(url='/file2')['status'] == 404
-
- self.action_update(
- {
- "share": [
- f'{temp_dir}/assets/dir$uri',
- f'{temp_dir}/assets/dir2$uri',
- ]
- }
- )
-
- assert self.get(url='/file')['body'] == '1'
- assert self.get(url='/file2')['body'] == '2'
-
- self.action_update(
- {
- "share": [
- f'{temp_dir}/assets/dir2$uri',
- f'{temp_dir}/assets/dir3$uri',
- ]
- }
- )
-
- assert self.get(url='/file')['status'] == 404
- assert self.get(url='/file2')['body'] == '2'
-
- def test_share_array_fallback(self):
- self.action_update(
- {"share": ["/blah", "/blah2"], "fallback": {"return": 201}}
- )
-
- assert self.get()['status'] == 201
-
- def test_share_array_invalid(self):
- assert 'error' in self.conf({"share": []}, 'routes/0/action')
- assert 'error' in self.conf({"share": {}}, 'routes/0/action')
+from unit.applications.proto import ApplicationProto
+
+client = ApplicationProto()
+
+
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ os.makedirs(f'{temp_dir}/assets/dir')
+ os.makedirs(f'{temp_dir}/assets/dir2')
+
+ Path(f'{temp_dir}/assets/dir/file').write_text('1')
+ Path(f'{temp_dir}/assets/dir2/file2').write_text('2')
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
+ "applications": {},
+ }
+ )
+
+
+def action_update(conf):
+ assert 'success' in client.conf(conf, 'routes/0/action')
+
+
+def test_share_array(temp_dir):
+ assert client.get(url='/dir/file')['body'] == '1'
+ assert client.get(url='/dir2/file2')['body'] == '2'
+
+ action_update({"share": [f'{temp_dir}/assets/dir$uri']})
+
+ assert client.get(url='/file')['body'] == '1'
+ assert client.get(url='/file2')['status'] == 404
+
+ action_update(
+ {
+ "share": [
+ f'{temp_dir}/assets/dir$uri',
+ f'{temp_dir}/assets/dir2$uri',
+ ]
+ }
+ )
+
+ assert client.get(url='/file')['body'] == '1'
+ assert client.get(url='/file2')['body'] == '2'
+
+ action_update(
+ {
+ "share": [
+ f'{temp_dir}/assets/dir2$uri',
+ f'{temp_dir}/assets/dir3$uri',
+ ]
+ }
+ )
+
+ assert client.get(url='/file')['status'] == 404
+ assert client.get(url='/file2')['body'] == '2'
+
+
+def test_share_array_fallback():
+ action_update({"share": ["/blah", "/blah2"], "fallback": {"return": 201}})
+
+ assert client.get()['status'] == 201
+
+
+def test_share_array_invalid():
+ assert 'error' in client.conf({"share": []}, 'routes/0/action')
+ assert 'error' in client.conf({"share": {}}, 'routes/0/action')
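`share` also accepts an array of paths, tried in order until one yields a file; a `fallback` covers the case where no entry matches. A sketch with example paths, assuming the same `client` helper:

    assert 'success' in client.conf(
        {
            "share": [
                "/tmp/assets/dir$uri",    # tried first
                "/tmp/assets/dir2$uri",   # tried next
            ],
            "fallback": {"return": 201},  # used when no entry matches
        },
        'routes/0/action',
    )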
diff --git a/test/test_static_symlink.py b/test/test_static_symlink.py
index 13d67bc7..1f7d7907 100644
--- a/test/test_static_symlink.py
+++ b/test/test_static_symlink.py
@@ -2,92 +2,94 @@ import os
from pathlib import Path
import pytest
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
+prerequisites = {'features': {'chroot': True}}
-class TestStaticSymlink(TestApplicationProto):
- prerequisites = {'features': ['chroot']}
+client = ApplicationProto()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, temp_dir):
- os.makedirs(f'{temp_dir}/assets/dir/dir')
- Path(f'{temp_dir}/assets/index.html').write_text('0123456789')
- Path(f'{temp_dir}/assets/dir/file').write_text('blah')
- self._load_conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
- }
- )
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ os.makedirs(f'{temp_dir}/assets/dir/dir')
+ Path(f'{temp_dir}/assets/index.html').write_text('0123456789')
+ Path(f'{temp_dir}/assets/dir/file').write_text('blah')
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
+ }
+ )
+
- def test_static_symlink(self, temp_dir, skip_alert):
- skip_alert(r'opening.*failed')
+def test_static_symlink(temp_dir, skip_alert):
+ skip_alert(r'opening.*failed')
- os.symlink(f'{temp_dir}/assets/dir', f'{temp_dir}/assets/link')
+ os.symlink(f'{temp_dir}/assets/dir', f'{temp_dir}/assets/link')
- assert self.get(url='/dir')['status'] == 301, 'dir'
- assert self.get(url='/dir/file')['status'] == 200, 'file'
- assert self.get(url='/link')['status'] == 301, 'symlink dir'
- assert self.get(url='/link/file')['status'] == 200, 'symlink file'
+ assert client.get(url='/dir')['status'] == 301, 'dir'
+ assert client.get(url='/dir/file')['status'] == 200, 'file'
+ assert client.get(url='/link')['status'] == 301, 'symlink dir'
+ assert client.get(url='/link/file')['status'] == 200, 'symlink file'
- assert 'success' in self.conf(
- {"share": f'{temp_dir}/assets$uri', "follow_symlinks": False},
- 'routes/0/action',
- ), 'configure symlink disable'
+ assert 'success' in client.conf(
+ {"share": f'{temp_dir}/assets$uri', "follow_symlinks": False},
+ 'routes/0/action',
+ ), 'configure symlink disable'
- assert self.get(url='/link/file')['status'] == 403, 'symlink disabled'
+ assert client.get(url='/link/file')['status'] == 403, 'symlink disabled'
- assert 'success' in self.conf(
- {"share": f'{temp_dir}/assets$uri', "follow_symlinks": True},
- 'routes/0/action',
- ), 'configure symlink enable'
+ assert 'success' in client.conf(
+ {"share": f'{temp_dir}/assets$uri', "follow_symlinks": True},
+ 'routes/0/action',
+ ), 'configure symlink enable'
- assert self.get(url='/link/file')['status'] == 200, 'symlink enabled'
+ assert client.get(url='/link/file')['status'] == 200, 'symlink enabled'
- def test_static_symlink_two_blocks(self, temp_dir, skip_alert):
- skip_alert(r'opening.*failed')
- os.symlink(f'{temp_dir}/assets/dir', f'{temp_dir}/assets/link')
+def test_static_symlink_two_blocks(temp_dir, skip_alert):
+ skip_alert(r'opening.*failed')
- assert 'success' in self.conf(
- [
- {
- "match": {"method": "HEAD"},
- "action": {
- "share": f'{temp_dir}/assets$uri',
- "follow_symlinks": False,
- },
+ os.symlink(f'{temp_dir}/assets/dir', f'{temp_dir}/assets/link')
+
+ assert 'success' in client.conf(
+ [
+ {
+ "match": {"method": "HEAD"},
+ "action": {
+ "share": f'{temp_dir}/assets$uri',
+ "follow_symlinks": False,
},
- {
- "match": {"method": "GET"},
- "action": {
- "share": f'{temp_dir}/assets$uri',
- "follow_symlinks": True,
- },
+ },
+ {
+ "match": {"method": "GET"},
+ "action": {
+ "share": f'{temp_dir}/assets$uri',
+ "follow_symlinks": True,
},
- ],
- 'routes',
- ), 'configure two options'
+ },
+ ],
+ 'routes',
+ ), 'configure two options'
- assert self.get(url='/link/file')['status'] == 200, 'block enabled'
- assert self.head(url='/link/file')['status'] == 403, 'block disabled'
+ assert client.get(url='/link/file')['status'] == 200, 'block enabled'
+ assert client.head(url='/link/file')['status'] == 403, 'block disabled'
- def test_static_symlink_chroot(self, temp_dir, skip_alert):
- skip_alert(r'opening.*failed')
- os.symlink(
- f'{temp_dir}/assets/dir/file', f'{temp_dir}/assets/dir/dir/link'
- )
+def test_static_symlink_chroot(temp_dir, skip_alert):
+ skip_alert(r'opening.*failed')
- assert self.get(url='/dir/dir/link')['status'] == 200, 'default chroot'
+ os.symlink(f'{temp_dir}/assets/dir/file', f'{temp_dir}/assets/dir/dir/link')
- assert 'success' in self.conf(
- {
- "share": f'{temp_dir}/assets$uri',
- "chroot": f'{temp_dir}/assets/dir/dir',
- },
- 'routes/0/action',
- ), 'configure chroot'
+ assert client.get(url='/dir/dir/link')['status'] == 200, 'default chroot'
+
+ assert 'success' in client.conf(
+ {
+ "share": f'{temp_dir}/assets$uri',
+ "chroot": f'{temp_dir}/assets/dir/dir',
+ },
+ 'routes/0/action',
+ ), 'configure chroot'
- assert self.get(url='/dir/dir/link')['status'] == 404, 'chroot'
+ assert client.get(url='/dir/dir/link')['status'] == 404, 'chroot'
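The symlink tests flip `follow_symlinks` per action (and per route block). A minimal sketch, assuming the same helper, an example share path, and a `/link` symlink like the one the tests create:

    assert 'success' in client.conf(
        {"share": "/tmp/assets$uri", "follow_symlinks": False},  # example path
        'routes/0/action',
    )
    assert client.get(url='/link/file')['status'] == 403   # symlink not followed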
diff --git a/test/test_static_types.py b/test/test_static_types.py
index 28ab28e6..8cd28ca4 100644
--- a/test/test_static_types.py
+++ b/test/test_static_types.py
@@ -1,172 +1,173 @@
from pathlib import Path
import pytest
-from unit.applications.proto import TestApplicationProto
-
-
-class TestStaticTypes(TestApplicationProto):
- prerequisites = {}
-
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, temp_dir):
- Path(f'{temp_dir}/assets').mkdir()
- for ext in ['.xml', '.mp4', '.php', '', '.txt', '.html', '.png']:
- Path(f'{temp_dir}/assets/file{ext}').write_text(ext)
-
- Path(f'{temp_dir}/assets/index.html').write_text('index')
-
- self._load_conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {"pass": "routes"},
- },
- "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
- "applications": {},
- }
- )
-
- def action_update(self, conf):
- assert 'success' in self.conf(conf, 'routes/0/action')
-
- def check_body(self, http_url, body):
- resp = self.get(url=http_url)
- assert resp['status'] == 200, 'status'
- assert resp['body'] == body, 'body'
-
- def test_static_types_basic(self, temp_dir):
- self.action_update({"share": f'{temp_dir}/assets$uri'})
- self.check_body('/index.html', 'index')
- self.check_body('/file.xml', '.xml')
-
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": "application/xml"}
- )
- self.check_body('/file.xml', '.xml')
-
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": ["application/xml"]}
- )
- self.check_body('/file.xml', '.xml')
-
- self.action_update({"share": f'{temp_dir}/assets$uri', "types": [""]})
- assert self.get(url='/file.xml')['status'] == 403, 'no mtype'
-
- def test_static_types_wildcard(self, temp_dir):
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": ["application/*"]}
- )
- self.check_body('/file.xml', '.xml')
- assert self.get(url='/file.mp4')['status'] == 403, 'app * mtype mp4'
-
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": ["video/*"]}
- )
- assert self.get(url='/file.xml')['status'] == 403, 'video * mtype xml'
- self.check_body('/file.mp4', '.mp4')
-
- def test_static_types_negation(self, temp_dir):
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": ["!application/xml"]}
- )
- assert self.get(url='/file.xml')['status'] == 403, 'forbidden negation'
- self.check_body('/file.mp4', '.mp4')
-
- # sorting negation
- self.action_update(
- {
- "share": f'{temp_dir}/assets$uri',
- "types": ["!video/*", "image/png", "!image/jpg"],
- }
- )
- assert self.get(url='/file.mp4')['status'] == 403, 'negation sort mp4'
- self.check_body('/file.png', '.png')
- assert self.get(url='/file.jpg')['status'] == 403, 'negation sort jpg'
-
- def test_static_types_regex(self, temp_dir):
- self.action_update(
+from unit.applications.proto import ApplicationProto
+
+client = ApplicationProto()
+
+
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ Path(f'{temp_dir}/assets').mkdir()
+ for ext in ['.xml', '.mp4', '.php', '', '.txt', '.html', '.png']:
+ Path(f'{temp_dir}/assets/file{ext}').write_text(ext)
+
+ Path(f'{temp_dir}/assets/index.html').write_text('index')
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {"pass": "routes"},
+ },
+ "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
+ "applications": {},
+ }
+ )
+
+
+def action_update(conf):
+ assert 'success' in client.conf(conf, 'routes/0/action')
+
+
+def check_body(http_url, body):
+ resp = client.get(url=http_url)
+ assert resp['status'] == 200, 'status'
+ assert resp['body'] == body, 'body'
+
+
+def test_static_types_basic(temp_dir):
+ action_update({"share": f'{temp_dir}/assets$uri'})
+ check_body('/index.html', 'index')
+ check_body('/file.xml', '.xml')
+
+ action_update(
+ {"share": f'{temp_dir}/assets$uri', "types": "application/xml"}
+ )
+ check_body('/file.xml', '.xml')
+
+ action_update(
+ {"share": f'{temp_dir}/assets$uri', "types": ["application/xml"]}
+ )
+ check_body('/file.xml', '.xml')
+
+ action_update({"share": f'{temp_dir}/assets$uri', "types": [""]})
+ assert client.get(url='/file.xml')['status'] == 403, 'no mtype'
+
+
+def test_static_types_wildcard(temp_dir):
+ action_update(
+ {"share": f'{temp_dir}/assets$uri', "types": ["application/*"]}
+ )
+ check_body('/file.xml', '.xml')
+ assert client.get(url='/file.mp4')['status'] == 403, 'app * mtype mp4'
+
+ action_update({"share": f'{temp_dir}/assets$uri', "types": ["video/*"]})
+ assert client.get(url='/file.xml')['status'] == 403, 'video * mtype xml'
+ check_body('/file.mp4', '.mp4')
+
+
+def test_static_types_negation(temp_dir):
+ action_update(
+ {"share": f'{temp_dir}/assets$uri', "types": ["!application/xml"]}
+ )
+ assert client.get(url='/file.xml')['status'] == 403, 'forbidden negation'
+ check_body('/file.mp4', '.mp4')
+
+ # sorting negation
+ action_update(
+ {
+ "share": f'{temp_dir}/assets$uri',
+ "types": ["!video/*", "image/png", "!image/jpg"],
+ }
+ )
+ assert client.get(url='/file.mp4')['status'] == 403, 'negation sort mp4'
+ check_body('/file.png', '.png')
+ assert client.get(url='/file.jpg')['status'] == 403, 'negation sort jpg'
+
+
+def test_static_types_regex(temp_dir):
+ action_update(
+ {
+ "share": f'{temp_dir}/assets$uri',
+ "types": ["~text/(html|plain)"],
+ }
+ )
+ assert client.get(url='/file.php')['status'] == 403, 'regex fail'
+ check_body('/file.html', '.html')
+ check_body('/file.txt', '.txt')
+
+
+def test_static_types_case(temp_dir):
+ action_update(
+ {"share": f'{temp_dir}/assets$uri', "types": ["!APpliCaTiOn/xMl"]}
+ )
+ check_body('/file.mp4', '.mp4')
+ assert (
+ client.get(url='/file.xml')['status'] == 403
+ ), 'mixed case xml negation'
+
+ action_update({"share": f'{temp_dir}/assets$uri', "types": ["vIdEo/mp4"]})
+ assert client.get(url='/file.mp4')['status'] == 200, 'mixed case'
+ assert (
+ client.get(url='/file.xml')['status'] == 403
+ ), 'mixed case video negation'
+
+ action_update({"share": f'{temp_dir}/assets$uri', "types": ["vIdEo/*"]})
+ check_body('/file.mp4', '.mp4')
+ assert (
+ client.get(url='/file.xml')['status'] == 403
+ ), 'mixed case video * negation'
+
+
+def test_static_types_fallback(temp_dir):
+ assert 'success' in client.conf(
+ [
{
- "share": f'{temp_dir}/assets$uri',
- "types": ["~text/(html|plain)"],
- }
- )
- assert self.get(url='/file.php')['status'] == 403, 'regex fail'
- self.check_body('/file.html', '.html')
- self.check_body('/file.txt', '.txt')
-
- def test_static_types_case(self, temp_dir):
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": ["!APpliCaTiOn/xMl"]}
- )
- self.check_body('/file.mp4', '.mp4')
- assert (
- self.get(url='/file.xml')['status'] == 403
- ), 'mixed case xml negation'
-
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": ["vIdEo/mp4"]}
- )
- assert self.get(url='/file.mp4')['status'] == 200, 'mixed case'
- assert (
- self.get(url='/file.xml')['status'] == 403
- ), 'mixed case video negation'
-
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": ["vIdEo/*"]}
- )
- self.check_body('/file.mp4', '.mp4')
- assert (
- self.get(url='/file.xml')['status'] == 403
- ), 'mixed case video * negation'
-
- def test_static_types_fallback(self, temp_dir):
- assert 'success' in self.conf(
- [
- {
- "match": {"destination": "*:7081"},
- "action": {"return": 200},
- },
- {
- "action": {
- "share": f'{temp_dir}/assets$uri',
- "types": ["!application/x-httpd-php"],
- "fallback": {"proxy": "http://127.0.0.1:7081"},
- }
- },
- ],
- 'routes',
- ), 'configure fallback proxy route'
-
- self.check_body('/file.php', '')
- self.check_body('/file.mp4', '.mp4')
-
- def test_static_types_index(self, temp_dir):
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": "application/xml"}
- )
- self.check_body('/', 'index')
- self.check_body('/file.xml', '.xml')
- assert self.get(url='/index.html')['status'] == 403, 'forbidden mtype'
- assert self.get(url='/file.mp4')['status'] == 403, 'forbidden mtype'
-
- def test_static_types_custom_mime(self, temp_dir):
- self._load_conf(
+ "match": {"destination": "*:7081"},
+ "action": {"return": 200},
+ },
{
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
- "applications": {},
- "settings": {
- "http": {
- "static": {"mime_types": {"test/mime-type": ["file"]}}
- }
- },
- }
- )
-
- self.action_update({"share": f'{temp_dir}/assets$uri', "types": [""]})
- assert self.get(url='/file')['status'] == 403, 'forbidden custom mime'
-
- self.action_update(
- {"share": f'{temp_dir}/assets$uri', "types": ["test/mime-type"]}
- )
- self.check_body('/file', '')
+ "action": {
+ "share": f'{temp_dir}/assets$uri',
+ "types": ["!application/x-httpd-php"],
+ "fallback": {"proxy": "http://127.0.0.1:7081"},
+ }
+ },
+ ],
+ 'routes',
+ ), 'configure fallback proxy route'
+
+ check_body('/file.php', '')
+ check_body('/file.mp4', '.mp4')
+
+
+def test_static_types_index(temp_dir):
+ action_update(
+ {"share": f'{temp_dir}/assets$uri', "types": "application/xml"}
+ )
+ check_body('/', 'index')
+ check_body('/file.xml', '.xml')
+ assert client.get(url='/index.html')['status'] == 403, 'forbidden mtype'
+ assert client.get(url='/file.mp4')['status'] == 403, 'forbidden mtype'
+
+
+def test_static_types_custom_mime(temp_dir):
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
+ "applications": {},
+ "settings": {
+ "http": {"static": {"mime_types": {"test/mime-type": ["file"]}}}
+ },
+ }
+ )
+
+ action_update({"share": f'{temp_dir}/assets$uri', "types": [""]})
+ assert client.get(url='/file')['status'] == 403, 'forbidden custom mime'
+
+ action_update(
+ {"share": f'{temp_dir}/assets$uri', "types": ["test/mime-type"]}
+ )
+ check_body('/file', '')
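The `types` option filters served files by MIME type and accepts exact types, `*` wildcards, `!` negations, and `~` regular expressions; the tests above also show that negations apply regardless of their position in the list. A sketch with an example share path, assuming the same `client` helper:

    assert 'success' in client.conf(
        {
            "share": "/tmp/assets$uri",            # example path
            "types": ["!video/*", "image/png"],    # allow PNG, reject any video
        },
        'routes/0/action',
    )
    assert client.get(url='/file.png')['status'] == 200
    assert client.get(url='/file.mp4')['status'] == 403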
diff --git a/test/test_static_variables.py b/test/test_static_variables.py
index 370c3e6f..bc39e90e 100644
--- a/test/test_static_variables.py
+++ b/test/test_static_variables.py
@@ -2,78 +2,82 @@ import os
from pathlib import Path
import pytest
-from unit.applications.proto import TestApplicationProto
-
-
-class TestStaticVariables(TestApplicationProto):
- prerequisites = {}
-
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, temp_dir):
- os.makedirs(f'{temp_dir}/assets/dir')
- os.makedirs(f'{temp_dir}/assets/d$r')
- Path(f'{temp_dir}/assets/index.html').write_text('0123456789')
- Path(f'{temp_dir}/assets/dir/file').write_text('file')
- Path(f'{temp_dir}/assets/d$r/file').write_text('d$r')
-
- self._load_conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
- }
- )
-
- def update_share(self, share):
- if isinstance(share, list):
- return self.conf(share, 'routes/0/action/share')
-
- return self.conf(f'"{share}"', 'routes/0/action/share')
-
- def test_static_variables(self, temp_dir):
- assert self.get(url='/index.html')['status'] == 200
- assert self.get(url='/d$r/file')['status'] == 200
-
- assert 'success' in self.update_share('$uri')
- assert self.get(url=f'{temp_dir}/assets/index.html')['status'] == 200
-
- assert 'success' in self.update_share(f'{temp_dir}/assets${{uri}}')
- assert self.get(url='/index.html')['status'] == 200
-
- def test_static_variables_array(self, temp_dir):
- assert 'success' in self.update_share(
- [f'{temp_dir}/assets$uri', '$uri']
- )
-
- assert self.get(url='/dir/file')['status'] == 200
- assert self.get(url=f'{temp_dir}/assets/index.html')['status'] == 200
- assert self.get(url='/blah')['status'] == 404
-
- assert 'success' in self.conf(
- {
- "share": [f'{temp_dir}/assets$uri', '$uri'],
- "fallback": {"return": 201},
- },
- 'routes/0/action',
- )
-
- assert self.get(url='/dir/file')['status'] == 200
- assert self.get(url=f'{temp_dir}/assets/index.html')['status'] == 200
- assert self.get(url='/dir/blah')['status'] == 201
-
- def test_static_variables_buildin_start(self, temp_dir):
- assert 'success' in self.update_share('$uri/assets/index.html')
- assert self.get(url=temp_dir)['status'] == 200
-
- def test_static_variables_buildin_mid(self, temp_dir):
- assert 'success' in self.update_share(f'{temp_dir}$uri/index.html')
- assert self.get(url='/assets')['status'] == 200
-
- def test_static_variables_buildin_end(self):
- assert self.get(url='/index.html')['status'] == 200
-
- def test_static_variables_invalid(self, temp_dir):
- assert 'error' in self.update_share(f'{temp_dir}/assets/d$r$uri')
- assert 'error' in self.update_share(f'{temp_dir}/assets/$$uri')
- assert 'error' in self.update_share(
- [f'{temp_dir}/assets$uri', f'{temp_dir}/assets/dir', '$$uri']
- )
+from unit.applications.proto import ApplicationProto
+
+client = ApplicationProto()
+
+
+@pytest.fixture(autouse=True)
+def setup_method_fixture(temp_dir):
+ os.makedirs(f'{temp_dir}/assets/dir')
+ os.makedirs(f'{temp_dir}/assets/d$r')
+ Path(f'{temp_dir}/assets/index.html').write_text('0123456789')
+ Path(f'{temp_dir}/assets/dir/file').write_text('file')
+ Path(f'{temp_dir}/assets/d$r/file').write_text('d$r')
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"share": f'{temp_dir}/assets$uri'}}],
+ }
+ )
+
+
+def update_share(share):
+ if isinstance(share, list):
+ return client.conf(share, 'routes/0/action/share')
+
+ return client.conf(f'"{share}"', 'routes/0/action/share')
+
+
+def test_static_variables(temp_dir):
+ assert client.get(url='/index.html')['status'] == 200
+ assert client.get(url='/d$r/file')['status'] == 200
+
+ assert 'success' in update_share('$uri')
+ assert client.get(url=f'{temp_dir}/assets/index.html')['status'] == 200
+
+ assert 'success' in update_share(f'{temp_dir}/assets${{uri}}')
+ assert client.get(url='/index.html')['status'] == 200
+
+
+def test_static_variables_array(temp_dir):
+ assert 'success' in update_share([f'{temp_dir}/assets$uri', '$uri'])
+
+ assert client.get(url='/dir/file')['status'] == 200
+ assert client.get(url=f'{temp_dir}/assets/index.html')['status'] == 200
+ assert client.get(url='/blah')['status'] == 404
+
+ assert 'success' in client.conf(
+ {
+ "share": [f'{temp_dir}/assets$uri', '$uri'],
+ "fallback": {"return": 201},
+ },
+ 'routes/0/action',
+ )
+
+ assert client.get(url='/dir/file')['status'] == 200
+ assert client.get(url=f'{temp_dir}/assets/index.html')['status'] == 200
+ assert client.get(url='/dir/blah')['status'] == 201
+
+
+def test_static_variables_buildin_start(temp_dir):
+ assert 'success' in update_share('$uri/assets/index.html')
+ assert client.get(url=temp_dir)['status'] == 200
+
+
+def test_static_variables_buildin_mid(temp_dir):
+ assert 'success' in update_share(f'{temp_dir}$uri/index.html')
+ assert client.get(url='/assets')['status'] == 200
+
+
+def test_static_variables_buildin_end():
+ assert client.get(url='/index.html')['status'] == 200
+
+
+def test_static_variables_invalid(temp_dir):
+ assert 'error' in update_share(f'{temp_dir}/assets/d$r$uri')
+ assert 'error' in update_share(f'{temp_dir}/assets/$$uri')
+ assert 'error' in update_share(
+ [f'{temp_dir}/assets$uri', f'{temp_dir}/assets/dir', '$$uri']
+ )
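These tests substitute `$uri` and `${uri}` inside `share` entries and reject malformed references such as `$$`. A sketch of the same checks through the raw configuration API, assuming the helper above and example paths (string values are passed as JSON literals, as in `update_share`):

    assert 'success' in client.conf('"/tmp/assets${uri}"', 'routes/0/action/share')
    assert 'error' in client.conf('"/tmp/assets/$$uri"', 'routes/0/action/share')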
diff --git a/test/test_status.py b/test/test_status.py
index d0901f42..11b140cf 100644
--- a/test/test_status.py
+++ b/test/test_status.py
@@ -1,76 +1,79 @@
import time
-import pytest
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
from unit.status import Status
+prerequisites = {'modules': {'python': 'any'}}
-class TestStatus(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def check_connections(self, accepted, active, idle, closed):
- Status.get('/connections') == {
- 'accepted': accepted,
- 'active': active,
- 'idle': idle,
- 'closed': closed,
- }
- def app_default(self, name="empty", module="wsgi"):
- name_dir = f'{option.test_dir}/python/{name}'
- return {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": name_dir,
- "working_directory": name_dir,
- "module": module,
- }
+def check_connections(accepted, active, idle, closed):
+ assert Status.get('/connections') == {
+ 'accepted': accepted,
+ 'active': active,
+ 'idle': idle,
+ 'closed': closed,
+ }
- def test_status(self):
- assert 'error' in self.conf_delete('/status'), 'DELETE method'
- def test_status_requests(self, skip_alert):
- skip_alert(r'Python failed to import module "blah"')
+def app_default(name="empty", module="wsgi"):
+ name_dir = f'{option.test_dir}/python/{name}'
+ return {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": name_dir,
+ "working_directory": name_dir,
+ "module": module,
+ }
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {"pass": "applications/empty"},
- "*:7082": {"pass": "applications/blah"},
- },
- "routes": [{"action": {"return": 200}}],
- "applications": {
- "empty": self.app_default(),
- "blah": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "module": "blah",
- },
+
+def test_status():
+ assert 'error' in client.conf_delete('/status'), 'DELETE method'
+
+
+def test_status_requests(skip_alert):
+ skip_alert(r'Python failed to import module "blah"')
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {"pass": "applications/empty"},
+ "*:7082": {"pass": "applications/blah"},
+ },
+ "routes": [{"action": {"return": 200}}],
+ "applications": {
+ "empty": app_default(),
+ "blah": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "module": "blah",
},
},
- )
+ },
+ )
- Status.init()
+ Status.init()
- assert self.get()['status'] == 200
- assert Status.get('/requests/total') == 1, '2xx'
+ assert client.get()['status'] == 200
+ assert Status.get('/requests/total') == 1, '2xx'
- assert self.get(port=7081)['status'] == 200
- assert Status.get('/requests/total') == 2, '2xx app'
+ assert client.get(port=7081)['status'] == 200
+ assert Status.get('/requests/total') == 2, '2xx app'
- assert (
- self.get(headers={'Host': '/', 'Connection': 'close'})['status']
- == 400
- )
- assert Status.get('/requests/total') == 3, '4xx'
+ assert (
+ client.get(headers={'Host': '/', 'Connection': 'close'})['status']
+ == 400
+ )
+ assert Status.get('/requests/total') == 3, '4xx'
- assert self.get(port=7082)['status'] == 503
- assert Status.get('/requests/total') == 4, '5xx'
+ assert client.get(port=7082)['status'] == 503
+ assert Status.get('/requests/total') == 4, '5xx'
- self.http(
- b"""GET / HTTP/1.1
+ client.http(
+ b"""GET / HTTP/1.1
Host: localhost
GET / HTTP/1.1
@@ -78,154 +81,162 @@ Host: localhost
Connection: close
""",
- raw=True,
- )
- assert Status.get('/requests/total') == 6, 'pipeline'
+ raw=True,
+ )
+ assert Status.get('/requests/total') == 6, 'pipeline'
- sock = self.get(port=7081, no_recv=True)
+ sock = client.get(port=7081, no_recv=True)
- time.sleep(1)
+ time.sleep(1)
- assert Status.get('/requests/total') == 7, 'no receive'
+ assert Status.get('/requests/total') == 7, 'no receive'
- sock.close()
+ sock.close()
- def test_status_connections(self):
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {"pass": "applications/delayed"},
- },
- "routes": [{"action": {"return": 200}}],
- "applications": {
- "delayed": self.app_default("delayed"),
- },
+
+def test_status_connections():
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {"pass": "applications/delayed"},
},
- )
+ "routes": [{"action": {"return": 200}}],
+ "applications": {
+ "delayed": app_default("delayed"),
+ },
+ },
+ )
+
+ Status.init()
+
+ # accepted, closed
- Status.init()
+ assert client.get()['status'] == 200
+ check_connections(1, 0, 0, 1)
- # accepted, closed
+ # idle
- assert self.get()['status'] == 200
- self.check_connections(1, 0, 0, 1)
+ (_, sock) = client.get(
+ headers={'Host': 'localhost', 'Connection': 'keep-alive'},
+ start=True,
+ read_timeout=1,
+ )
- # idle
+ check_connections(2, 0, 1, 1)
- sock = self.http(b'', raw=True, no_recv=True)
- self.check_connections(2, 0, 1, 1)
+ client.get(sock=sock)
+ check_connections(2, 0, 0, 2)
- self.get(sock=sock)
- self.check_connections(2, 0, 0, 2)
+ # active
- # active
+ (_, sock) = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Delay': '2',
+ 'Connection': 'close',
+ },
+ port=7081,
+ start=True,
+ read_timeout=1,
+ )
+ check_connections(3, 1, 0, 2)
- (_, sock) = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Delay': '2',
- 'Connection': 'close',
+ client.get(sock=sock)
+ check_connections(3, 0, 0, 3)
+
+
+def test_status_applications():
+    def check_applications(expected):
+        apps = sorted(client.conf_get('/status/applications').keys())
+        assert apps == sorted(expected)
+
+ def check_application(name, running, starting, idle, active):
+ assert Status.get(f'/applications/{name}') == {
+ 'processes': {
+ 'running': running,
+ 'starting': starting,
+ 'idle': idle,
},
- port=7081,
- start=True,
- read_timeout=1,
- )
- self.check_connections(3, 1, 0, 2)
-
- self.get(sock=sock)
- self.check_connections(3, 0, 0, 3)
-
- def test_status_applications(self):
- def check_applications(expert):
- apps = list(self.conf_get('/status/applications').keys()).sort()
- assert apps == expert.sort()
-
- def check_application(name, running, starting, idle, active):
- Status.get(f'/applications/{name}') == {
- 'processes': {
- 'running': running,
- 'starting': starting,
- 'idle': idle,
- },
- 'requests': {'active': active},
- }
+ 'requests': {'active': active},
+ }
+
+ client.load('delayed')
+ Status.init()
+
+ check_applications(['delayed'])
+ check_application('delayed', 0, 0, 0, 0)
- self.load('delayed')
- Status.init()
+ # idle
- check_applications(['delayed'])
- check_application('delayed', 0, 0, 0, 0)
+ assert client.get()['status'] == 200
+ check_application('delayed', 1, 0, 1, 0)
- # idle
+ assert 'success' in client.conf('4', 'applications/delayed/processes')
+ check_application('delayed', 4, 0, 4, 0)
- assert self.get()['status'] == 200
- check_application('delayed', 1, 0, 1, 0)
+ # active
- assert 'success' in self.conf('4', 'applications/delayed/processes')
- check_application('delayed', 4, 0, 4, 0)
+ (_, sock) = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'X-Delay': '2',
+ 'Connection': 'close',
+ },
+ start=True,
+ read_timeout=1,
+ )
+ check_application('delayed', 4, 0, 3, 1)
+ sock.close()
- # active
+ # starting
- (_, sock) = self.get(
- headers={
- 'Host': 'localhost',
- 'X-Delay': '2',
- 'Connection': 'close',
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "applications/restart"},
+ "*:7081": {"pass": "applications/delayed"},
},
- start=True,
- read_timeout=1,
- )
- check_application('delayed', 4, 0, 3, 1)
- sock.close()
-
- # starting
-
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "applications/restart"},
- "*:7081": {"pass": "applications/delayed"},
- },
- "routes": [],
- "applications": {
- "restart": self.app_default("restart", "longstart"),
- "delayed": self.app_default("delayed"),
- },
+ "routes": [],
+ "applications": {
+ "restart": app_default("restart", "longstart"),
+ "delayed": app_default("delayed"),
},
- )
- Status.init()
+ },
+ )
+ Status.init()
- check_applications(['delayed', 'restart'])
- check_application('restart', 0, 0, 0, 0)
- check_application('delayed', 0, 0, 0, 0)
+ check_applications(['delayed', 'restart'])
+ check_application('restart', 0, 0, 0, 0)
+ check_application('delayed', 0, 0, 0, 0)
- self.get(read_timeout=1)
+ client.get(read_timeout=1)
- check_application('restart', 0, 1, 0, 1)
- check_application('delayed', 0, 0, 0, 0)
+ check_application('restart', 0, 1, 0, 1)
+ check_application('delayed', 0, 0, 0, 0)
- def test_status_proxy(self):
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {"pass": "applications/empty"},
- },
- "routes": [
- {
- "match": {"uri": "/"},
- "action": {"proxy": "http://127.0.0.1:7081"},
- }
- ],
- "applications": {
- "empty": self.app_default(),
- },
+
+def test_status_proxy():
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {"pass": "applications/empty"},
+ },
+ "routes": [
+ {
+ "match": {"uri": "/"},
+ "action": {"proxy": "http://127.0.0.1:7081"},
+ }
+ ],
+ "applications": {
+ "empty": app_default(),
},
- )
+ },
+ )
- Status.init()
+ Status.init()
- assert self.get()['status'] == 200
- self.check_connections(2, 0, 0, 2)
- assert Status.get('/requests/total') == 2, 'proxy'
+ assert client.get()['status'] == 200
+ check_connections(2, 0, 0, 2)
+ assert Status.get('/requests/total') == 2, 'proxy'
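The status tests read Unit's built-in `/status` object through the `Status` helper, which snapshots counters with `Status.init()` and reads them with `Status.get()`. A minimal sketch, assuming the listener and route configuration set up by the tests above:

    from unit.status import Status

    Status.init()                                      # snapshot current counters
    assert client.get()['status'] == 200
    assert Status.get('/requests/total') == 1          # one request since init()
    assert 'error' in client.conf_delete('/status')    # the subtree is read-only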
diff --git a/test/test_status_tls.py b/test/test_status_tls.py
index dc3d68da..784b4960 100644
--- a/test/test_status_tls.py
+++ b/test/test_status_tls.py
@@ -1,30 +1,31 @@
-from unit.applications.tls import TestApplicationTLS
+from unit.applications.tls import ApplicationTLS
from unit.status import Status
+prerequisites = {'modules': {'openssl': 'any'}}
-class TestStatusTLS(TestApplicationTLS):
- prerequisites = {'modules': {'openssl': 'any'}}
+client = ApplicationTLS()
- def test_status_tls_requests(self):
- self.certificate()
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "routes"},
- "*:7081": {
- "pass": "routes",
- "tls": {"certificate": "default"},
- },
+def test_status_tls_requests():
+ client.certificate()
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "routes"},
+ "*:7081": {
+ "pass": "routes",
+ "tls": {"certificate": "default"},
},
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- )
+ },
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ )
- Status.init()
+ Status.init()
- assert self.get()['status'] == 200
- assert self.get_ssl(port=7081)['status'] == 200
+ assert client.get()['status'] == 200
+ assert client.get_ssl(port=7081)['status'] == 200
- assert Status.get('/requests/total') == 2
+ assert Status.get('/requests/total') == 2
diff --git a/test/test_tls.py b/test/test_tls.py
index 06c38d0b..54fdb665 100644
--- a/test/test_tls.py
+++ b/test/test_tls.py
@@ -4,54 +4,58 @@ import subprocess
import time
import pytest
-from unit.applications.tls import TestApplicationTLS
+from unit.applications.tls import ApplicationTLS
from unit.option import option
-
-class TestTLS(TestApplicationTLS):
- prerequisites = {'modules': {'python': 'any', 'openssl': 'any'}}
-
- def openssl_date_to_sec_epoch(self, date):
- return self.date_to_sec_epoch(date, '%b %d %X %Y %Z')
-
- def add_tls(self, application='empty', cert='default', port=7080):
- assert 'success' in self.conf(
- {
- "pass": f"applications/{application}",
- "tls": {"certificate": cert},
- },
- f'listeners/*:{port}',
- )
-
- def remove_tls(self, application='empty', port=7080):
- assert 'success' in self.conf(
- {"pass": f"applications/{application}"}, f'listeners/*:{port}'
- )
-
- def req(self, name='localhost', subject=None, x509=False):
- subj = subject if subject is not None else f'/CN={name}/'
-
- subprocess.check_output(
- [
- 'openssl',
- 'req',
- '-new',
- '-subj',
- subj,
- '-config',
- f'{option.temp_dir}/openssl.conf',
- '-out',
- f'{option.temp_dir}/{name}.csr',
- '-keyout',
- f'{option.temp_dir}/{name}.key',
- ],
- stderr=subprocess.STDOUT,
- )
-
- def generate_ca_conf(self):
- with open(f'{option.temp_dir}/ca.conf', 'w') as f:
- f.write(
- f"""[ ca ]
+prerequisites = {'modules': {'python': 'any', 'openssl': 'any'}}
+
+client = ApplicationTLS()
+
+
+def add_tls(application='empty', cert='default', port=7080):
+ assert 'success' in client.conf(
+ {
+ "pass": f"applications/{application}",
+ "tls": {"certificate": cert},
+ },
+ f'listeners/*:{port}',
+ )
+
+
+def ca(cert='root', out='localhost'):
+ subprocess.check_output(
+ [
+ 'openssl',
+ 'ca',
+ '-batch',
+ '-config',
+ f'{option.temp_dir}/ca.conf',
+ '-keyfile',
+ f'{option.temp_dir}/{cert}.key',
+ '-cert',
+ f'{option.temp_dir}/{cert}.crt',
+ '-in',
+ f'{option.temp_dir}/{out}.csr',
+ '-out',
+ f'{option.temp_dir}/{out}.crt',
+ ],
+ stderr=subprocess.STDOUT,
+ )
+
+
+def context_cert_req(cert='root'):
+ context = ssl.create_default_context()
+ context.check_hostname = False
+ context.verify_mode = ssl.CERT_REQUIRED
+ context.load_verify_locations(f'{option.temp_dir}/{cert}.crt')
+
+ return context
+
+
+def generate_ca_conf():
+ with open(f'{option.temp_dir}/ca.conf', 'w') as f:
+ f.write(
+ f"""[ ca ]
default_ca = myca
[ myca ]
@@ -69,615 +73,632 @@ commonName = optional
[ myca_extensions ]
basicConstraints = critical,CA:TRUE"""
- )
-
- with open(f'{option.temp_dir}/certserial', 'w') as f:
- f.write('1000')
-
- with open(f'{option.temp_dir}/certindex', 'w') as f:
- f.write('')
-
- with open(f'{option.temp_dir}/certindex.attr', 'w') as f:
- f.write('')
-
- def ca(self, cert='root', out='localhost'):
- subprocess.check_output(
- [
- 'openssl',
- 'ca',
- '-batch',
- '-config',
- f'{option.temp_dir}/ca.conf',
- '-keyfile',
- f'{option.temp_dir}/{cert}.key',
- '-cert',
- f'{option.temp_dir}/{cert}.crt',
- '-in',
- f'{option.temp_dir}/{out}.csr',
- '-out',
- f'{option.temp_dir}/{out}.crt',
- ],
- stderr=subprocess.STDOUT,
)
- def set_certificate_req_context(self, cert='root'):
- self.context = ssl.create_default_context()
- self.context.check_hostname = False
- self.context.verify_mode = ssl.CERT_REQUIRED
- self.context.load_verify_locations(f'{option.temp_dir}/{cert}.crt')
+ with open(f'{option.temp_dir}/certserial', 'w') as f:
+ f.write('1000')
- def test_tls_listener_option_add(self):
- self.load('empty')
+ with open(f'{option.temp_dir}/certindex', 'w') as f:
+ f.write('')
- self.certificate()
+ with open(f'{option.temp_dir}/certindex.attr', 'w') as f:
+ f.write('')
- self.add_tls()
- assert self.get_ssl()['status'] == 200, 'add listener option'
+def remove_tls(application='empty', port=7080):
+ assert 'success' in client.conf(
+ {"pass": f"applications/{application}"}, f'listeners/*:{port}'
+ )
- def test_tls_listener_option_remove(self):
- self.load('empty')
- self.certificate()
+def req(name='localhost', subject=None):
+ subj = subject if subject is not None else f'/CN={name}/'
- self.add_tls()
+ subprocess.check_output(
+ [
+ 'openssl',
+ 'req',
+ '-new',
+ '-subj',
+ subj,
+ '-config',
+ f'{option.temp_dir}/openssl.conf',
+ '-out',
+ f'{option.temp_dir}/{name}.csr',
+ '-keyout',
+ f'{option.temp_dir}/{name}.key',
+ ],
+ stderr=subprocess.STDOUT,
+ )
- self.get_ssl()
- self.remove_tls()
+def test_tls_listener_option_add():
+ client.load('empty')
- assert self.get()['status'] == 200, 'remove listener option'
+ client.certificate()
- def test_tls_certificate_remove(self):
- self.load('empty')
+ add_tls()
- self.certificate()
+ assert client.get_ssl()['status'] == 200, 'add listener option'
- assert 'success' in self.conf_delete(
- '/certificates/default'
- ), 'remove certificate'
- def test_tls_certificate_remove_used(self):
- self.load('empty')
+def test_tls_listener_option_remove():
+ client.load('empty')
- self.certificate()
+ client.certificate()
- self.add_tls()
+ add_tls()
- assert 'error' in self.conf_delete(
- '/certificates/default'
- ), 'remove certificate'
+ client.get_ssl()
- def test_tls_certificate_remove_nonexisting(self):
- self.load('empty')
+ remove_tls()
- self.certificate()
+ assert client.get()['status'] == 200, 'remove listener option'
- self.add_tls()
- assert 'error' in self.conf_delete(
- '/certificates/blah'
- ), 'remove nonexistings certificate'
+def test_tls_certificate_remove():
+ client.load('empty')
- @pytest.mark.skip('not yet')
- def test_tls_certificate_update(self):
- self.load('empty')
+ client.certificate()
- self.certificate()
+ assert 'success' in client.conf_delete(
+ '/certificates/default'
+ ), 'remove certificate'
- self.add_tls()
- cert_old = ssl.get_server_certificate(('127.0.0.1', 7080))
+def test_tls_certificate_remove_used():
+ client.load('empty')
- self.certificate()
+ client.certificate()
- assert cert_old != ssl.get_server_certificate(
- ('127.0.0.1', 7080)
- ), 'update certificate'
+ add_tls()
- @pytest.mark.skip('not yet')
- def test_tls_certificate_key_incorrect(self):
- self.load('empty')
+ assert 'error' in client.conf_delete(
+ '/certificates/default'
+ ), 'remove certificate'
- self.certificate('first', False)
- self.certificate('second', False)
- assert 'error' in self.certificate_load(
- 'first', 'second'
- ), 'key incorrect'
+def test_tls_certificate_remove_nonexisting():
+ client.load('empty')
- def test_tls_certificate_change(self):
- self.load('empty')
+ client.certificate()
- self.certificate()
- self.certificate('new')
+ add_tls()
- self.add_tls()
+ assert 'error' in client.conf_delete(
+ '/certificates/blah'
+ ), 'remove nonexisting certificate'
- cert_old = ssl.get_server_certificate(('127.0.0.1', 7080))
- self.add_tls(cert='new')
+@pytest.mark.skip('not yet')
+def test_tls_certificate_update():
+ client.load('empty')
- assert cert_old != ssl.get_server_certificate(
- ('127.0.0.1', 7080)
- ), 'change certificate'
+ client.certificate()
- def test_tls_certificate_key_rsa(self):
- self.load('empty')
+ add_tls()
- self.certificate()
+ cert_old = ssl.get_server_certificate(('127.0.0.1', 7080))
- assert (
- self.conf_get('/certificates/default/key') == 'RSA (2048 bits)'
- ), 'certificate key rsa'
+ client.certificate()
- def test_tls_certificate_key_ec(self, temp_dir):
- self.load('empty')
+ assert cert_old != ssl.get_server_certificate(
+ ('127.0.0.1', 7080)
+ ), 'update certificate'
- self.openssl_conf()
- subprocess.check_output(
- [
- 'openssl',
- 'ecparam',
- '-noout',
- '-genkey',
- '-out',
- f'{temp_dir}/ec.key',
- '-name',
- 'prime256v1',
- ],
- stderr=subprocess.STDOUT,
- )
+@pytest.mark.skip('not yet')
+def test_tls_certificate_key_incorrect():
+ client.load('empty')
- subprocess.check_output(
- [
- 'openssl',
- 'req',
- '-x509',
- '-new',
- '-subj',
- '/CN=ec/',
- '-config',
- f'{temp_dir}/openssl.conf',
- '-key',
- f'{temp_dir}/ec.key',
- '-out',
- f'{temp_dir}/ec.crt',
- ],
- stderr=subprocess.STDOUT,
- )
+ client.certificate('first', False)
+ client.certificate('second', False)
- self.certificate_load('ec')
+ assert 'error' in client.certificate_load(
+ 'first', 'second'
+ ), 'key incorrect'
- assert (
- self.conf_get('/certificates/ec/key') == 'ECDH'
- ), 'certificate key ec'
- def test_tls_certificate_chain_options(self):
- self.load('empty')
+def test_tls_certificate_change():
+ client.load('empty')
- self.certificate()
+ client.certificate()
+ client.certificate('new')
- chain = self.conf_get('/certificates/default/chain')
+ add_tls()
- assert len(chain) == 1, 'certificate chain length'
+ cert_old = ssl.get_server_certificate(('127.0.0.1', 7080))
- cert = chain[0]
+ add_tls(cert='new')
- assert (
- cert['subject']['common_name'] == 'default'
- ), 'certificate subject common name'
- assert (
- cert['issuer']['common_name'] == 'default'
- ), 'certificate issuer common name'
+ assert cert_old != ssl.get_server_certificate(
+ ('127.0.0.1', 7080)
+ ), 'change certificate'
- assert (
- abs(
- self.sec_epoch()
- - self.openssl_date_to_sec_epoch(cert['validity']['since'])
- )
- < 60
- ), 'certificate validity since'
- assert (
- self.openssl_date_to_sec_epoch(cert['validity']['until'])
- - self.openssl_date_to_sec_epoch(cert['validity']['since'])
- == 2592000
- ), 'certificate validity until'
- def test_tls_certificate_chain(self, temp_dir):
- self.load('empty')
+def test_tls_certificate_key_rsa():
+ client.load('empty')
- self.certificate('root', False)
+ client.certificate()
- self.req('int')
- self.req('end')
+ assert (
+ client.conf_get('/certificates/default/key') == 'RSA (2048 bits)'
+ ), 'certificate key rsa'
- self.generate_ca_conf()
- self.ca(cert='root', out='int')
- self.ca(cert='int', out='end')
+def test_tls_certificate_key_ec(temp_dir):
+ client.load('empty')
- crt_path = f'{temp_dir}/end-int.crt'
- end_path = f'{temp_dir}/end.crt'
- int_path = f'{temp_dir}/int.crt'
+ client.openssl_conf()
- with open(crt_path, 'wb') as crt, open(end_path, 'rb') as end, open(
- int_path, 'rb'
- ) as int:
- crt.write(end.read() + int.read())
-
- self.set_certificate_req_context()
-
- # incomplete chain
-
- assert 'success' in self.certificate_load(
- 'end', 'end'
- ), 'certificate chain end upload'
+ subprocess.check_output(
+ [
+ 'openssl',
+ 'ecparam',
+ '-noout',
+ '-genkey',
+ '-out',
+ f'{temp_dir}/ec.key',
+ '-name',
+ 'prime256v1',
+ ],
+ stderr=subprocess.STDOUT,
+ )
- chain = self.conf_get('/certificates/end/chain')
- assert len(chain) == 1, 'certificate chain end length'
- assert (
- chain[0]['subject']['common_name'] == 'end'
- ), 'certificate chain end subject common name'
- assert (
- chain[0]['issuer']['common_name'] == 'int'
- ), 'certificate chain end issuer common name'
+ subprocess.check_output(
+ [
+ 'openssl',
+ 'req',
+ '-x509',
+ '-new',
+ '-subj',
+ '/CN=ec/',
+ '-config',
+ f'{temp_dir}/openssl.conf',
+ '-key',
+ f'{temp_dir}/ec.key',
+ '-out',
+ f'{temp_dir}/ec.crt',
+ ],
+ stderr=subprocess.STDOUT,
+ )
- self.add_tls(cert='end')
+ client.certificate_load('ec')
- try:
- resp = self.get_ssl()
- except ssl.SSLError:
- resp = None
+ assert (
+ client.conf_get('/certificates/ec/key') == 'ECDH'
+ ), 'certificate key ec'
- assert resp == None, 'certificate chain incomplete chain'
- # intermediate
+def test_tls_certificate_chain_options(date_to_sec_epoch, sec_epoch):
+ client.load('empty')
+ date_format = '%b %d %X %Y %Z'
- assert 'success' in self.certificate_load(
- 'int', 'int'
- ), 'certificate chain int upload'
+ client.certificate()
- chain = self.conf_get('/certificates/int/chain')
- assert len(chain) == 1, 'certificate chain int length'
- assert (
- chain[0]['subject']['common_name'] == 'int'
- ), 'certificate chain int subject common name'
- assert (
- chain[0]['issuer']['common_name'] == 'root'
- ), 'certificate chain int issuer common name'
+ chain = client.conf_get('/certificates/default/chain')
- self.add_tls(cert='int')
+ assert len(chain) == 1, 'certificate chain length'
- assert self.get_ssl()['status'] == 200, 'certificate chain intermediate'
+ cert = chain[0]
- # intermediate server
+ assert (
+ cert['subject']['common_name'] == 'default'
+ ), 'certificate subject common name'
+ assert (
+ cert['issuer']['common_name'] == 'default'
+ ), 'certificate issuer common name'
- assert 'success' in self.certificate_load(
- 'end-int', 'end'
- ), 'certificate chain end-int upload'
+ assert (
+ abs(
+ sec_epoch
+ - date_to_sec_epoch(cert['validity']['since'], date_format)
+ )
+ < 60
+ ), 'certificate validity since'
+ assert (
+ date_to_sec_epoch(cert['validity']['until'], date_format)
+ - date_to_sec_epoch(cert['validity']['since'], date_format)
+ == 2592000
+ ), 'certificate validity until'
- chain = self.conf_get('/certificates/end-int/chain')
- assert len(chain) == 2, 'certificate chain end-int length'
- assert (
- chain[0]['subject']['common_name'] == 'end'
- ), 'certificate chain end-int int subject common name'
- assert (
- chain[0]['issuer']['common_name'] == 'int'
- ), 'certificate chain end-int int issuer common name'
- assert (
- chain[1]['subject']['common_name'] == 'int'
- ), 'certificate chain end-int end subject common name'
- assert (
- chain[1]['issuer']['common_name'] == 'root'
- ), 'certificate chain end-int end issuer common name'
- self.add_tls(cert='end-int')
+def test_tls_certificate_chain(temp_dir):
+ client.load('empty')
- assert (
- self.get_ssl()['status'] == 200
- ), 'certificate chain intermediate server'
+ client.certificate('root', False)
- def test_tls_certificate_chain_long(self, temp_dir):
- self.load('empty')
+ req('int')
+ req('end')
- self.generate_ca_conf()
+ generate_ca_conf()
- # Minimum chain length is 3.
- chain_length = 10
+ ca(cert='root', out='int')
+ ca(cert='int', out='end')
- for i in range(chain_length):
- if i == 0:
- self.certificate('root', False)
- elif i == chain_length - 1:
- self.req('end')
- else:
- self.req(f'int{i}')
+ crt_path = f'{temp_dir}/end-int.crt'
+ end_path = f'{temp_dir}/end.crt'
+ int_path = f'{temp_dir}/int.crt'
+
+ with open(crt_path, 'wb') as crt, open(end_path, 'rb') as end, open(
+ int_path, 'rb'
+ ) as int:
+ crt.write(end.read() + int.read())
+
+ # incomplete chain
+
+ assert 'success' in client.certificate_load(
+ 'end', 'end'
+ ), 'certificate chain end upload'
+
+ chain = client.conf_get('/certificates/end/chain')
+ assert len(chain) == 1, 'certificate chain end length'
+ assert (
+ chain[0]['subject']['common_name'] == 'end'
+ ), 'certificate chain end subject common name'
+ assert (
+ chain[0]['issuer']['common_name'] == 'int'
+ ), 'certificate chain end issuer common name'
+
+ add_tls(cert='end')
+
+ ctx_cert_req = context_cert_req()
+ try:
+ resp = client.get_ssl(context=ctx_cert_req)
+ except ssl.SSLError:
+ resp = None
+
+ assert resp is None, 'certificate chain incomplete chain'
+
+ # intermediate
+
+ assert 'success' in client.certificate_load(
+ 'int', 'int'
+ ), 'certificate chain int upload'
+
+ chain = client.conf_get('/certificates/int/chain')
+ assert len(chain) == 1, 'certificate chain int length'
+ assert (
+ chain[0]['subject']['common_name'] == 'int'
+ ), 'certificate chain int subject common name'
+ assert (
+ chain[0]['issuer']['common_name'] == 'root'
+ ), 'certificate chain int issuer common name'
+
+ add_tls(cert='int')
+
+ assert client.get_ssl()['status'] == 200, 'certificate chain intermediate'
+
+ # intermediate server
+
+ assert 'success' in client.certificate_load(
+ 'end-int', 'end'
+ ), 'certificate chain end-int upload'
+
+ chain = client.conf_get('/certificates/end-int/chain')
+ assert len(chain) == 2, 'certificate chain end-int length'
+ assert (
+ chain[0]['subject']['common_name'] == 'end'
+ ), 'certificate chain end-int end subject common name'
+ assert (
+ chain[0]['issuer']['common_name'] == 'int'
+ ), 'certificate chain end-int end issuer common name'
+ assert (
+ chain[1]['subject']['common_name'] == 'int'
+ ), 'certificate chain end-int int subject common name'
+ assert (
+ chain[1]['issuer']['common_name'] == 'root'
+ ), 'certificate chain end-int int issuer common name'
+
+ add_tls(cert='end-int')
+
+ assert (
+ client.get_ssl(context=ctx_cert_req)['status'] == 200
+ ), 'certificate chain intermediate server'
+
+
+def test_tls_certificate_chain_long(temp_dir):
+ client.load('empty')
- for i in range(chain_length - 1):
- if i == 0:
- self.ca(cert='root', out='int1')
- elif i == chain_length - 2:
- self.ca(cert=f'int{(chain_length - 2)}', out='end')
- else:
- self.ca(cert=f'int{i}', out=f'int{(i + 1)}')
+ generate_ca_conf()
- for i in range(chain_length - 1, 0, -1):
- path = (
- f'{temp_dir}/end.crt'
- if i == chain_length - 1
- else f'{temp_dir}/int{i}.crt'
- )
+ # Minimum chain length is 3.
+ chain_length = 10
+
+ for i in range(chain_length):
+ if i == 0:
+ client.certificate('root', False)
+ elif i == chain_length - 1:
+ req('end')
+ else:
+ req(f'int{i}')
+
+ for i in range(chain_length - 1):
+ if i == 0:
+ ca(cert='root', out='int1')
+ elif i == chain_length - 2:
+ ca(cert=f'int{(chain_length - 2)}', out='end')
+ else:
+ ca(cert=f'int{i}', out=f'int{(i + 1)}')
+
+ for i in range(chain_length - 1, 0, -1):
+ path = (
+ f'{temp_dir}/end.crt'
+ if i == chain_length - 1
+ else f'{temp_dir}/int{i}.crt'
+ )
- with open(f'{temp_dir}/all.crt', 'a') as chain, open(path) as cert:
- chain.write(cert.read())
+ with open(f'{temp_dir}/all.crt', 'a') as chain, open(path) as cert:
+ chain.write(cert.read())
- self.set_certificate_req_context()
+ assert 'success' in client.certificate_load(
+ 'all', 'end'
+ ), 'certificate chain upload'
- assert 'success' in self.certificate_load(
- 'all', 'end'
- ), 'certificate chain upload'
+ chain = client.conf_get('/certificates/all/chain')
+ assert len(chain) == chain_length - 1, 'certificate chain length'
- chain = self.conf_get('/certificates/all/chain')
- assert len(chain) == chain_length - 1, 'certificate chain length'
+ add_tls(cert='all')
- self.add_tls(cert='all')
+ assert (
+ client.get_ssl(context=context_cert_req())['status'] == 200
+ ), 'certificate chain long'
- assert self.get_ssl()['status'] == 200, 'certificate chain long'
- def test_tls_certificate_empty_cn(self, temp_dir):
- self.certificate('root', False)
+def test_tls_certificate_empty_cn():
+ client.certificate('root', False)
- self.req(subject='/')
+ req(subject='/')
- self.generate_ca_conf()
- self.ca()
+ generate_ca_conf()
+ ca()
- self.set_certificate_req_context()
+ assert 'success' in client.certificate_load('localhost', 'localhost')
- assert 'success' in self.certificate_load('localhost', 'localhost')
+ cert = client.conf_get('/certificates/localhost')
+ assert cert['chain'][0]['subject'] == {}, 'empty subject'
+ assert cert['chain'][0]['issuer']['common_name'] == 'root', 'issuer'
- cert = self.conf_get('/certificates/localhost')
- assert cert['chain'][0]['subject'] == {}, 'empty subject'
- assert cert['chain'][0]['issuer']['common_name'] == 'root', 'issuer'
- def test_tls_certificate_empty_cn_san(self, temp_dir):
- self.certificate('root', False)
+def test_tls_certificate_empty_cn_san():
+ client.certificate('root', False)
- self.openssl_conf(
- rewrite=True, alt_names=["example.com", "www.example.net"]
- )
+ client.openssl_conf(
+ rewrite=True, alt_names=["example.com", "www.example.net"]
+ )
- self.req(subject='/')
+ req(subject='/')
- self.generate_ca_conf()
- self.ca()
+ generate_ca_conf()
+ ca()
- self.set_certificate_req_context()
+ assert 'success' in client.certificate_load('localhost', 'localhost')
- assert 'success' in self.certificate_load('localhost', 'localhost')
+ cert = client.conf_get('/certificates/localhost')
+ assert cert['chain'][0]['subject'] == {
+ 'alt_names': ['example.com', 'www.example.net']
+ }, 'subject alt_names'
+ assert cert['chain'][0]['issuer']['common_name'] == 'root', 'issuer'
- cert = self.conf_get('/certificates/localhost')
- assert cert['chain'][0]['subject'] == {
- 'alt_names': ['example.com', 'www.example.net']
- }, 'subject alt_names'
- assert cert['chain'][0]['issuer']['common_name'] == 'root', 'issuer'
- def test_tls_certificate_empty_cn_san_ip(self):
- self.certificate('root', False)
+def test_tls_certificate_empty_cn_san_ip():
+ client.certificate('root', False)
- self.openssl_conf(
- rewrite=True,
- alt_names=['example.com', 'www.example.net', 'IP|10.0.0.1'],
- )
+ client.openssl_conf(
+ rewrite=True,
+ alt_names=['example.com', 'www.example.net', 'IP|10.0.0.1'],
+ )
- self.req(subject='/')
+ req(subject='/')
- self.generate_ca_conf()
- self.ca()
+ generate_ca_conf()
+ ca()
- self.set_certificate_req_context()
+ assert 'success' in client.certificate_load('localhost', 'localhost')
- assert 'success' in self.certificate_load('localhost', 'localhost')
+ cert = client.conf_get('/certificates/localhost')
+ assert cert['chain'][0]['subject'] == {
+ 'alt_names': ['example.com', 'www.example.net']
+ }, 'subject alt_names'
+ assert cert['chain'][0]['issuer']['common_name'] == 'root', 'issuer'
- cert = self.conf_get('/certificates/localhost')
- assert cert['chain'][0]['subject'] == {
- 'alt_names': ['example.com', 'www.example.net']
- }, 'subject alt_names'
- assert cert['chain'][0]['issuer']['common_name'] == 'root', 'issuer'
- def test_tls_keepalive(self):
- self.load('mirror')
+def test_tls_keepalive():
+ client.load('mirror')
- assert self.get()['status'] == 200, 'init'
+ assert client.get()['status'] == 200, 'init'
- self.certificate()
+ client.certificate()
- self.add_tls(application='mirror')
+ add_tls(application='mirror')
- (resp, sock) = self.post_ssl(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body='0123456789',
- read_timeout=1,
- )
+ (resp, sock) = client.post_ssl(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body='0123456789',
+ read_timeout=1,
+ )
- assert resp['body'] == '0123456789', 'keepalive 1'
+ assert resp['body'] == '0123456789', 'keepalive 1'
- resp = self.post_ssl(
- headers={
- 'Host': 'localhost',
- 'Connection': 'close',
+ resp = client.post_ssl(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ },
+ sock=sock,
+ body='0123456789',
+ )
+
+ assert resp['body'] == '0123456789', 'keepalive 2'
+
+
+def test_tls_no_close_notify():
+ client.certificate()
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {
+ "pass": "routes",
+ "tls": {"certificate": "default"},
+ }
},
- sock=sock,
- body='0123456789',
- )
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ ), 'load application configuration'
- assert resp['body'] == '0123456789', 'keepalive 2'
-
- def test_tls_no_close_notify(self):
- self.certificate()
-
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {
- "pass": "routes",
- "tls": {"certificate": "default"},
- }
- },
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- ), 'load application configuration'
+ (_, sock) = client.get_ssl(start=True)
- (resp, sock) = self.get_ssl(start=True)
+ time.sleep(5)
- time.sleep(5)
+ sock.close()
- sock.close()
- @pytest.mark.skip('not yet')
- def test_tls_keepalive_certificate_remove(self):
- self.load('empty')
+@pytest.mark.skip('not yet')
+def test_tls_keepalive_certificate_remove():
+ client.load('empty')
- assert self.get()['status'] == 200, 'init'
+ assert client.get()['status'] == 200, 'init'
- self.certificate()
+ client.certificate()
- self.add_tls()
+ add_tls()
- (resp, sock) = self.get_ssl(
- headers={'Host': 'localhost', 'Connection': 'keep-alive'},
- start=True,
- read_timeout=1,
- )
+ (resp, sock) = client.get_ssl(
+ headers={'Host': 'localhost', 'Connection': 'keep-alive'},
+ start=True,
+ read_timeout=1,
+ )
- assert 'success' in self.conf(
- {"pass": "applications/empty"}, 'listeners/*:7080'
- )
- assert 'success' in self.conf_delete('/certificates/default')
+ assert 'success' in client.conf(
+ {"pass": "applications/empty"}, 'listeners/*:7080'
+ )
+ assert 'success' in client.conf_delete('/certificates/default')
- try:
- resp = self.get_ssl(sock=sock)
+ try:
+ resp = client.get_ssl(sock=sock)
- except KeyboardInterrupt:
- raise
+ except KeyboardInterrupt:
+ raise
- except:
- resp = None
+ except:
+ resp = None
- assert resp == None, 'keepalive remove certificate'
+ assert resp is None, 'keepalive remove certificate'
- @pytest.mark.skip('not yet')
- def test_tls_certificates_remove_all(self):
- self.load('empty')
- self.certificate()
+@pytest.mark.skip('not yet')
+def test_tls_certificates_remove_all():
+ client.load('empty')
- assert 'success' in self.conf_delete(
- '/certificates'
- ), 'remove all certificates'
+ client.certificate()
- def test_tls_application_respawn(self, skip_alert):
- self.load('mirror')
+ assert 'success' in client.conf_delete(
+ '/certificates'
+ ), 'remove all certificates'
- self.certificate()
- assert 'success' in self.conf('1', 'applications/mirror/processes')
+def test_tls_application_respawn(findall, skip_alert, wait_for_record):
+ client.load('mirror')
- self.add_tls(application='mirror')
+ client.certificate()
- (_, sock) = self.post_ssl(
- headers={
- 'Host': 'localhost',
- 'Connection': 'keep-alive',
- },
- start=True,
- body='0123456789',
- read_timeout=1,
- )
+ assert 'success' in client.conf('1', 'applications/mirror/processes')
- app_id = self.findall(r'(\d+)#\d+ "mirror" application started')[0]
+ add_tls(application='mirror')
- subprocess.check_output(['kill', '-9', app_id])
+ (_, sock) = client.post_ssl(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'keep-alive',
+ },
+ start=True,
+ body='0123456789',
+ read_timeout=1,
+ )
- skip_alert(fr'process {app_id} exited on signal 9')
+ app_id = findall(r'(\d+)#\d+ "mirror" application started')[0]
- self.wait_for_record(
- fr' (?!{app_id}#)(\d+)#\d+ "mirror" application started'
- )
+ subprocess.check_output(['kill', '-9', app_id])
- resp = self.post_ssl(sock=sock, body='0123456789')
+ skip_alert(fr'process {app_id} exited on signal 9')
- assert resp['status'] == 200, 'application respawn status'
- assert resp['body'] == '0123456789', 'application respawn body'
+ wait_for_record(fr' (?!{app_id}#)(\d+)#\d+ "mirror" application started')
- def test_tls_url_scheme(self):
- self.load('variables')
+ resp = client.post_ssl(sock=sock, body='0123456789')
- assert (
- self.post(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'Custom-Header': '',
- 'Connection': 'close',
- }
- )['headers']['Wsgi-Url-Scheme']
- == 'http'
- ), 'url scheme http'
+ assert resp['status'] == 200, 'application respawn status'
+ assert resp['body'] == '0123456789', 'application respawn body'
- self.certificate()
- self.add_tls(application='variables')
+def test_tls_url_scheme():
+ client.load('variables')
- assert (
- self.post_ssl(
- headers={
- 'Host': 'localhost',
- 'Content-Type': 'text/html',
- 'Custom-Header': '',
- 'Connection': 'close',
- }
- )['headers']['Wsgi-Url-Scheme']
- == 'https'
- ), 'url scheme https'
+ assert (
+ client.post(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Type': 'text/html',
+ 'Custom-Header': '',
+ 'Connection': 'close',
+ }
+ )['headers']['Wsgi-Url-Scheme']
+ == 'http'
+ ), 'url scheme http'
- def test_tls_big_upload(self):
- self.load('upload')
+ client.certificate()
- self.certificate()
+ add_tls(application='variables')
- self.add_tls(application='upload')
+ assert (
+ client.post_ssl(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Type': 'text/html',
+ 'Custom-Header': '',
+ 'Connection': 'close',
+ }
+ )['headers']['Wsgi-Url-Scheme']
+ == 'https'
+ ), 'url scheme https'
- filename = 'test.txt'
- data = '0123456789' * 9000
- res = self.post_ssl(
- body={
- 'file': {
- 'filename': filename,
- 'type': 'text/plain',
- 'data': io.StringIO(data),
- }
+def test_tls_big_upload():
+ client.load('upload')
+
+ client.certificate()
+
+ add_tls(application='upload')
+
+ filename = 'test.txt'
+ data = '0123456789' * 9000
+
+ res = client.post_ssl(
+ body={
+ 'file': {
+ 'filename': filename,
+ 'type': 'text/plain',
+ 'data': io.StringIO(data),
}
- )
- assert res['status'] == 200, 'status ok'
- assert res['body'] == f'{filename}{data}'
+ }
+ )
+ assert res['status'] == 200, 'status ok'
+ assert res['body'] == f'{filename}{data}'
+
- def test_tls_multi_listener(self):
- self.load('empty')
+def test_tls_multi_listener():
+ client.load('empty')
- self.certificate()
+ client.certificate()
- self.add_tls()
- self.add_tls(port=7081)
+ add_tls()
+ add_tls(port=7081)
- assert self.get_ssl()['status'] == 200, 'listener #1'
+ assert client.get_ssl()['status'] == 200, 'listener #1'
- assert self.get_ssl(port=7081)['status'] == 200, 'listener #2'
+ assert client.get_ssl(port=7081)['status'] == 200, 'listener #2'
diff --git a/test/test_tls_conf_command.py b/test/test_tls_conf_command.py
index 605848ea..49df7bf3 100644
--- a/test/test_tls_conf_command.py
+++ b/test/test_tls_conf_command.py
@@ -1,111 +1,118 @@
import ssl
import pytest
-from unit.applications.tls import TestApplicationTLS
+from unit.applications.tls import ApplicationTLS
+prerequisites = {'modules': {'openssl': 'any'}}
-class TestTLSConfCommand(TestApplicationTLS):
- prerequisites = {'modules': {'openssl': 'any'}}
+client = ApplicationTLS()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request):
- self.certificate()
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {
- "pass": "routes",
- "tls": {"certificate": "default"},
- }
- },
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- ), 'load application configuration'
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ client.certificate()
- def test_tls_conf_command(self):
- def check_no_connection():
- try:
- self.get_ssl()
- pytest.fail('Unexpected connection.')
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {
+ "pass": "routes",
+ "tls": {"certificate": "default"},
+ }
+ },
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ ), 'load application configuration'
- except (ssl.SSLError, ConnectionRefusedError):
- pass
- # Set one conf_commands (disable protocol).
+def test_tls_conf_command():
+ def check_no_connection():
+ try:
+ client.get_ssl()
+ pytest.fail('Unexpected connection.')
- (resp, sock) = self.get_ssl(start=True)
+ except (ssl.SSLError, ConnectionRefusedError):
+ pass
- shared_ciphers = sock.shared_ciphers()
- protocols = list(set(c[1] for c in shared_ciphers))
- protocol = sock.cipher()[1]
+ # Set one conf_commands (disable protocol).
- if '/' in protocol:
- pytest.skip('Complex protocol format.')
+ (_, sock) = client.get_ssl(start=True)
- assert 'success' in self.conf(
- {
- "certificate": "default",
- "conf_commands": {"protocol": f'-{protocol}'},
- },
- 'listeners/*:7080/tls',
- ), 'protocol disabled'
+ shared_ciphers = sock.shared_ciphers()
- sock.close()
+ if not shared_ciphers:
+ pytest.skip('no shared ciphers')
+
+ protocols = list(set(c[1] for c in shared_ciphers))
+ protocol = sock.cipher()[1]
- if len(protocols) > 1:
- (resp, sock) = self.get_ssl(start=True)
+ if '/' in protocol:
+ pytest.skip('Complex protocol format.')
- cipher = sock.cipher()
- assert cipher[1] != protocol, 'new protocol used'
+ assert 'success' in client.conf(
+ {
+ "certificate": "default",
+ "conf_commands": {"protocol": f'-{protocol}'},
+ },
+ 'listeners/*:7080/tls',
+ ), 'protocol disabled'
- shared_ciphers = sock.shared_ciphers()
- ciphers = list(set(c for c in shared_ciphers if c[1] == cipher[1]))
+ sock.close()
- sock.close()
- else:
- check_no_connection()
- pytest.skip('One TLS protocol available only.')
+ if len(protocols) > 1:
+ (_, sock) = client.get_ssl(start=True)
- # Set two conf_commands (disable protocol and cipher).
+ cipher = sock.cipher()
+ assert cipher[1] != protocol, 'new protocol used'
- assert 'success' in self.conf(
- {
- "certificate": "default",
- "conf_commands": {
- "protocol": f'-{protocol}',
- "cipherstring": f"{cipher[1]}:!{cipher[0]}",
- },
+ shared_ciphers = sock.shared_ciphers()
+ ciphers = list(set(c for c in shared_ciphers if c[1] == cipher[1]))
+
+ sock.close()
+ else:
+ check_no_connection()
+ pytest.skip('One TLS protocol available only.')
+
+ # Set two conf_commands (disable protocol and cipher).
+
+ assert 'success' in client.conf(
+ {
+ "certificate": "default",
+ "conf_commands": {
+ "protocol": f'-{protocol}',
+ "cipherstring": f"{cipher[1]}:!{cipher[0]}",
},
- 'listeners/*:7080/tls',
- ), 'cipher disabled'
+ },
+ 'listeners/*:7080/tls',
+ ), 'cipher disabled'
- if len(ciphers) > 1:
- (resp, sock) = self.get_ssl(start=True)
+ if len(ciphers) > 1:
+ (_, sock) = client.get_ssl(start=True)
- cipher_new = sock.cipher()
- assert cipher_new[1] == cipher[1], 'previous protocol used'
- assert cipher_new[0] != cipher[0], 'new cipher used'
+ cipher_new = sock.cipher()
+ assert cipher_new[1] == cipher[1], 'previous protocol used'
+ assert cipher_new[0] != cipher[0], 'new cipher used'
- sock.close()
+ sock.close()
- else:
- check_no_connection()
+ else:
+ check_no_connection()
- def test_tls_conf_command_invalid(self, skip_alert):
- skip_alert(r'SSL_CONF_cmd', r'failed to apply new conf')
- def check_conf_commands(conf_commands):
- assert 'error' in self.conf(
- {"certificate": "default", "conf_commands": conf_commands},
- 'listeners/*:7080/tls',
- ), 'ivalid conf_commands'
+def test_tls_conf_command_invalid(skip_alert):
+ skip_alert(r'SSL_CONF_cmd', r'failed to apply new conf')
- check_conf_commands([])
- check_conf_commands("blah")
- check_conf_commands({"": ""})
- check_conf_commands({"blah": ""})
- check_conf_commands({"protocol": {}})
- check_conf_commands({"protocol": "blah"})
- check_conf_commands({"protocol": "TLSv1.2", "blah": ""})
+ def check_conf_commands(conf_commands):
+ assert 'error' in client.conf(
+ {"certificate": "default", "conf_commands": conf_commands},
+ 'listeners/*:7080/tls',
+ ), 'invalid conf_commands'
+
+ check_conf_commands([])
+ check_conf_commands("blah")
+ check_conf_commands({"": ""})
+ check_conf_commands({"blah": ""})
+ check_conf_commands({"protocol": {}})
+ check_conf_commands({"protocol": "blah"})
+ check_conf_commands({"protocol": "TLSv1.2", "blah": ""})
diff --git a/test/test_tls_session.py b/test/test_tls_session.py
index 58f11f2d..8b2b04fd 100644
--- a/test/test_tls_session.py
+++ b/test/test_tls_session.py
@@ -12,115 +12,129 @@ from OpenSSL.SSL import (
Connection,
_lib,
)
-from unit.applications.tls import TestApplicationTLS
+from unit.applications.tls import ApplicationTLS
+prerequisites = {'modules': {'openssl': 'any'}}
-class TestTLSSession(TestApplicationTLS):
- prerequisites = {'modules': {'openssl': 'any'}}
+client = ApplicationTLS()
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request):
- self.certificate()
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {
- "pass": "routes",
- "tls": {"certificate": "default", "session": {}},
- }
- },
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- ), 'load application configuration'
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ client.certificate()
- def add_session(self, cache_size=None, timeout=None):
- session = {}
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {
+ "pass": "routes",
+ "tls": {"certificate": "default", "session": {}},
+ }
+ },
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ ), 'load application configuration'
- if cache_size is not None:
- session['cache_size'] = cache_size
- if timeout is not None:
- session['timeout'] = timeout
- return self.conf(session, 'listeners/*:7080/tls/session')
+def add_session(cache_size=None, timeout=None):
+ session = {}
- def connect(self, ctx=None, session=None):
- sock = socket.create_connection(('127.0.0.1', 7080))
+ if cache_size is not None:
+ session['cache_size'] = cache_size
+ if timeout is not None:
+ session['timeout'] = timeout
- if ctx is None:
- ctx = Context(TLSv1_2_METHOD)
- ctx.set_session_cache_mode(SESS_CACHE_CLIENT)
- ctx.set_options(OP_NO_TICKET)
+ return client.conf(session, 'listeners/*:7080/tls/session')
- client = Connection(ctx, sock)
- client.set_connect_state()
- if session is not None:
- client.set_session(session)
+def connect(ctx=None, session=None):
+ sock = socket.create_connection(('127.0.0.1', 7080))
- client.do_handshake()
- client.shutdown()
+ if ctx is None:
+ ctx = Context(TLSv1_2_METHOD)
+ ctx.set_session_cache_mode(SESS_CACHE_CLIENT)
+ ctx.set_options(OP_NO_TICKET)
- return (
- client,
- client.get_session(),
- ctx,
- _lib.SSL_session_reused(client._ssl),
- )
+ conn = Connection(ctx, sock)
+ conn.set_connect_state()
- def test_tls_session(self):
- client, sess, ctx, reused = self.connect()
- assert not reused, 'new connection'
+ if session is not None:
+ conn.set_session(session)
- client, _, _, reused = self.connect(ctx, sess)
- assert not reused, 'no cache'
+ conn.do_handshake()
+ conn.shutdown()
- assert 'success' in self.add_session(cache_size=2)
+ return (
+ conn,
+ conn.get_session(),
+ ctx,
+ _lib.SSL_session_reused(conn._ssl),
+ )
- client, sess, ctx, reused = self.connect()
- assert not reused, 'new connection cache'
- client, _, _, reused = self.connect(ctx, sess)
- assert reused, 'cache'
+@pytest.mark.skipif(
+ not hasattr(_lib, 'SSL_session_reused'),
+ reason='session reuse is not supported',
+)
+def test_tls_session():
+ _, sess, ctx, reused = connect()
+ assert not reused, 'new connection'
+
+ _, _, _, reused = connect(ctx, sess)
+ assert not reused, 'no cache'
+
+ assert 'success' in add_session(cache_size=2)
+
+ _, sess, ctx, reused = connect()
+ assert not reused, 'new connection cache'
- client, _, _, reused = self.connect(ctx, sess)
- assert reused, 'cache 2'
+ _, _, _, reused = connect(ctx, sess)
+ assert reused, 'cache'
- # check that at least one session of four is not reused
+ _, _, _, reused = connect(ctx, sess)
+ assert reused, 'cache 2'
- clients = [self.connect() for _ in range(4)]
- assert True not in [c[-1] for c in clients], 'cache small all new'
+ # check that at least one session of four is not reused
- clients_again = [self.connect(c[2], c[1]) for c in clients]
- assert False in [c[-1] for c in clients_again], 'cache small no reuse'
+ conns = [connect() for _ in range(4)]
+ assert True not in [c[-1] for c in conns], 'cache small all new'
- # all four sessions are reused
+ conns_again = [connect(c[2], c[1]) for c in conns]
+ assert False in [c[-1] for c in conns_again], 'cache small no reuse'
- assert 'success' in self.add_session(cache_size=8)
+ # all four sessions are reused
- clients = [self.connect() for _ in range(4)]
- assert True not in [c[-1] for c in clients], 'cache big all new'
+ assert 'success' in add_session(cache_size=8)
- clients_again = [self.connect(c[2], c[1]) for c in clients]
- assert False not in [c[-1] for c in clients_again], 'cache big reuse'
+ conns = [connect() for _ in range(4)]
+ assert True not in [c[-1] for c in conns], 'cache big all new'
+
+ conns_again = [connect(c[2], c[1]) for c in conns]
+ assert False not in [c[-1] for c in conns_again], 'cache big reuse'
+
+
+@pytest.mark.skipif(
+ not hasattr(_lib, 'SSL_session_reused'),
+ reason='session reuse is not supported',
+)
+def test_tls_session_timeout():
+ assert 'success' in add_session(cache_size=5, timeout=1)
- def test_tls_session_timeout(self):
- assert 'success' in self.add_session(cache_size=5, timeout=1)
+ _, sess, ctx, reused = connect()
+ assert not reused, 'new connection'
- client, sess, ctx, reused = self.connect()
- assert not reused, 'new connection'
+ _, _, _, reused = connect(ctx, sess)
+ assert reused, 'no timeout'
- client, _, _, reused = self.connect(ctx, sess)
- assert reused, 'no timeout'
+ time.sleep(3)
- time.sleep(3)
+ _, _, _, reused = connect(ctx, sess)
+ assert not reused, 'timeout'
- client, _, _, reused = self.connect(ctx, sess)
- assert not reused, 'timeout'
- def test_tls_session_invalid(self):
- assert 'error' in self.add_session(cache_size=-1)
- assert 'error' in self.add_session(cache_size={})
- assert 'error' in self.add_session(timeout=-1)
- assert 'error' in self.add_session(timeout={})
+def test_tls_session_invalid():
+ assert 'error' in add_session(cache_size=-1)
+ assert 'error' in add_session(cache_size={})
+ assert 'error' in add_session(timeout=-1)
+ assert 'error' in add_session(timeout={})
diff --git a/test/test_tls_sni.py b/test/test_tls_sni.py
index e918bb20..253d9813 100644
--- a/test/test_tls_sni.py
+++ b/test/test_tls_sni.py
@@ -1,38 +1,112 @@
import ssl
import subprocess
-from unit.applications.tls import TestApplicationTLS
+import pytest
+from unit.applications.tls import ApplicationTLS
from unit.option import option
+prerequisites = {'modules': {'openssl': 'any'}}
-class TestTLSSNI(TestApplicationTLS):
- prerequisites = {'modules': {'openssl': 'any'}}
+client = ApplicationTLS()
- def setup_method(self):
- self._load_conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- )
- def openssl_date_to_sec_epoch(self, date):
- return self.date_to_sec_epoch(date, '%b %d %X %Y %Z')
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
+ }
+ )
+
+
+def add_tls(cert='default'):
+ assert 'success' in client.conf(
+ {"pass": "routes", "tls": {"certificate": cert}},
+ 'listeners/*:7080',
+ )
+
+
+def check_cert(host, expect, ctx):
+ resp, sock = client.get_ssl(
+ headers={
+ 'Host': host,
+ 'Content-Length': '0',
+ 'Connection': 'close',
+ },
+ start=True,
+ context=ctx,
+ )
+
+ assert resp['status'] == 200
+ assert sock.getpeercert()['subject'][0][0][1] == expect
+
+
+def config_bundles(bundles):
+ client.certificate('root', False)
+
+ for b in bundles:
+ client.openssl_conf(rewrite=True, alt_names=bundles[b]['alt_names'])
+ subj = f'/CN={bundles[b]["subj"]}/' if 'subj' in bundles[b] else '/'
+
+ subprocess.check_output(
+ [
+ 'openssl',
+ 'req',
+ '-new',
+ '-subj',
+ subj,
+ '-config',
+ f'{option.temp_dir}/openssl.conf',
+ '-out',
+ f'{option.temp_dir}/{b}.csr',
+ '-keyout',
+ f'{option.temp_dir}/{b}.key',
+ ],
+ stderr=subprocess.STDOUT,
+ )
- def add_tls(self, cert='default'):
- assert 'success' in self.conf(
- {"pass": "routes", "tls": {"certificate": cert}},
- 'listeners/*:7080',
+ generate_ca_conf()
+
+ for b in bundles:
+ subj = f'/CN={bundles[b]["subj"]}/' if 'subj' in bundles[b] else '/'
+
+ subprocess.check_output(
+ [
+ 'openssl',
+ 'ca',
+ '-batch',
+ '-subj',
+ subj,
+ '-config',
+ f'{option.temp_dir}/ca.conf',
+ '-keyfile',
+ f'{option.temp_dir}/root.key',
+ '-cert',
+ f'{option.temp_dir}/root.crt',
+ '-in',
+ f'{option.temp_dir}/{b}.csr',
+ '-out',
+ f'{option.temp_dir}/{b}.crt',
+ ],
+ stderr=subprocess.STDOUT,
)
- def remove_tls(self):
- assert 'success' in self.conf({"pass": "routes"}, 'listeners/*:7080')
+ load_certs(bundles)
+
+ context = ssl.create_default_context()
+ context.check_hostname = False
+ context.verify_mode = ssl.CERT_REQUIRED
+ context.load_verify_locations(f'{option.temp_dir}/root.crt')
- def generate_ca_conf(self):
- with open(f'{option.temp_dir}/ca.conf', 'w') as f:
- f.write(
- f"""[ ca ]
+ return context
+
+
+def generate_ca_conf():
+ with open(f'{option.temp_dir}/ca.conf', 'w') as f:
+ f.write(
+ f"""[ ca ]
default_ca = myca
[ myca ]
@@ -50,231 +124,177 @@ commonName = optional
[ myca_extensions ]
basicConstraints = critical,CA:TRUE"""
- )
-
- with open(f'{option.temp_dir}/certserial', 'w') as f:
- f.write('1000')
-
- with open(f'{option.temp_dir}/certindex', 'w') as f:
- f.write('')
-
- def config_bundles(self, bundles):
- self.certificate('root', False)
-
- for b in bundles:
- self.openssl_conf(rewrite=True, alt_names=bundles[b]['alt_names'])
- subj = f'/CN={bundles[b]["subj"]}/' if 'subj' in bundles[b] else '/'
-
- subprocess.check_output(
- [
- 'openssl',
- 'req',
- '-new',
- '-subj',
- subj,
- '-config',
- f'{option.temp_dir}/openssl.conf',
- '-out',
- f'{option.temp_dir}/{b}.csr',
- '-keyout',
- f'{option.temp_dir}/{b}.key',
- ],
- stderr=subprocess.STDOUT,
- )
-
- self.generate_ca_conf()
-
- for b in bundles:
- subj = f'/CN={bundles[b]["subj"]}/' if 'subj' in bundles[b] else '/'
-
- subprocess.check_output(
- [
- 'openssl',
- 'ca',
- '-batch',
- '-subj',
- subj,
- '-config',
- f'{option.temp_dir}/ca.conf',
- '-keyfile',
- f'{option.temp_dir}/root.key',
- '-cert',
- f'{option.temp_dir}/root.crt',
- '-in',
- f'{option.temp_dir}/{b}.csr',
- '-out',
- f'{option.temp_dir}/{b}.crt',
- ],
- stderr=subprocess.STDOUT,
- )
-
- self.context = ssl.create_default_context()
- self.context.check_hostname = False
- self.context.verify_mode = ssl.CERT_REQUIRED
- self.context.load_verify_locations(f'{option.temp_dir}/root.crt')
-
- self.load_certs(bundles)
-
- def load_certs(self, bundles):
- for bname, bvalue in bundles.items():
- assert 'success' in self.certificate_load(
- bname, bname
- ), f'certificate {bvalue["subj"]} upload'
-
- def check_cert(self, host, expect):
- resp, sock = self.get_ssl(
- headers={
- 'Host': host,
- 'Content-Length': '0',
- 'Connection': 'close',
- },
- start=True,
)
- assert resp['status'] == 200
- assert sock.getpeercert()['subject'][0][0][1] == expect
-
- def test_tls_sni(self):
- bundles = {
- "default": {"subj": "default", "alt_names": ["default"]},
- "localhost.com": {
- "subj": "localhost.com",
- "alt_names": ["alt1.localhost.com"],
- },
- "example.com": {
- "subj": "example.com",
- "alt_names": ["alt1.example.com", "alt2.example.com"],
- },
+ with open(f'{option.temp_dir}/certserial', 'w') as f:
+ f.write('1000')
+
+ with open(f'{option.temp_dir}/certindex', 'w') as f:
+ f.write('')
+
+
+def load_certs(bundles):
+ for bname, bvalue in bundles.items():
+ assert 'success' in client.certificate_load(
+ bname, bname
+ ), f'certificate {bvalue["subj"]} upload'
+
+
+def remove_tls():
+ assert 'success' in client.conf({"pass": "routes"}, 'listeners/*:7080')
+
+
+def test_tls_sni():
+ bundles = {
+ "default": {"subj": "default", "alt_names": ["default"]},
+ "localhost.com": {
+ "subj": "localhost.com",
+ "alt_names": ["alt1.localhost.com"],
+ },
+ "example.com": {
+ "subj": "example.com",
+ "alt_names": ["alt1.example.com", "alt2.example.com"],
+ },
+ }
+ ctx = config_bundles(bundles)
+ add_tls(["default", "localhost.com", "example.com"])
+
+ check_cert('alt1.localhost.com', bundles['localhost.com']['subj'], ctx)
+ check_cert('alt2.example.com', bundles['example.com']['subj'], ctx)
+ check_cert('blah', bundles['default']['subj'], ctx)
+
+
+def test_tls_sni_no_hostname():
+ bundles = {
+ "localhost.com": {"subj": "localhost.com", "alt_names": []},
+ "example.com": {
+ "subj": "example.com",
+ "alt_names": ["example.com"],
+ },
+ }
+ ctx = config_bundles(bundles)
+ add_tls(["localhost.com", "example.com"])
+
+ resp, sock = client.get_ssl(
+ headers={'Content-Length': '0', 'Connection': 'close'},
+ start=True,
+ context=ctx,
+ )
+ assert resp['status'] == 200
+ assert (
+ sock.getpeercert()['subject'][0][0][1]
+ == bundles['localhost.com']['subj']
+ )
+
+
+def test_tls_sni_upper_case():
+ bundles = {
+ "localhost.com": {"subj": "LOCALHOST.COM", "alt_names": []},
+ "example.com": {
+ "subj": "example.com",
+ "alt_names": ["ALT1.EXAMPLE.COM", "*.ALT2.EXAMPLE.COM"],
+ },
+ }
+ ctx = config_bundles(bundles)
+ add_tls(["localhost.com", "example.com"])
+
+ check_cert('localhost.com', bundles['localhost.com']['subj'], ctx)
+ check_cert('LOCALHOST.COM', bundles['localhost.com']['subj'], ctx)
+ check_cert('EXAMPLE.COM', bundles['localhost.com']['subj'], ctx)
+ check_cert('ALT1.EXAMPLE.COM', bundles['example.com']['subj'], ctx)
+ check_cert('WWW.ALT2.EXAMPLE.COM', bundles['example.com']['subj'], ctx)
+
+
+def test_tls_sni_only_bundle():
+ bundles = {
+ "localhost.com": {
+ "subj": "localhost.com",
+ "alt_names": ["alt1.localhost.com", "alt2.localhost.com"],
}
- self.config_bundles(bundles)
- self.add_tls(["default", "localhost.com", "example.com"])
-
- self.check_cert('alt1.localhost.com', bundles['localhost.com']['subj'])
- self.check_cert('alt2.example.com', bundles['example.com']['subj'])
- self.check_cert('blah', bundles['default']['subj'])
-
- def test_tls_sni_no_hostname(self):
- bundles = {
- "localhost.com": {"subj": "localhost.com", "alt_names": []},
- "example.com": {
- "subj": "example.com",
- "alt_names": ["example.com"],
- },
+ }
+ ctx = config_bundles(bundles)
+ add_tls(["localhost.com"])
+
+ check_cert('domain.com', bundles['localhost.com']['subj'], ctx)
+ check_cert('alt1.domain.com', bundles['localhost.com']['subj'], ctx)
+
+
+def test_tls_sni_wildcard():
+ bundles = {
+ "localhost.com": {"subj": "localhost.com", "alt_names": []},
+ "example.com": {
+ "subj": "example.com",
+ "alt_names": ["*.example.com", "*.alt.example.com"],
+ },
+ }
+ ctx = config_bundles(bundles)
+ add_tls(["localhost.com", "example.com"])
+
+ check_cert('example.com', bundles['localhost.com']['subj'], ctx)
+ check_cert('www.example.com', bundles['example.com']['subj'], ctx)
+ check_cert('alt.example.com', bundles['example.com']['subj'], ctx)
+ check_cert('www.alt.example.com', bundles['example.com']['subj'], ctx)
+ check_cert('www.alt.example.ru', bundles['localhost.com']['subj'], ctx)
+
+
+def test_tls_sni_duplicated_bundle():
+ bundles = {
+ "localhost.com": {
+ "subj": "localhost.com",
+ "alt_names": ["localhost.com", "alt2.localhost.com"],
}
- self.config_bundles(bundles)
- self.add_tls(["localhost.com", "example.com"])
+ }
+ ctx = config_bundles(bundles)
+ add_tls(["localhost.com", "localhost.com"])
- resp, sock = self.get_ssl(
- headers={'Content-Length': '0', 'Connection': 'close'},
- start=True,
- )
- assert resp['status'] == 200
- assert (
- sock.getpeercert()['subject'][0][0][1]
- == bundles['localhost.com']['subj']
- )
+ check_cert('localhost.com', bundles['localhost.com']['subj'], ctx)
+ check_cert('alt2.localhost.com', bundles['localhost.com']['subj'], ctx)
- def test_tls_sni_upper_case(self):
- bundles = {
- "localhost.com": {"subj": "LOCALHOST.COM", "alt_names": []},
- "example.com": {
- "subj": "example.com",
- "alt_names": ["ALT1.EXAMPLE.COM", "*.ALT2.EXAMPLE.COM"],
- },
- }
- self.config_bundles(bundles)
- self.add_tls(["localhost.com", "example.com"])
-
- self.check_cert('localhost.com', bundles['localhost.com']['subj'])
- self.check_cert('LOCALHOST.COM', bundles['localhost.com']['subj'])
- self.check_cert('EXAMPLE.COM', bundles['localhost.com']['subj'])
- self.check_cert('ALT1.EXAMPLE.COM', bundles['example.com']['subj'])
- self.check_cert('WWW.ALT2.EXAMPLE.COM', bundles['example.com']['subj'])
-
- def test_tls_sni_only_bundle(self):
- bundles = {
- "localhost.com": {
- "subj": "localhost.com",
- "alt_names": ["alt1.localhost.com", "alt2.localhost.com"],
- }
- }
- self.config_bundles(bundles)
- self.add_tls(["localhost.com"])
-
- self.check_cert('domain.com', bundles['localhost.com']['subj'])
- self.check_cert('alt1.domain.com', bundles['localhost.com']['subj'])
-
- def test_tls_sni_wildcard(self):
- bundles = {
- "localhost.com": {"subj": "localhost.com", "alt_names": []},
- "example.com": {
- "subj": "example.com",
- "alt_names": ["*.example.com", "*.alt.example.com"],
- },
- }
- self.config_bundles(bundles)
- self.add_tls(["localhost.com", "example.com"])
-
- self.check_cert('example.com', bundles['localhost.com']['subj'])
- self.check_cert('www.example.com', bundles['example.com']['subj'])
- self.check_cert('alt.example.com', bundles['example.com']['subj'])
- self.check_cert('www.alt.example.com', bundles['example.com']['subj'])
- self.check_cert('www.alt.example.ru', bundles['localhost.com']['subj'])
-
- def test_tls_sni_duplicated_bundle(self):
- bundles = {
- "localhost.com": {
- "subj": "localhost.com",
- "alt_names": ["localhost.com", "alt2.localhost.com"],
- }
- }
- self.config_bundles(bundles)
- self.add_tls(["localhost.com", "localhost.com"])
- self.check_cert('localhost.com', bundles['localhost.com']['subj'])
- self.check_cert('alt2.localhost.com', bundles['localhost.com']['subj'])
+def test_tls_sni_same_alt():
+ bundles = {
+ "localhost": {"subj": "subj1", "alt_names": "same.altname.com"},
+ "example": {"subj": "subj2", "alt_names": "same.altname.com"},
+ }
+ ctx = config_bundles(bundles)
+ add_tls(["localhost", "example"])
- def test_tls_sni_same_alt(self):
- bundles = {
- "localhost": {"subj": "subj1", "alt_names": "same.altname.com"},
- "example": {"subj": "subj2", "alt_names": "same.altname.com"},
- }
- self.config_bundles(bundles)
- self.add_tls(["localhost", "example"])
-
- self.check_cert('localhost', bundles['localhost']['subj'])
- self.check_cert('example', bundles['localhost']['subj'])
-
- def test_tls_sni_empty_cn(self):
- bundles = {"localhost": {"alt_names": ["alt.localhost.com"]}}
- self.config_bundles(bundles)
- self.add_tls(["localhost"])
-
- resp, sock = self.get_ssl(
- headers={
- 'Host': 'domain.com',
- 'Content-Length': '0',
- 'Connection': 'close',
- },
- start=True,
+ check_cert('localhost', bundles['localhost']['subj'], ctx)
+ check_cert('example', bundles['localhost']['subj'], ctx)
+
+
+def test_tls_sni_empty_cn():
+ bundles = {"localhost": {"alt_names": ["alt.localhost.com"]}}
+ ctx = config_bundles(bundles)
+ add_tls(["localhost"])
+
+ resp, sock = client.get_ssl(
+ headers={
+ 'Host': 'domain.com',
+ 'Content-Length': '0',
+ 'Connection': 'close',
+ },
+ start=True,
+ context=ctx,
+ )
+
+ assert resp['status'] == 200
+ assert sock.getpeercert()['subjectAltName'][0][1] == 'alt.localhost.com'
+
+
+def test_tls_sni_invalid():
+ _ = config_bundles({"localhost": {"subj": "subj1", "alt_names": ''}})
+ add_tls(["localhost"])
+
+ def check_certificate(cert):
+ assert 'error' in client.conf(
+ {"pass": "routes", "tls": {"certificate": cert}},
+ 'listeners/*:7080',
)
- assert resp['status'] == 200
- assert sock.getpeercert()['subjectAltName'][0][1] == 'alt.localhost.com'
-
- def test_tls_sni_invalid(self):
- self.config_bundles({"localhost": {"subj": "subj1", "alt_names": ''}})
- self.add_tls(["localhost"])
-
- def check_certificate(cert):
- assert 'error' in self.conf(
- {"pass": "routes", "tls": {"certificate": cert}},
- 'listeners/*:7080',
- )
-
- check_certificate('')
- check_certificate('blah')
- check_certificate([])
- check_certificate(['blah'])
- check_certificate(['localhost', 'blah'])
- check_certificate(['localhost', []])
+ check_certificate('')
+ check_certificate('blah')
+ check_certificate([])
+ check_certificate(['blah'])
+ check_certificate(['localhost', 'blah'])
+ check_certificate(['localhost', []])
diff --git a/test/test_tls_tickets.py b/test/test_tls_tickets.py
index cca230f3..0d8e4f36 100644
--- a/test/test_tls_tickets.py
+++ b/test/test_tls_tickets.py
@@ -7,189 +7,196 @@ from OpenSSL.SSL import (
TLSv1_2_METHOD,
Context,
Connection,
- Session,
_lib,
)
-from unit.applications.tls import TestApplicationTLS
+from unit.applications.tls import ApplicationTLS
+prerequisites = {'modules': {'openssl': 'any'}}
-class TestTLSTicket(TestApplicationTLS):
- prerequisites = {'modules': {'openssl': 'any'}}
+client = ApplicationTLS()
- ticket = 'U1oDTh11mMxODuw12gS0EXX1E/PkZG13cJNQ6m5+6BGlfPTjNlIEw7PSVU3X1gTE'
- ticket2 = '5AV0DSYIYbZWZQB7fCnTHZmMxtotb/aXjam+n2XS79lTvX3Tq9xGqpC8XKNEF2lt'
- ticket80 = '6Pfil8lv/k8zf8MndPpfXaO5EAV6dhME6zs6CfUyq2yziynQwSywtKQMqHGnJ2HR\
+TICKET = 'U1oDTh11mMxODuw12gS0EXX1E/PkZG13cJNQ6m5+6BGlfPTjNlIEw7PSVU3X1gTE'
+TICKET2 = '5AV0DSYIYbZWZQB7fCnTHZmMxtotb/aXjam+n2XS79lTvX3Tq9xGqpC8XKNEF2lt'
+TICKET80 = '6Pfil8lv/k8zf8MndPpfXaO5EAV6dhME6zs6CfUyq2yziynQwSywtKQMqHGnJ2HR\
49TZXi/Y4/8RSIO7QPsU51/HLR1gWIMhVM2m9yh93Bw='
- @pytest.fixture(autouse=True)
- def setup_method_fixture(self, request):
- self.certificate()
- listener_conf = {
- "pass": "routes",
- "tls": {
- "certificate": "default",
- "session": {"cache_size": 0, "tickets": True},
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ client.certificate()
+
+ listener_conf = {
+ "pass": "routes",
+ "tls": {
+ "certificate": "default",
+ "session": {"cache_size": 0, "tickets": True},
+ },
+ }
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": listener_conf,
+ "*:7081": listener_conf,
+ "*:7082": listener_conf,
},
+ "routes": [{"action": {"return": 200}}],
+ "applications": {},
}
+ ), 'load application configuration'
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": listener_conf,
- "*:7081": listener_conf,
- "*:7082": listener_conf,
- },
- "routes": [{"action": {"return": 200}}],
- "applications": {},
- }
- ), 'load application configuration'
-
- def set_tickets(self, tickets=True, port=7080):
- assert 'success' in self.conf(
- {"cache_size": 0, "tickets": tickets},
- f'listeners/*:{port}/tls/session',
- )
- def connect(self, ctx=None, session=None, port=7080):
- sock = socket.create_connection(('127.0.0.1', port))
+def connect(ctx=None, session=None, port=7080):
+ sock = socket.create_connection(('127.0.0.1', port))
- if ctx is None:
- ctx = Context(TLSv1_2_METHOD)
+ if ctx is None:
+ ctx = Context(TLSv1_2_METHOD)
- client = Connection(ctx, sock)
- client.set_connect_state()
+ conn = Connection(ctx, sock)
+ conn.set_connect_state()
- if session is not None:
- client.set_session(session)
+ if session is not None:
+ conn.set_session(session)
- client.do_handshake()
- client.shutdown()
+ conn.do_handshake()
+ conn.shutdown()
+
+ return (
+ conn.get_session(),
+ ctx,
+ _lib.SSL_session_reused(conn._ssl),
+ )
- return (
- client.get_session(),
- ctx,
- _lib.SSL_session_reused(client._ssl),
- )
- def has_ticket(self, sess):
- return _lib.SSL_SESSION_has_ticket(sess._session)
+def has_ticket(sess):
+ return _lib.SSL_SESSION_has_ticket(sess._session)
- @pytest.mark.skipif(
- not hasattr(_lib, 'SSL_SESSION_has_ticket'),
- reason='ticket check is not supported',
+
+def set_tickets(tickets=True, port=7080):
+ assert 'success' in client.conf(
+ {"cache_size": 0, "tickets": tickets},
+ f'listeners/*:{port}/tls/session',
)
- def test_tls_ticket(self):
- sess, ctx, reused = self.connect()
- assert self.has_ticket(sess), 'tickets True'
- assert not reused, 'tickets True not reused'
- sess, ctx, reused = self.connect(ctx, sess)
- assert self.has_ticket(sess), 'tickets True reconnect'
- assert reused, 'tickets True reused'
- self.set_tickets(tickets=False)
+@pytest.mark.skipif(
+ not hasattr(_lib, 'SSL_SESSION_has_ticket'),
+ reason='ticket check is not supported',
+)
+def test_tls_ticket():
+ sess, ctx, reused = connect()
+ assert has_ticket(sess), 'tickets True'
+ assert not reused, 'tickets True not reused'
+
+ sess, ctx, reused = connect(ctx, sess)
+ assert has_ticket(sess), 'tickets True reconnect'
+ assert reused, 'tickets True reused'
- sess, _, _ = self.connect()
- assert not self.has_ticket(sess), 'tickets False'
+ set_tickets(tickets=False)
- assert 'success' in self.conf_delete(
- 'listeners/*:7080/tls/session/tickets'
- ), 'tickets default configure'
+ sess, _, _ = connect()
+ assert not has_ticket(sess), 'tickets False'
- sess, _, _ = self.connect()
- assert not self.has_ticket(sess), 'tickets default (false)'
+ assert 'success' in client.conf_delete(
+ 'listeners/*:7080/tls/session/tickets'
+ ), 'tickets default configure'
- @pytest.mark.skipif(
- not hasattr(_lib, 'SSL_SESSION_has_ticket'),
- reason='ticket check is not supported',
- )
- def test_tls_ticket_string(self):
- self.set_tickets(self.ticket)
- sess, ctx, _ = self.connect()
- assert self.has_ticket(sess), 'tickets string'
+ sess, _, _ = connect()
+ assert not has_ticket(sess), 'tickets default (false)'
- sess2, _, reused = self.connect(ctx, sess)
- assert self.has_ticket(sess2), 'tickets string reconnect'
- assert reused, 'tickets string reused'
- sess2, _, reused = self.connect(ctx, sess, port=7081)
- assert self.has_ticket(sess2), 'connect True'
- assert not reused, 'connect True not reused'
+@pytest.mark.skipif(
+ not hasattr(_lib, 'SSL_SESSION_has_ticket'),
+ reason='ticket check is not supported',
+)
+def test_tls_ticket_string():
+ set_tickets(TICKET)
+ sess, ctx, _ = connect()
+ assert has_ticket(sess), 'tickets string'
- self.set_tickets(self.ticket2, port=7081)
+ sess2, _, reused = connect(ctx, sess)
+ assert has_ticket(sess2), 'tickets string reconnect'
+ assert reused, 'tickets string reused'
- sess2, _, reused = self.connect(ctx, sess, port=7081)
- assert self.has_ticket(sess2), 'wrong ticket'
- assert not reused, 'wrong ticket not reused'
+ sess2, _, reused = connect(ctx, sess, port=7081)
+ assert has_ticket(sess2), 'connect True'
+ assert not reused, 'connect True not reused'
- self.set_tickets(self.ticket80)
+ set_tickets(TICKET2, port=7081)
- sess, ctx, _ = self.connect()
- assert self.has_ticket(sess), 'tickets string 80'
+ sess2, _, reused = connect(ctx, sess, port=7081)
+ assert has_ticket(sess2), 'wrong ticket'
+ assert not reused, 'wrong ticket not reused'
- sess2, _, reused = self.connect(ctx, sess)
- assert self.has_ticket(sess2), 'tickets string 80 reconnect'
- assert reused, 'tickets string 80 reused'
+ set_tickets(TICKET80)
- sess2, _, reused = self.connect(ctx, sess, port=7081)
- assert self.has_ticket(sess2), 'wrong ticket 80'
- assert not reused, 'wrong ticket 80 not reused'
+ sess, ctx, _ = connect()
+ assert has_ticket(sess), 'tickets string 80'
- @pytest.mark.skipif(
- not hasattr(_lib, 'SSL_SESSION_has_ticket'),
- reason='ticket check is not supported',
- )
- def test_tls_ticket_array(self):
- self.set_tickets([])
-
- sess, ctx, _ = self.connect()
- assert not self.has_ticket(sess), 'tickets array empty'
-
- self.set_tickets([self.ticket, self.ticket2])
- self.set_tickets(self.ticket, port=7081)
- self.set_tickets(self.ticket2, port=7082)
-
- sess, ctx, _ = self.connect()
- _, _, reused = self.connect(ctx, sess, port=7081)
- assert not reused, 'not last ticket'
- _, _, reused = self.connect(ctx, sess, port=7082)
- assert reused, 'last ticket'
-
- sess, ctx, _ = self.connect(port=7081)
- _, _, reused = self.connect(ctx, sess)
- assert reused, 'first ticket'
-
- sess, ctx, _ = self.connect(port=7082)
- _, _, reused = self.connect(ctx, sess)
- assert reused, 'second ticket'
-
- assert 'success' in self.conf_delete(
- 'listeners/*:7080/tls/session/tickets/0'
- ), 'removed first ticket'
- assert 'success' in self.conf_post(
- f'"{self.ticket}"', 'listeners/*:7080/tls/session/tickets'
- ), 'add new ticket to the end of array'
-
- sess, ctx, _ = self.connect()
- _, _, reused = self.connect(ctx, sess, port=7082)
- assert not reused, 'not last ticket 2'
- _, _, reused = self.connect(ctx, sess, port=7081)
- assert reused, 'last ticket 2'
-
- def test_tls_ticket_invalid(self):
- def check_tickets(tickets):
- assert 'error' in self.conf(
- {"tickets": tickets},
- 'listeners/*:7080/tls/session',
- )
-
- check_tickets({})
- check_tickets('!?&^' * 16)
- check_tickets(f'{self.ticket[:-2]}!{self.ticket[3:]}')
- check_tickets(self.ticket[:-1])
- check_tickets(f'{self.ticket}b')
- check_tickets(f'{self.ticket}blah')
- check_tickets([True, self.ticket, self.ticket2])
- check_tickets([self.ticket, 'blah', self.ticket2])
- check_tickets([self.ticket, self.ticket2, []])
+ sess2, _, reused = connect(ctx, sess)
+ assert has_ticket(sess2), 'tickets string 80 reconnect'
+ assert reused, 'tickets string 80 reused'
+
+ sess2, _, reused = connect(ctx, sess, port=7081)
+ assert has_ticket(sess2), 'wrong ticket 80'
+ assert not reused, 'wrong ticket 80 not reused'
+
+
+@pytest.mark.skipif(
+ not hasattr(_lib, 'SSL_SESSION_has_ticket'),
+ reason='ticket check is not supported',
+)
+def test_tls_ticket_array():
+ set_tickets([])
+
+ sess, ctx, _ = connect()
+ assert not has_ticket(sess), 'tickets array empty'
+
+ set_tickets([TICKET, TICKET2])
+ set_tickets(TICKET, port=7081)
+ set_tickets(TICKET2, port=7082)
+
+ sess, ctx, _ = connect()
+ _, _, reused = connect(ctx, sess, port=7081)
+ assert not reused, 'not last ticket'
+ _, _, reused = connect(ctx, sess, port=7082)
+ assert reused, 'last ticket'
+
+ sess, ctx, _ = connect(port=7081)
+ _, _, reused = connect(ctx, sess)
+ assert reused, 'first ticket'
+
+ sess, ctx, _ = connect(port=7082)
+ _, _, reused = connect(ctx, sess)
+ assert reused, 'second ticket'
+
+ assert 'success' in client.conf_delete(
+ 'listeners/*:7080/tls/session/tickets/0'
+ ), 'removed first ticket'
+ assert 'success' in client.conf_post(
+ f'"{TICKET}"', 'listeners/*:7080/tls/session/tickets'
+ ), 'add new ticket to the end of array'
+
+ sess, ctx, _ = connect()
+ _, _, reused = connect(ctx, sess, port=7082)
+ assert not reused, 'not last ticket 2'
+ _, _, reused = connect(ctx, sess, port=7081)
+ assert reused, 'last ticket 2'
+
+
+def test_tls_ticket_invalid():
+ def check_tickets(tickets):
+ assert 'error' in client.conf(
+ {"tickets": tickets},
+ 'listeners/*:7080/tls/session',
+ )
+
+ check_tickets({})
+ check_tickets('!?&^' * 16)
+ check_tickets(f'{TICKET[:-2]}!{TICKET[3:]}')
+ check_tickets(TICKET[:-1])
+ check_tickets(f'{TICKET}b')
+ check_tickets(f'{TICKET}blah')
+ check_tickets([True, TICKET, TICKET2])
+ check_tickets([TICKET, 'blah', TICKET2])
+ check_tickets([TICKET, TICKET2, []])
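
The hunks above follow the pattern used throughout this merge: class-based tests (methods on a TestApplication* subclass calling self.conf() and self.get()) become plain module-level pytest functions that share a module-level client object, with prerequisites declared as module data instead of a class attribute. A minimal before/after sketch of that layout, using a hypothetical test_ok() rather than code taken from the diff:

    # Before: test methods inherit helpers from a TestApplication* base class
    from unit.applications.lang.python import TestApplicationPython

    class TestExample(TestApplicationPython):
        prerequisites = {'modules': {'python': 'any'}}

        def test_ok(self):
            assert 'success' in self.conf(
                {
                    "listeners": {"*:7080": {"pass": "routes"}},
                    "routes": [{"action": {"return": 200}}],
                }
            )
            assert self.get()['status'] == 200

    # After: module-level prerequisites, a shared client, and plain functions
    from unit.applications.lang.python import ApplicationPython

    prerequisites = {'modules': {'python': 'any'}}

    client = ApplicationPython()

    def test_ok():
        assert 'success' in client.conf(
            {
                "listeners": {"*:7080": {"pass": "routes"}},
                "routes": [{"action": {"return": 200}}],
            }
        )
        assert client.get()['status'] == 200
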
diff --git a/test/test_unix_abstract.py b/test/test_unix_abstract.py
index c562487b..7ed2389c 100644
--- a/test/test_unix_abstract.py
+++ b/test/test_unix_abstract.py
@@ -1,109 +1,105 @@
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {
+ 'modules': {'python': 'any'},
+ 'features': {'unix_abstract': True},
+}
-class TestUnixAbstract(TestApplicationPython):
- prerequisites = {
- 'modules': {'python': 'any'},
- 'features': ['unix_abstract'],
- }
+client = ApplicationPython()
- def test_unix_abstract_source(self):
- addr = '\0sock'
- def source(source):
- assert 'success' in self.conf(
- f'"{source}"', 'routes/0/match/source'
- )
+def test_unix_abstract_source():
+ addr = '\0sock'
- assert 'success' in self.conf(
- {
- "listeners": {
- "127.0.0.1:7080": {"pass": "routes"},
- f"unix:@{addr[1:]}": {"pass": "routes"},
- },
- "routes": [
- {
- "match": {"source": "!0.0.0.0/0"},
- "action": {"return": 200},
- }
- ],
- "applications": {},
- }
- )
+ def source(source):
+ assert 'success' in client.conf(f'"{source}"', 'routes/0/match/source')
- assert (
- self.get(sock_type='unix', addr=addr)['status'] == 200
- ), 'neg ipv4'
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "127.0.0.1:7080": {"pass": "routes"},
+ f"unix:@{addr[1:]}": {"pass": "routes"},
+ },
+ "routes": [
+ {
+ "match": {"source": "!0.0.0.0/0"},
+ "action": {"return": 200},
+ }
+ ],
+ "applications": {},
+ }
+ )
- source("!::/0")
- assert (
- self.get(sock_type='unix', addr=addr)['status'] == 200
- ), 'neg ipv6'
+ assert client.get(sock_type='unix', addr=addr)['status'] == 200, 'neg ipv4'
- source("unix")
- assert self.get()['status'] == 404, 'ipv4'
- assert self.get(sock_type='unix', addr=addr)['status'] == 200, 'unix'
+ source("!::/0")
+ assert client.get(sock_type='unix', addr=addr)['status'] == 200, 'neg ipv6'
- def test_unix_abstract_client_ip(self):
- def get_xff(xff, sock_type='ipv4'):
- address = {
- 'ipv4': ('127.0.0.1', 7080),
- 'ipv6': ('::1', 7081),
- 'unix': ('\0sock', None),
- }
- (addr, port) = address[sock_type]
+ source("unix")
+ assert client.get()['status'] == 404, 'ipv4'
+ assert client.get(sock_type='unix', addr=addr)['status'] == 200, 'unix'
- return self.get(
- sock_type=sock_type,
- addr=addr,
- port=port,
- headers={'Connection': 'close', 'X-Forwarded-For': xff},
- )['body']
- client_ip_dir = f"{option.test_dir}/python/client_ip"
- assert 'success' in self.conf(
- {
- "listeners": {
- "127.0.0.1:7080": {
- "client_ip": {
- "header": "X-Forwarded-For",
- "source": "unix",
- },
- "pass": "applications/client_ip",
- },
- "[::1]:7081": {
- "client_ip": {
- "header": "X-Forwarded-For",
- "source": "unix",
- },
- "pass": "applications/client_ip",
+def test_unix_abstract_client_ip():
+ def get_xff(xff, sock_type='ipv4'):
+ address = {
+ 'ipv4': ('127.0.0.1', 7080),
+ 'ipv6': ('::1', 7081),
+ 'unix': ('\0sock', None),
+ }
+ (addr, port) = address[sock_type]
+
+ return client.get(
+ sock_type=sock_type,
+ addr=addr,
+ port=port,
+ headers={'Connection': 'close', 'X-Forwarded-For': xff},
+ )['body']
+
+ client_ip_dir = f"{option.test_dir}/python/client_ip"
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "127.0.0.1:7080": {
+ "client_ip": {
+ "header": "X-Forwarded-For",
+ "source": "unix",
},
- "unix:@sock": {
- "client_ip": {
- "header": "X-Forwarded-For",
- "source": "unix",
- },
- "pass": "applications/client_ip",
+ "pass": "applications/client_ip",
+ },
+ "[::1]:7081": {
+ "client_ip": {
+ "header": "X-Forwarded-For",
+ "source": "unix",
},
+ "pass": "applications/client_ip",
},
- "applications": {
+ "unix:@sock": {
"client_ip": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": client_ip_dir,
- "working_directory": client_ip_dir,
- "module": "wsgi",
- }
+ "header": "X-Forwarded-For",
+ "source": "unix",
+ },
+ "pass": "applications/client_ip",
},
- }
- )
+ },
+ "applications": {
+ "client_ip": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": client_ip_dir,
+ "working_directory": client_ip_dir,
+ "module": "wsgi",
+ }
+ },
+ }
+ )
- assert get_xff('1.1.1.1') == '127.0.0.1', 'bad source ipv4'
- assert get_xff('1.1.1.1', 'ipv6') == '::1', 'bad source ipv6'
+ assert get_xff('1.1.1.1') == '127.0.0.1', 'bad source ipv4'
+ assert get_xff('1.1.1.1', 'ipv6') == '::1', 'bad source ipv6'
- for ip in [
- '1.1.1.1',
- '::11.22.33.44',
- ]:
- assert get_xff(ip, 'unix') == ip, 'replace'
+ for ip in [
+ '1.1.1.1',
+ '::11.22.33.44',
+ ]:
+ assert get_xff(ip, 'unix') == ip, 'replace'
diff --git a/test/test_upstreams_rr.py b/test/test_upstreams_rr.py
index 324c93cb..046b5614 100644
--- a/test/test_upstreams_rr.py
+++ b/test/test_upstreams_rr.py
@@ -1,476 +1,492 @@
import os
import re
-from unit.applications.lang.python import TestApplicationPython
+import pytest
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+prerequisites = {'modules': {'python': 'any'}}
-class TestUpstreamsRR(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def setup_method(self):
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "upstreams/one"},
- "*:7090": {"pass": "upstreams/two"},
- "*:7081": {"pass": "routes/one"},
- "*:7082": {"pass": "routes/two"},
- "*:7083": {"pass": "routes/three"},
- },
- "upstreams": {
- "one": {
- "servers": {
- "127.0.0.1:7081": {},
- "127.0.0.1:7082": {},
- },
- },
- "two": {
- "servers": {
- "127.0.0.1:7081": {},
- "127.0.0.1:7082": {},
- },
+
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "upstreams/one"},
+ "*:7090": {"pass": "upstreams/two"},
+ "*:7081": {"pass": "routes/one"},
+ "*:7082": {"pass": "routes/two"},
+ "*:7083": {"pass": "routes/three"},
+ },
+ "upstreams": {
+ "one": {
+ "servers": {
+ "127.0.0.1:7081": {},
+ "127.0.0.1:7082": {},
},
},
- "routes": {
- "one": [{"action": {"return": 200}}],
- "two": [{"action": {"return": 201}}],
- "three": [{"action": {"return": 202}}],
+ "two": {
+ "servers": {
+ "127.0.0.1:7081": {},
+ "127.0.0.1:7082": {},
+ },
},
- "applications": {},
},
- ), 'upstreams initial configuration'
+ "routes": {
+ "one": [{"action": {"return": 200}}],
+ "two": [{"action": {"return": 201}}],
+ "three": [{"action": {"return": 202}}],
+ },
+ "applications": {},
+ },
+ ), 'upstreams initial configuration'
+
+ client.cpu_count = os.cpu_count()
- self.cpu_count = os.cpu_count()
- def get_resps(self, req=100, port=7080):
- resps = [0]
+def get_resps(req=100, port=7080):
+ resps = [0]
- for _ in range(req):
- status = self.get(port=port)['status']
- if 200 > status or status > 209:
- continue
+ for _ in range(req):
+ status = client.get(port=port)['status']
+ if 200 > status or status > 209:
+ continue
- ups = status % 10
- if ups > len(resps) - 1:
- resps.extend([0] * (ups - len(resps) + 1))
+ ups = status % 10
+ if ups > len(resps) - 1:
+ resps.extend([0] * (ups - len(resps) + 1))
- resps[ups] += 1
+ resps[ups] += 1
- return resps
+ return resps
- def get_resps_sc(self, req=100, port=7080):
- to_send = b"""GET / HTTP/1.1
+
+def get_resps_sc(req=100, port=7080):
+ to_send = b"""GET / HTTP/1.1
Host: localhost
""" * (
- req - 1
- )
+ req - 1
+ )
- to_send += b"""GET / HTTP/1.1
+ to_send += b"""GET / HTTP/1.1
Host: localhost
Connection: close
"""
- resp = self.http(to_send, raw_resp=True, raw=True, port=port)
- status = re.findall(r'HTTP\/\d\.\d\s(\d\d\d)', resp)
- status = list(filter(lambda x: x[:2] == '20', status))
- ups = list(map(lambda x: int(x[-1]), status))
+ resp = client.http(to_send, raw_resp=True, raw=True, port=port)
+ status = re.findall(r'HTTP\/\d\.\d\s(\d\d\d)', resp)
+ status = list(filter(lambda x: x[:2] == '20', status))
+ ups = list(map(lambda x: int(x[-1]), status))
- resps = [0] * (max(ups) + 1)
- for i in range(len(ups)):
- resps[ups[i]] += 1
+ resps = [0] * (max(ups) + 1)
+ for _, up in enumerate(ups):
+ resps[up] += 1
- return resps
+ return resps
- def test_upstreams_rr_no_weight(self):
- resps = self.get_resps()
- assert sum(resps) == 100, 'no weight sum'
- assert abs(resps[0] - resps[1]) <= self.cpu_count, 'no weight'
- assert 'success' in self.conf_delete(
- 'upstreams/one/servers/127.0.0.1:7081'
- ), 'no weight server remove'
+def test_upstreams_rr_no_weight():
+ resps = get_resps()
+ assert sum(resps) == 100, 'no weight sum'
+ assert abs(resps[0] - resps[1]) <= client.cpu_count, 'no weight'
- resps = self.get_resps(req=50)
- assert resps[1] == 50, 'no weight 2'
+ assert 'success' in client.conf_delete(
+ 'upstreams/one/servers/127.0.0.1:7081'
+ ), 'no weight server remove'
- assert 'success' in self.conf(
- {}, 'upstreams/one/servers/127.0.0.1:7081'
- ), 'no weight server revert'
+ resps = get_resps(req=50)
+ assert resps[1] == 50, 'no weight 2'
- resps = self.get_resps()
- assert sum(resps) == 100, 'no weight 3 sum'
- assert abs(resps[0] - resps[1]) <= self.cpu_count, 'no weight 3'
+ assert 'success' in client.conf(
+ {}, 'upstreams/one/servers/127.0.0.1:7081'
+ ), 'no weight server revert'
- assert 'success' in self.conf(
- {}, 'upstreams/one/servers/127.0.0.1:7083'
- ), 'no weight server new'
+ resps = get_resps()
+ assert sum(resps) == 100, 'no weight 3 sum'
+ assert abs(resps[0] - resps[1]) <= client.cpu_count, 'no weight 3'
- resps = self.get_resps()
- assert sum(resps) == 100, 'no weight 4 sum'
- assert max(resps) - min(resps) <= self.cpu_count, 'no weight 4'
+ assert 'success' in client.conf(
+ {}, 'upstreams/one/servers/127.0.0.1:7083'
+ ), 'no weight server new'
- resps = self.get_resps_sc(req=30)
- assert resps[0] == 10, 'no weight 4 0'
- assert resps[1] == 10, 'no weight 4 1'
- assert resps[2] == 10, 'no weight 4 2'
+ resps = get_resps()
+ assert sum(resps) == 100, 'no weight 4 sum'
+ assert max(resps) - min(resps) <= client.cpu_count, 'no weight 4'
- def test_upstreams_rr_weight(self):
- assert 'success' in self.conf(
- {"weight": 3}, 'upstreams/one/servers/127.0.0.1:7081'
- ), 'configure weight'
+ resps = get_resps_sc(req=30)
+ assert resps[0] == 10, 'no weight 4 0'
+ assert resps[1] == 10, 'no weight 4 1'
+ assert resps[2] == 10, 'no weight 4 2'
- resps = self.get_resps_sc()
- assert resps[0] == 75, 'weight 3 0'
- assert resps[1] == 25, 'weight 3 1'
- assert 'success' in self.conf_delete(
- 'upstreams/one/servers/127.0.0.1:7081/weight'
- ), 'configure weight remove'
- resps = self.get_resps_sc(req=10)
- assert resps[0] == 5, 'weight 0 0'
- assert resps[1] == 5, 'weight 0 1'
+def test_upstreams_rr_weight():
+ assert 'success' in client.conf(
+ {"weight": 3}, 'upstreams/one/servers/127.0.0.1:7081'
+ ), 'configure weight'
- assert 'success' in self.conf(
- '1', 'upstreams/one/servers/127.0.0.1:7081/weight'
- ), 'configure weight 1'
+ resps = get_resps_sc()
+ assert resps[0] == 75, 'weight 3 0'
+ assert resps[1] == 25, 'weight 3 1'
- resps = self.get_resps_sc()
- assert resps[0] == 50, 'weight 1 0'
- assert resps[1] == 50, 'weight 1 1'
+ assert 'success' in client.conf_delete(
+ 'upstreams/one/servers/127.0.0.1:7081/weight'
+ ), 'configure weight remove'
+ resps = get_resps_sc(req=10)
+ assert resps[0] == 5, 'weight 0 0'
+ assert resps[1] == 5, 'weight 0 1'
- assert 'success' in self.conf(
- {
- "127.0.0.1:7081": {"weight": 3},
- "127.0.0.1:7083": {"weight": 2},
- },
- 'upstreams/one/servers',
- ), 'configure weight 2'
+ assert 'success' in client.conf(
+ '1', 'upstreams/one/servers/127.0.0.1:7081/weight'
+ ), 'configure weight 1'
- resps = self.get_resps_sc()
- assert resps[0] == 60, 'weight 2 0'
- assert resps[2] == 40, 'weight 2 1'
+ resps = get_resps_sc()
+ assert resps[0] == 50, 'weight 1 0'
+ assert resps[1] == 50, 'weight 1 1'
- def test_upstreams_rr_weight_rational(self):
- def set_weights(w1, w2):
- assert 'success' in self.conf(
- {
- "127.0.0.1:7081": {"weight": w1},
- "127.0.0.1:7082": {"weight": w2},
- },
- 'upstreams/one/servers',
- ), 'configure weights'
-
- def check_reqs(w1, w2, reqs=10):
- resps = self.get_resps_sc(req=reqs)
- assert resps[0] == reqs * w1 / (w1 + w2), 'weight 1'
- assert resps[1] == reqs * w2 / (w1 + w2), 'weight 2'
-
- def check_weights(w1, w2):
- set_weights(w1, w2)
- check_reqs(w1, w2)
-
- check_weights(0, 1)
- check_weights(0, 999999.0123456)
- check_weights(1, 9)
- check_weights(100000, 900000)
- check_weights(1, 0.25)
- check_weights(1, 0.25)
- check_weights(0.2, 0.8)
- check_weights(1, 1.5)
- check_weights(1e-3, 1e-3)
- check_weights(1e-20, 1e-20)
- check_weights(1e4, 1e4)
- check_weights(1000000, 1000000)
-
- set_weights(0.25, 0.25)
- assert 'success' in self.conf_delete(
- 'upstreams/one/servers/127.0.0.1:7081/weight'
- ), 'delete weight'
- check_reqs(1, 0.25)
-
- assert 'success' in self.conf(
+ assert 'success' in client.conf(
+ {
+ "127.0.0.1:7081": {"weight": 3},
+ "127.0.0.1:7083": {"weight": 2},
+ },
+ 'upstreams/one/servers',
+ ), 'configure weight 2'
+
+ resps = get_resps_sc()
+ assert resps[0] == 60, 'weight 2 0'
+ assert resps[2] == 40, 'weight 2 1'
+
+
+def test_upstreams_rr_weight_rational():
+ def set_weights(w1, w2):
+ assert 'success' in client.conf(
{
- "127.0.0.1:7081": {"weight": 0.1},
- "127.0.0.1:7082": {"weight": 1},
- "127.0.0.1:7083": {"weight": 0.9},
+ "127.0.0.1:7081": {"weight": w1},
+ "127.0.0.1:7082": {"weight": w2},
},
'upstreams/one/servers',
), 'configure weights'
- resps = self.get_resps_sc(req=20)
- assert resps[0] == 1, 'weight 3 1'
- assert resps[1] == 10, 'weight 3 2'
- assert resps[2] == 9, 'weight 3 3'
-
- def test_upstreams_rr_independent(self):
- def sum_resps(*args):
- sum = [0] * len(args[0])
- for arg in args:
- sum = [x + y for x, y in zip(sum, arg)]
-
- return sum
-
- resps = self.get_resps_sc(req=30, port=7090)
- assert resps[0] == 15, 'dep two before 0'
- assert resps[1] == 15, 'dep two before 1'
-
- resps = self.get_resps_sc(req=30)
- assert resps[0] == 15, 'dep one before 0'
- assert resps[1] == 15, 'dep one before 1'
-
- assert 'success' in self.conf(
- '2', 'upstreams/two/servers/127.0.0.1:7081/weight'
- ), 'configure dep weight'
-
- resps = self.get_resps_sc(req=30, port=7090)
- assert resps[0] == 20, 'dep two 0'
- assert resps[1] == 10, 'dep two 1'
-
- resps = self.get_resps_sc(req=30)
- assert resps[0] == 15, 'dep one 0'
- assert resps[1] == 15, 'dep one 1'
-
- assert 'success' in self.conf(
- '1', 'upstreams/two/servers/127.0.0.1:7081/weight'
- ), 'configure dep weight 1'
-
- r_one, r_two = [0, 0], [0, 0]
- for _ in range(10):
- r_one = sum_resps(r_one, self.get_resps(req=10))
- r_two = sum_resps(r_two, self.get_resps(req=10, port=7090))
-
- assert sum(r_one) == 100, 'dep one mix sum'
- assert abs(r_one[0] - r_one[1]) <= self.cpu_count, 'dep one mix'
- assert sum(r_two) == 100, 'dep two mix sum'
- assert abs(r_two[0] - r_two[1]) <= self.cpu_count, 'dep two mix'
-
- def test_upstreams_rr_delay(self):
- delayed_dir = f'{option.test_dir}/python/delayed'
- assert 'success' in self.conf(
- {
- "listeners": {
- "*:7080": {"pass": "upstreams/one"},
- "*:7081": {"pass": "routes"},
- "*:7082": {"pass": "routes"},
- },
- "upstreams": {
- "one": {
- "servers": {
- "127.0.0.1:7081": {},
- "127.0.0.1:7082": {},
- },
- },
- },
- "routes": [
- {
- "match": {"destination": "*:7081"},
- "action": {"pass": "applications/delayed"},
- },
- {
- "match": {"destination": "*:7082"},
- "action": {"return": 201},
+
+ def check_reqs(w1, w2, reqs=10):
+ resps = get_resps_sc(req=reqs)
+ assert resps[0] == reqs * w1 / (w1 + w2), 'weight 1'
+ assert resps[1] == reqs * w2 / (w1 + w2), 'weight 2'
+
+ def check_weights(w1, w2):
+ set_weights(w1, w2)
+ check_reqs(w1, w2)
+
+ check_weights(0, 1)
+ check_weights(0, 999999.0123456)
+ check_weights(1, 9)
+ check_weights(100000, 900000)
+ check_weights(1, 0.25)
+ check_weights(1, 0.25)
+ check_weights(0.2, 0.8)
+ check_weights(1, 1.5)
+ check_weights(1e-3, 1e-3)
+ check_weights(1e-20, 1e-20)
+ check_weights(1e4, 1e4)
+ check_weights(1000000, 1000000)
+
+ set_weights(0.25, 0.25)
+ assert 'success' in client.conf_delete(
+ 'upstreams/one/servers/127.0.0.1:7081/weight'
+ ), 'delete weight'
+ check_reqs(1, 0.25)
+
+ assert 'success' in client.conf(
+ {
+ "127.0.0.1:7081": {"weight": 0.1},
+ "127.0.0.1:7082": {"weight": 1},
+ "127.0.0.1:7083": {"weight": 0.9},
+ },
+ 'upstreams/one/servers',
+ ), 'configure weights'
+ resps = get_resps_sc(req=20)
+ assert resps[0] == 1, 'weight 3 1'
+ assert resps[1] == 10, 'weight 3 2'
+ assert resps[2] == 9, 'weight 3 3'
+
+
+def test_upstreams_rr_independent():
+ def sum_resps(*args):
+ sum_r = [0] * len(args[0])
+ for arg in args:
+ sum_r = [x + y for x, y in zip(sum_r, arg)]
+
+ return sum_r
+
+ resps = get_resps_sc(req=30, port=7090)
+ assert resps[0] == 15, 'dep two before 0'
+ assert resps[1] == 15, 'dep two before 1'
+
+ resps = get_resps_sc(req=30)
+ assert resps[0] == 15, 'dep one before 0'
+ assert resps[1] == 15, 'dep one before 1'
+
+ assert 'success' in client.conf(
+ '2', 'upstreams/two/servers/127.0.0.1:7081/weight'
+ ), 'configure dep weight'
+
+ resps = get_resps_sc(req=30, port=7090)
+ assert resps[0] == 20, 'dep two 0'
+ assert resps[1] == 10, 'dep two 1'
+
+ resps = get_resps_sc(req=30)
+ assert resps[0] == 15, 'dep one 0'
+ assert resps[1] == 15, 'dep one 1'
+
+ assert 'success' in client.conf(
+ '1', 'upstreams/two/servers/127.0.0.1:7081/weight'
+ ), 'configure dep weight 1'
+
+ r_one, r_two = [0, 0], [0, 0]
+ for _ in range(10):
+ r_one = sum_resps(r_one, get_resps(req=10))
+ r_two = sum_resps(r_two, get_resps(req=10, port=7090))
+
+ assert sum(r_one) == 100, 'dep one mix sum'
+ assert abs(r_one[0] - r_one[1]) <= client.cpu_count, 'dep one mix'
+ assert sum(r_two) == 100, 'dep two mix sum'
+ assert abs(r_two[0] - r_two[1]) <= client.cpu_count, 'dep two mix'
+
+
+def test_upstreams_rr_delay():
+ delayed_dir = f'{option.test_dir}/python/delayed'
+ assert 'success' in client.conf(
+ {
+ "listeners": {
+ "*:7080": {"pass": "upstreams/one"},
+ "*:7081": {"pass": "routes"},
+ "*:7082": {"pass": "routes"},
+ },
+ "upstreams": {
+ "one": {
+ "servers": {
+ "127.0.0.1:7081": {},
+ "127.0.0.1:7082": {},
},
- ],
- "applications": {
- "delayed": {
- "type": self.get_application_type(),
- "processes": {"spare": 0},
- "path": delayed_dir,
- "working_directory": delayed_dir,
- "module": "wsgi",
- }
},
},
- ), 'upstreams initial configuration'
-
- req = 50
-
- socks = []
- for i in range(req):
- delay = 1 if i % 5 == 0 else 0
- sock = self.get(
- headers={
- 'Host': 'localhost',
- 'Content-Length': '0',
- 'X-Delay': str(delay),
- 'Connection': 'close',
+ "routes": [
+ {
+ "match": {"destination": "*:7081"},
+ "action": {"pass": "applications/delayed"},
},
- no_recv=True,
- )
- socks.append(sock)
+ {
+ "match": {"destination": "*:7082"},
+ "action": {"return": 201},
+ },
+ ],
+ "applications": {
+ "delayed": {
+ "type": client.get_application_type(),
+ "processes": {"spare": 0},
+ "path": delayed_dir,
+ "working_directory": delayed_dir,
+ "module": "wsgi",
+ }
+ },
+ },
+ ), 'upstreams initial configuration'
+
+ req = 50
+
+ socks = []
+ for i in range(req):
+ delay = 1 if i % 5 == 0 else 0
+ sock = client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Content-Length': '0',
+ 'X-Delay': str(delay),
+ 'Connection': 'close',
+ },
+ no_recv=True,
+ )
+ socks.append(sock)
+
+ resps = [0, 0]
+ for i in range(req):
+ resp = client.recvall(socks[i]).decode()
+ socks[i].close()
- resps = [0, 0]
- for i in range(req):
- resp = self.recvall(socks[i]).decode()
- socks[i].close()
+ m = re.search(r'HTTP/1.1 20(\d)', resp)
+ assert m is not None, 'status'
+ resps[int(m.group(1))] += 1
- m = re.search(r'HTTP/1.1 20(\d)', resp)
- assert m is not None, 'status'
- resps[int(m.group(1))] += 1
+ assert sum(resps) == req, 'delay sum'
+ assert abs(resps[0] - resps[1]) <= client.cpu_count, 'delay'
- assert sum(resps) == req, 'delay sum'
- assert abs(resps[0] - resps[1]) <= self.cpu_count, 'delay'
- def test_upstreams_rr_active_req(self):
- conns = 5
- socks = []
- socks2 = []
+def test_upstreams_rr_active_req():
+ conns = 5
+ socks = []
+ socks2 = []
- for _ in range(conns):
- sock = self.get(no_recv=True)
- socks.append(sock)
+ for _ in range(conns):
+ sock = client.get(no_recv=True)
+ socks.append(sock)
- sock2 = self.http(
- b"""POST / HTTP/1.1
+ sock2 = client.http(
+ b"""POST / HTTP/1.1
Host: localhost
Content-Length: 10
Connection: close
""",
- no_recv=True,
- raw=True,
- )
- socks2.append(sock2)
-
- # Send one more request and read response to make sure that previous
- # requests had enough time to reach server.
-
- assert self.get()['body'] == ''
-
- assert 'success' in self.conf(
- {"127.0.0.1:7083": {"weight": 2}},
- 'upstreams/one/servers',
- ), 'active req new server'
- assert 'success' in self.conf_delete(
- 'upstreams/one/servers/127.0.0.1:7083'
- ), 'active req server remove'
- assert 'success' in self.conf_delete(
- 'listeners/*:7080'
- ), 'delete listener'
- assert 'success' in self.conf_delete(
- 'upstreams/one'
- ), 'active req upstream remove'
-
- for i in range(conns):
- assert (
- self.http(b'', sock=socks[i], raw=True)['body'] == ''
- ), 'active req GET'
-
- assert (
- self.http(b"""0123456789""", sock=socks2[i], raw=True)['body']
- == ''
- ), 'active req POST'
-
- def test_upstreams_rr_bad_server(self):
- assert 'success' in self.conf(
- {"weight": 1}, 'upstreams/one/servers/127.0.0.1:7084'
- ), 'configure bad server'
-
- resps = self.get_resps_sc(req=30)
- assert resps[0] == 10, 'bad server 0'
- assert resps[1] == 10, 'bad server 1'
- assert sum(resps) == 20, 'bad server sum'
-
- def test_upstreams_rr_pipeline(self):
- resps = self.get_resps_sc()
-
- assert resps[0] == 50, 'pipeline 0'
- assert resps[1] == 50, 'pipeline 1'
-
- def test_upstreams_rr_post(self):
- resps = [0, 0]
- for _ in range(50):
- resps[self.get()['status'] % 10] += 1
- resps[self.post(body='0123456789')['status'] % 10] += 1
-
- assert sum(resps) == 100, 'post sum'
- assert abs(resps[0] - resps[1]) <= self.cpu_count, 'post'
-
- def test_upstreams_rr_unix(self, temp_dir):
- addr_0 = f'{temp_dir}/sock_0'
- addr_1 = f'{temp_dir}/sock_1'
-
- assert 'success' in self.conf(
- {
- "*:7080": {"pass": "upstreams/one"},
- f"unix:{addr_0}": {"pass": "routes/one"},
- f"unix:{addr_1}": {"pass": "routes/two"},
- },
- 'listeners',
- ), 'configure listeners unix'
-
- assert 'success' in self.conf(
- {f"unix:{addr_0}": {}, f"unix:{addr_1}": {}},
- 'upstreams/one/servers',
- ), 'configure servers unix'
-
- resps = self.get_resps_sc()
-
- assert resps[0] == 50, 'unix 0'
- assert resps[1] == 50, 'unix 1'
-
- def test_upstreams_rr_ipv6(self):
- assert 'success' in self.conf(
- {
- "*:7080": {"pass": "upstreams/one"},
- "[::1]:7081": {"pass": "routes/one"},
- "[::1]:7082": {"pass": "routes/two"},
- },
- 'listeners',
- ), 'configure listeners ipv6'
-
- assert 'success' in self.conf(
- {"[::1]:7081": {}, "[::1]:7082": {}}, 'upstreams/one/servers'
- ), 'configure servers ipv6'
-
- resps = self.get_resps_sc()
+ no_recv=True,
+ raw=True,
+ )
+ socks2.append(sock2)
- assert resps[0] == 50, 'ipv6 0'
- assert resps[1] == 50, 'ipv6 1'
+ # Send one more request and read the response to make sure that the
+ # previous requests had enough time to reach the server.
- def test_upstreams_rr_servers_empty(self):
- assert 'success' in self.conf(
- {}, 'upstreams/one/servers'
- ), 'configure servers empty'
- assert self.get()['status'] == 502, 'servers empty'
+ assert client.get()['body'] == ''
- assert 'success' in self.conf(
- {"127.0.0.1:7081": {"weight": 0}}, 'upstreams/one/servers'
- ), 'configure servers empty one'
- assert self.get()['status'] == 502, 'servers empty one'
- assert 'success' in self.conf(
- {
- "127.0.0.1:7081": {"weight": 0},
- "127.0.0.1:7082": {"weight": 0},
- },
- 'upstreams/one/servers',
- ), 'configure servers empty two'
- assert self.get()['status'] == 502, 'servers empty two'
-
- def test_upstreams_rr_invalid(self):
- assert 'error' in self.conf({}, 'upstreams'), 'upstreams empty'
- assert 'error' in self.conf(
- {}, 'upstreams/one'
- ), 'named upstreams empty'
- assert 'error' in self.conf(
- {}, 'upstreams/one/servers/127.0.0.1'
- ), 'invalid address'
- assert 'error' in self.conf(
- {}, 'upstreams/one/servers/127.0.0.1:7081/blah'
- ), 'invalid server option'
-
- def check_weight(w):
- assert 'error' in self.conf(
- w, 'upstreams/one/servers/127.0.0.1:7081/weight'
- ), 'invalid weight option'
-
- check_weight({})
- check_weight('-1')
- check_weight('1.')
- check_weight('1.1.')
- check_weight('.')
- check_weight('.01234567890123')
- check_weight('1000001')
- check_weight('2e6')
+ assert 'success' in client.conf(
+ {"127.0.0.1:7083": {"weight": 2}},
+ 'upstreams/one/servers',
+ ), 'active req new server'
+ assert 'success' in client.conf_delete(
+ 'upstreams/one/servers/127.0.0.1:7083'
+ ), 'active req server remove'
+ assert 'success' in client.conf_delete(
+ 'listeners/*:7080'
+ ), 'delete listener'
+ assert 'success' in client.conf_delete(
+ 'upstreams/one'
+ ), 'active req upstream remove'
+
+ for i in range(conns):
+ assert (
+ client.http(b'', sock=socks[i], raw=True)['body'] == ''
+ ), 'active req GET'
+
+ assert (
+ client.http(b"""0123456789""", sock=socks2[i], raw=True)['body']
+ == ''
+ ), 'active req POST'
+
+
+def test_upstreams_rr_bad_server():
+ assert 'success' in client.conf(
+ {"weight": 1}, 'upstreams/one/servers/127.0.0.1:7084'
+ ), 'configure bad server'
+
+ resps = get_resps_sc(req=30)
+ assert resps[0] == 10, 'bad server 0'
+ assert resps[1] == 10, 'bad server 1'
+ assert sum(resps) == 20, 'bad server sum'
+
+
+def test_upstreams_rr_pipeline():
+ resps = get_resps_sc()
+
+ assert resps[0] == 50, 'pipeline 0'
+ assert resps[1] == 50, 'pipeline 1'
+
+
+def test_upstreams_rr_post():
+ resps = [0, 0]
+ for _ in range(50):
+ resps[client.get()['status'] % 10] += 1
+ resps[client.post(body='0123456789')['status'] % 10] += 1
+
+ assert sum(resps) == 100, 'post sum'
+ assert abs(resps[0] - resps[1]) <= client.cpu_count, 'post'
+
+
+def test_upstreams_rr_unix(temp_dir):
+ addr_0 = f'{temp_dir}/sock_0'
+ addr_1 = f'{temp_dir}/sock_1'
+
+ assert 'success' in client.conf(
+ {
+ "*:7080": {"pass": "upstreams/one"},
+ f"unix:{addr_0}": {"pass": "routes/one"},
+ f"unix:{addr_1}": {"pass": "routes/two"},
+ },
+ 'listeners',
+ ), 'configure listeners unix'
+
+ assert 'success' in client.conf(
+ {f"unix:{addr_0}": {}, f"unix:{addr_1}": {}},
+ 'upstreams/one/servers',
+ ), 'configure servers unix'
+
+ resps = get_resps_sc()
+
+ assert resps[0] == 50, 'unix 0'
+ assert resps[1] == 50, 'unix 1'
+
+
+def test_upstreams_rr_ipv6():
+ assert 'success' in client.conf(
+ {
+ "*:7080": {"pass": "upstreams/one"},
+ "[::1]:7081": {"pass": "routes/one"},
+ "[::1]:7082": {"pass": "routes/two"},
+ },
+ 'listeners',
+ ), 'configure listeners ipv6'
+
+ assert 'success' in client.conf(
+ {"[::1]:7081": {}, "[::1]:7082": {}}, 'upstreams/one/servers'
+ ), 'configure servers ipv6'
+
+ resps = get_resps_sc()
+
+ assert resps[0] == 50, 'ipv6 0'
+ assert resps[1] == 50, 'ipv6 1'
+
+
+def test_upstreams_rr_servers_empty():
+ assert 'success' in client.conf(
+ {}, 'upstreams/one/servers'
+ ), 'configure servers empty'
+ assert client.get()['status'] == 502, 'servers empty'
+
+ assert 'success' in client.conf(
+ {"127.0.0.1:7081": {"weight": 0}}, 'upstreams/one/servers'
+ ), 'configure servers empty one'
+ assert client.get()['status'] == 502, 'servers empty one'
+ assert 'success' in client.conf(
+ {
+ "127.0.0.1:7081": {"weight": 0},
+ "127.0.0.1:7082": {"weight": 0},
+ },
+ 'upstreams/one/servers',
+ ), 'configure servers empty two'
+ assert client.get()['status'] == 502, 'servers empty two'
+
+
+def test_upstreams_rr_invalid():
+ assert 'error' in client.conf({}, 'upstreams'), 'upstreams empty'
+ assert 'error' in client.conf({}, 'upstreams/one'), 'named upstreams empty'
+ assert 'error' in client.conf(
+ {}, 'upstreams/one/servers/127.0.0.1'
+ ), 'invalid address'
+ assert 'error' in client.conf(
+ {}, 'upstreams/one/servers/127.0.0.1:7081/blah'
+ ), 'invalid server option'
+
+ def check_weight(w):
+ assert 'error' in client.conf(
+ w, 'upstreams/one/servers/127.0.0.1:7081/weight'
+ ), 'invalid weight option'
+
+ check_weight({})
+ check_weight('-1')
+ check_weight('1.')
+ check_weight('1.1.')
+ check_weight('.')
+ check_weight('.01234567890123')
+ check_weight('1000001')
+ check_weight('2e6')
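
A second recurring piece, visible in the test_upstreams_rr.py hunks above, is that per-test setup moves from a setup_method() on the class to an autouse pytest fixture at module level, so every test in the file still starts from a known configuration without needing a test base class. A condensed sketch of the mechanism, with the upstreams configuration shortened to a single route and a hypothetical test function standing in for the real tests:

    import pytest
    from unit.applications.lang.python import ApplicationPython

    prerequisites = {'modules': {'python': 'any'}}

    client = ApplicationPython()

    @pytest.fixture(autouse=True)
    def setup_method_fixture():
        # applied automatically before every test function in this module
        assert 'success' in client.conf(
            {
                "listeners": {"*:7080": {"pass": "routes"}},
                "routes": [{"action": {"return": 200}}],
                "applications": {},
            }
        ), 'initial configuration'

    def test_initial_config_applied():
        # hypothetical check: the fixture has already loaded the configuration
        assert client.get()['status'] == 200
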
diff --git a/test/test_usr1.py b/test/test_usr1.py
index 4bff0242..ce756fc0 100644
--- a/test/test_usr1.py
+++ b/test/test_usr1.py
@@ -1,87 +1,87 @@
import os
import signal
-from unit.applications.lang.python import TestApplicationPython
+from unit.applications.lang.python import ApplicationPython
from unit.log import Log
from unit.utils import waitforfiles
+prerequisites = {'modules': {'python': 'any'}}
-class TestUSR1(TestApplicationPython):
- prerequisites = {'modules': {'python': 'any'}}
+client = ApplicationPython()
- def test_usr1_access_log(self, temp_dir, unit_pid):
- self.load('empty')
- log = 'access.log'
- log_new = 'new.log'
- log_path = f'{temp_dir}/{log}'
+def test_usr1_access_log(search_in_file, temp_dir, unit_pid, wait_for_record):
+ client.load('empty')
- assert 'success' in self.conf(
- f'"{log_path}"', 'access_log'
- ), 'access log configure'
+ log = 'access.log'
+ log_new = 'new.log'
+ log_path = f'{temp_dir}/{log}'
- assert waitforfiles(log_path), 'open'
+ assert 'success' in client.conf(
+ f'"{log_path}"', 'access_log'
+ ), 'access log configure'
- os.rename(log_path, f'{temp_dir}/{log_new}')
+ assert waitforfiles(log_path), 'open'
- assert self.get()['status'] == 200
+ os.rename(log_path, f'{temp_dir}/{log_new}')
- assert (
- self.wait_for_record(r'"GET / HTTP/1.1" 200 0 "-" "-"', log_new)
- is not None
- ), 'rename new'
- assert not os.path.isfile(log_path), 'rename old'
+ assert client.get()['status'] == 200
- os.kill(unit_pid, signal.SIGUSR1)
+ assert (
+ wait_for_record(r'"GET / HTTP/1.1" 200 0 "-" "-"', log_new) is not None
+ ), 'rename new'
+ assert not os.path.isfile(log_path), 'rename old'
- assert waitforfiles(log_path), 'reopen'
+ os.kill(unit_pid, signal.SIGUSR1)
- assert self.get(url='/usr1')['status'] == 200
+ assert waitforfiles(log_path), 'reopen'
- assert (
- self.wait_for_record(r'"GET /usr1 HTTP/1.1" 200 0 "-" "-"', log)
- is not None
- ), 'reopen 2'
- assert self.search_in_log(r'/usr1', log_new) is None, 'rename new 2'
+ assert client.get(url='/usr1')['status'] == 200
- def test_usr1_unit_log(self, temp_dir, unit_pid):
- self.load('log_body')
+ assert (
+ wait_for_record(r'"GET /usr1 HTTP/1.1" 200 0 "-" "-"', log) is not None
+ ), 'reopen 2'
+ assert search_in_file(r'/usr1', log_new) is None, 'rename new 2'
- log_new = 'new.log'
- log_path = f'{temp_dir}/unit.log'
- log_path_new = f'{temp_dir}/{log_new}'
- os.rename(log_path, log_path_new)
+def test_usr1_unit_log(search_in_file, temp_dir, unit_pid, wait_for_record):
+ client.load('log_body')
- Log.swap(log_new)
+ log_new = 'new.log'
+ log_path = f'{temp_dir}/unit.log'
+ log_path_new = f'{temp_dir}/{log_new}'
+
+ os.rename(log_path, log_path_new)
- try:
- body = 'body_for_a_log_new\n'
- assert self.post(body=body)['status'] == 200
+ Log.swap(log_new)
- assert self.wait_for_record(body, log_new) is not None, 'rename new'
- assert not os.path.isfile(log_path), 'rename old'
+ try:
+ body = 'body_for_a_log_new\n'
+ assert client.post(body=body)['status'] == 200
- os.kill(unit_pid, signal.SIGUSR1)
+ assert wait_for_record(body, log_new) is not None, 'rename new'
+ assert not os.path.isfile(log_path), 'rename old'
- assert waitforfiles(log_path), 'reopen'
+ os.kill(unit_pid, signal.SIGUSR1)
- body = 'body_for_a_log_unit\n'
- assert self.post(body=body)['status'] == 200
+ assert waitforfiles(log_path), 'reopen'
- assert self.wait_for_record(body) is not None, 'rename new'
- assert self.search_in_log(body, log_new) is None, 'rename new 2'
+ body = 'body_for_a_log_unit\n'
+ assert client.post(body=body)['status'] == 200
- finally:
- # merge two log files into unit.log to check alerts
+ assert wait_for_record(body) is not None, 'rename new'
+ assert search_in_file(body, log_new) is None, 'rename new 2'
- with open(log_path, 'r', errors='ignore') as unit_log:
- log = unit_log.read()
+ finally:
+ # merge the two log files into unit.log to check alerts
- with open(log_path, 'w') as unit_log, open(
- log_path_new, 'r', errors='ignore'
- ) as unit_log_new:
- unit_log.write(unit_log_new.read())
- unit_log.write(log)
+ with open(log_path, 'r', errors='ignore') as unit_log:
+ log = unit_log.read()
- Log.swap(log_new)
+ with open(log_path, 'w') as unit_log, open(
+ log_path_new, 'r', errors='ignore'
+ ) as unit_log_new:
+ unit_log.write(unit_log_new.read())
+ unit_log.write(log)
+
+ Log.swap(log_new)
diff --git a/test/test_variables.py b/test/test_variables.py
index 545d61e9..c9b173fa 100644
--- a/test/test_variables.py
+++ b/test/test_variables.py
@@ -1,391 +1,520 @@
+import os
+from pathlib import Path
import re
import time
-from unit.applications.proto import TestApplicationProto
+import pytest
+from unit.applications.proto import ApplicationProto
+from unit.applications.lang.python import ApplicationPython
from unit.option import option
+client = ApplicationProto()
+client_python = ApplicationPython()
-class TestVariables(TestApplicationProto):
- prerequisites = {}
- def setup_method(self):
- assert 'success' in self.conf(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [{"action": {"return": 200}}],
- },
- ), 'configure routes'
-
- def set_format(self, format):
- assert 'success' in self.conf(
- {
- 'path': f'{option.temp_dir}/access.log',
- 'format': format,
- },
- 'access_log',
- ), 'access_log format'
+@pytest.fixture(autouse=True)
+def setup_method_fixture():
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [{"action": {"return": 200}}],
+ },
+ ), 'configure routes'
- def wait_for_record(self, pattern, name='access.log'):
- return super().wait_for_record(pattern, name)
- def search_in_log(self, pattern, name='access.log'):
- return super().search_in_log(pattern, name)
+def set_format(format):
+ assert 'success' in client.conf(
+ {
+ 'path': f'{option.temp_dir}/access.log',
+ 'format': format,
+ },
+ 'access_log',
+ ), 'access_log format'
- def test_variables_dollar(self):
- assert 'success' in self.conf("301", 'routes/0/action/return')
- def check_dollar(location, expect):
- assert 'success' in self.conf(
- f'"{location}"',
- 'routes/0/action/location',
- )
- assert self.get()['headers']['Location'] == expect
+def test_variables_dollar():
+ assert 'success' in client.conf("301", 'routes/0/action/return')
- check_dollar(
- 'https://${host}${uri}path${dollar}dollar',
- 'https://localhost/path$dollar',
+ def check_dollar(location, expect):
+ assert 'success' in client.conf(
+ f'"{location}"',
+ 'routes/0/action/location',
)
- check_dollar('path$dollar${dollar}', 'path$$')
+ assert client.get()['headers']['Location'] == expect
- def test_variables_request_time(self):
- self.set_format('$uri $request_time')
+ check_dollar(
+ 'https://${host}${uri}path${dollar}dollar',
+ 'https://localhost/path$dollar',
+ )
+ check_dollar('path$dollar${dollar}', 'path$$')
- sock = self.http(b'', raw=True, no_recv=True)
- time.sleep(1)
+def test_variables_request_time(wait_for_record):
+ set_format('$uri $request_time')
- assert self.get(url='/r_time_1', sock=sock)['status'] == 200
- assert self.wait_for_record(r'\/r_time_1 0\.\d{3}') is not None
+ sock = client.http(b'', raw=True, no_recv=True)
- sock = self.http(
- b"""G""",
- no_recv=True,
- raw=True,
- )
+ time.sleep(1)
+
+ assert client.get(url='/r_time_1', sock=sock)['status'] == 200
+ assert wait_for_record(r'\/r_time_1 0\.\d{3}', 'access.log') is not None
- time.sleep(2)
+ sock = client.http(
+ b"""G""",
+ no_recv=True,
+ raw=True,
+ )
- self.http(
- b"""ET /r_time_2 HTTP/1.1
+ time.sleep(2)
+
+ client.http(
+ b"""ET /r_time_2 HTTP/1.1
Host: localhost
Connection: close
""",
- sock=sock,
- raw=True,
+ sock=sock,
+ raw=True,
+ )
+ assert wait_for_record(r'\/r_time_2 [1-9]\.\d{3}', 'access.log') is not None
+
+
+def test_variables_method(search_in_file, wait_for_record):
+ set_format('$method')
+
+ reg = r'^GET$'
+ assert search_in_file(reg, 'access.log') is None
+ assert client.get()['status'] == 200
+ assert wait_for_record(reg, 'access.log') is not None, 'method GET'
+
+ reg = r'^POST$'
+ assert search_in_file(reg, 'access.log') is None
+ assert client.post()['status'] == 200
+ assert wait_for_record(reg, 'access.log') is not None, 'method POST'
+
+
+def test_variables_request_uri(search_in_file, wait_for_record):
+ set_format('$request_uri')
+
+ def check_request_uri(req_uri):
+ reg = fr'^{re.escape(req_uri)}$'
+
+ assert search_in_file(reg, 'access.log') is None
+ assert client.get(url=req_uri)['status'] == 200
+ assert wait_for_record(reg, 'access.log') is not None
+
+ check_request_uri('/3')
+ check_request_uri('/4*')
+ check_request_uri('/4%2A')
+ check_request_uri('/9?q#a')
+
+
+def test_variables_uri(search_in_file, wait_for_record):
+ set_format('$uri')
+
+ def check_uri(uri, expect=None):
+ expect = uri if expect is None else expect
+ reg = fr'^{re.escape(expect)}$'
+
+ assert search_in_file(reg, 'access.log') is None
+ assert client.get(url=uri)['status'] == 200
+ assert wait_for_record(reg, 'access.log') is not None
+
+ check_uri('/3')
+ check_uri('/4*')
+ check_uri('/5%2A', '/5*')
+ check_uri('/9?q#a', '/9')
+
+
+def test_variables_uri_no_cache(temp_dir):
+ os.makedirs(f'{temp_dir}/foo/bar')
+ Path(f'{temp_dir}/foo/bar/index.html').write_text('index')
+
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "action": {
+ "rewrite": "/foo${uri}/",
+ "share": f'{temp_dir}$uri',
+ }
+ }
+ ],
+ }
+ )
+
+ assert client.get(url='/bar')['status'] == 200
+
+
+def test_variables_host(search_in_file, wait_for_record):
+ set_format('$host')
+
+ def check_host(host, expect=None):
+ expect = host if expect is None else expect
+ reg = fr'^{re.escape(expect)}$'
+
+ assert search_in_file(reg, 'access.log') is None
+ assert (
+ client.get(headers={'Host': host, 'Connection': 'close'})['status']
+ == 200
)
- assert self.wait_for_record(r'\/r_time_2 [1-9]\.\d{3}') is not None
-
- def test_variables_method(self):
- self.set_format('$method')
-
- reg = r'^GET$'
- assert self.search_in_log(reg) is None
- assert self.get()['status'] == 200
- assert self.wait_for_record(reg) is not None, 'method GET'
-
- reg = r'^POST$'
- assert self.search_in_log(reg) is None
- assert self.post()['status'] == 200
- assert self.wait_for_record(reg) is not None, 'method POST'
-
- def test_variables_request_uri(self):
- self.set_format('$request_uri')
-
- def check_request_uri(req_uri):
- reg = fr'^{re.escape(req_uri)}$'
-
- assert self.search_in_log(reg) is None
- assert self.get(url=req_uri)['status'] == 200
- assert self.wait_for_record(reg) is not None
-
- check_request_uri('/3')
- check_request_uri('/4*')
- check_request_uri('/4%2A')
- check_request_uri('/9?q#a')
-
- def test_variables_uri(self):
- self.set_format('$uri')
-
- def check_uri(uri, expect=None):
- expect = uri if expect is None else expect
- reg = fr'^{re.escape(expect)}$'
-
- assert self.search_in_log(reg) is None
- assert self.get(url=uri)['status'] == 200
- assert self.wait_for_record(reg) is not None
-
- check_uri('/3')
- check_uri('/4*')
- check_uri('/5%2A', '/5*')
- check_uri('/9?q#a', '/9')
-
- def test_variables_host(self):
- self.set_format('$host')
-
- def check_host(host, expect=None):
- expect = host if expect is None else expect
- reg = fr'^{re.escape(expect)}$'
-
- assert self.search_in_log(reg) is None
- assert (
- self.get(headers={'Host': host, 'Connection': 'close'})[
- 'status'
- ]
- == 200
- )
- assert self.wait_for_record(reg) is not None
-
- check_host('localhost')
- check_host('localhost1.', 'localhost1')
- check_host('localhost2:7080', 'localhost2')
- check_host('.localhost')
- check_host('www.localhost')
-
- def test_variables_remote_addr(self):
- self.set_format('$remote_addr')
-
- assert self.get()['status'] == 200
- assert self.wait_for_record(r'^127\.0\.0\.1$') is not None
-
- assert 'success' in self.conf(
- {"[::1]:7080": {"pass": "routes"}}, 'listeners'
+ assert wait_for_record(reg, 'access.log') is not None
+
+ check_host('localhost')
+ check_host('localhost1.', 'localhost1')
+ check_host('localhost2:7080', 'localhost2')
+ check_host('.localhost')
+ check_host('www.localhost')
+
+
+def test_variables_remote_addr(search_in_file, wait_for_record):
+ set_format('$remote_addr')
+
+ assert client.get()['status'] == 200
+ assert wait_for_record(r'^127\.0\.0\.1$', 'access.log') is not None
+
+ assert 'success' in client.conf(
+ {"[::1]:7080": {"pass": "routes"}}, 'listeners'
+ )
+
+ reg = r'^::1$'
+ assert search_in_file(reg, 'access.log') is None
+ assert client.get(sock_type='ipv6')['status'] == 200
+ assert wait_for_record(reg, 'access.log') is not None
+
+
+def test_variables_time_local(
+ date_to_sec_epoch, search_in_file, wait_for_record
+):
+ set_format('$uri $time_local $uri')
+
+ assert search_in_file(r'/time_local', 'access.log') is None
+ assert client.get(url='/time_local')['status'] == 200
+ assert wait_for_record(r'/time_local', 'access.log') is not None, 'time log'
+ date = search_in_file(r'^\/time_local (.*) \/time_local$', 'access.log')[1]
+ assert (
+ abs(
+ date_to_sec_epoch(date, '%d/%b/%Y:%X %z')
+ - time.mktime(time.localtime())
)
+ < 5
+ ), '$time_local'
+
- reg = r'^::1$'
- assert self.search_in_log(reg) is None
- assert self.get(sock_type='ipv6')['status'] == 200
- assert self.wait_for_record(reg) is not None
+def test_variables_request_line(search_in_file, wait_for_record):
+ set_format('$request_line')
- def test_variables_time_local(self):
- self.set_format('$uri $time_local $uri')
+ reg = r'^GET \/r_line HTTP\/1\.1$'
+ assert search_in_file(reg, 'access.log') is None
+ assert client.get(url='/r_line')['status'] == 200
+ assert wait_for_record(reg, 'access.log') is not None
- assert self.search_in_log(r'/time_local') is None
- assert self.get(url='/time_local')['status'] == 200
- assert self.wait_for_record(r'/time_local') is not None, 'time log'
- date = self.search_in_log(
- r'^\/time_local (.*) \/time_local$', 'access.log'
- )[1]
+
+def test_variables_status(search_in_file, wait_for_record):
+ set_format('$status')
+
+ assert 'success' in client.conf("418", 'routes/0/action/return')
+
+ reg = r'^418$'
+ assert search_in_file(reg, 'access.log') is None
+ assert client.get()['status'] == 418
+ assert wait_for_record(reg, 'access.log') is not None
+
+
+def test_variables_header_referer(search_in_file, wait_for_record):
+ set_format('$method $header_referer')
+
+ def check_referer(referer):
+ reg = fr'^GET {re.escape(referer)}$'
+
+ assert search_in_file(reg, 'access.log') is None
assert (
- abs(
- self.date_to_sec_epoch(date, '%d/%b/%Y:%X %z')
- - time.mktime(time.localtime())
- )
- < 5
- ), '$time_local'
-
- def test_variables_request_line(self):
- self.set_format('$request_line')
-
- reg = r'^GET \/r_line HTTP\/1\.1$'
- assert self.search_in_log(reg) is None
- assert self.get(url='/r_line')['status'] == 200
- assert self.wait_for_record(reg) is not None
-
- def test_variables_status(self):
- self.set_format('$status')
-
- assert 'success' in self.conf("418", 'routes/0/action/return')
-
- reg = r'^418$'
- assert self.search_in_log(reg) is None
- assert self.get()['status'] == 418
- assert self.wait_for_record(reg) is not None
-
- def test_variables_header_referer(self):
- self.set_format('$method $header_referer')
-
- def check_referer(referer):
- reg = fr'^GET {re.escape(referer)}$'
-
- assert self.search_in_log(reg) is None
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Connection': 'close',
- 'Referer': referer,
- }
- )['status']
- == 200
- )
- assert self.wait_for_record(reg) is not None
-
- check_referer('referer-value')
- check_referer('')
- check_referer('no')
-
- def test_variables_header_user_agent(self):
- self.set_format('$method $header_user_agent')
-
- def check_user_agent(user_agent):
- reg = fr'^GET {re.escape(user_agent)}$'
-
- assert self.search_in_log(reg) is None
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Connection': 'close',
- 'User-Agent': user_agent,
- }
- )['status']
- == 200
- )
- assert self.wait_for_record(reg) is not None
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ 'Referer': referer,
+ }
+ )['status']
+ == 200
+ )
+ assert wait_for_record(reg, 'access.log') is not None
- check_user_agent('MSIE')
- check_user_agent('')
- check_user_agent('no')
+ check_referer('referer-value')
+ check_referer('')
+ check_referer('no')
- def test_variables_many(self):
- def check_vars(uri, expect):
- reg = fr'^{re.escape(expect)}$'
- assert self.search_in_log(reg) is None
- assert self.get(url=uri)['status'] == 200
- assert self.wait_for_record(reg) is not None
+def test_variables_header_user_agent(search_in_file, wait_for_record):
+ set_format('$method $header_user_agent')
- self.set_format('$uri$method')
- check_vars('/1', '/1GET')
+ def check_user_agent(user_agent):
+ reg = fr'^GET {re.escape(user_agent)}$'
- self.set_format('${uri}${method}')
- check_vars('/2', '/2GET')
+ assert search_in_file(reg, 'access.log') is None
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Connection': 'close',
+ 'User-Agent': user_agent,
+ }
+ )['status']
+ == 200
+ )
+ assert wait_for_record(reg, 'access.log') is not None
+
+ check_user_agent('MSIE')
+ check_user_agent('')
+ check_user_agent('no')
+
+
+def test_variables_many(search_in_file, wait_for_record):
+ def check_vars(uri, expect):
+ reg = fr'^{re.escape(expect)}$'
+
+ assert search_in_file(reg, 'access.log') is None
+ assert client.get(url=uri)['status'] == 200
+ assert wait_for_record(reg, 'access.log') is not None
+
+ set_format('$uri$method')
+ check_vars('/1', '/1GET')
+
+ set_format('${uri}${method}')
+ check_vars('/2', '/2GET')
+
+ set_format('${uri}$method')
+ check_vars('/3', '/3GET')
+
+ set_format('$method$method')
+ check_vars('/', 'GETGET')
+
+
+def test_variables_dynamic(wait_for_record):
+ set_format('$header_foo$cookie_foo$arg_foo')
+
+ assert (
+ client.get(
+ url='/?foo=h',
+ headers={'Foo': 'b', 'Cookie': 'foo=la', 'Connection': 'close'},
+ )['status']
+ == 200
+ )
+ assert wait_for_record(r'^blah$', 'access.log') is not None
+
+
+def test_variables_dynamic_arguments(search_in_file, wait_for_record):
+ def check_arg(url, expect=None):
+ expect = url if expect is None else expect
+ reg = fr'^{re.escape(expect)}$'
+
+ assert search_in_file(reg, 'access.log') is None
+ assert client.get(url=url)['status'] == 200
+ assert wait_for_record(reg, 'access.log') is not None
+
+ def check_no_arg(url):
+ assert client.get(url=url)['status'] == 200
+ assert search_in_file(r'^0$', 'access.log') is None
- self.set_format('${uri}$method')
- check_vars('/3', '/3GET')
+ set_format('$arg_foo_bar')
+ check_arg('/?foo_bar=1', '1')
+ check_arg('/?foo_b%61r=2', '2')
+ check_arg('/?bar&foo_bar=3&foo', '3')
+ check_arg('/?foo_bar=l&foo_bar=4', '4')
+ check_no_arg('/')
+ check_no_arg('/?foo_bar=')
+ check_no_arg('/?Foo_bar=0')
+ check_no_arg('/?foo-bar=0')
+ check_no_arg('/?foo_bar=0&foo_bar=l')
- self.set_format('$method$method')
- check_vars('/', 'GETGET')
+ set_format('$arg_foo_b%61r')
+ check_no_arg('/?foo_b=0')
+ check_no_arg('/?foo_bar=0')
- def test_variables_dynamic(self):
- self.set_format('$header_foo$cookie_foo$arg_foo')
+ set_format('$arg_f!~')
+ check_no_arg('/?f=0')
+ check_no_arg('/?f!~=0')
+
+def test_variables_dynamic_headers(search_in_file, wait_for_record):
+ def check_header(header, value):
+ reg = fr'^{value}$'
+
+ assert search_in_file(reg, 'access.log') is None
+ assert (
+ client.get(headers={header: value, 'Connection': 'close'})['status']
+ == 200
+ )
+ assert wait_for_record(reg, 'access.log') is not None
+
+ def check_no_header(header):
assert (
- self.get(
- url='/?foo=h',
- headers={'Foo': 'b', 'Cookie': 'foo=la', 'Connection': 'close'},
+ client.get(headers={header: '0', 'Connection': 'close'})['status']
+ == 200
+ )
+ assert search_in_file(r'^0$', 'access.log') is None
+
+ set_format('$header_foo_bar')
+ check_header('foo-bar', '1')
+ check_header('Foo-Bar', '2')
+ check_no_header('foo_bar')
+ check_no_header('foobar')
+
+ set_format('$header_Foo_Bar')
+ check_header('Foo-Bar', '4')
+ check_header('foo-bar', '5')
+ check_no_header('foo_bar')
+ check_no_header('foobar')
+
+
+def test_variables_dynamic_cookies(search_in_file, wait_for_record):
+ def check_no_cookie(cookie):
+ assert (
+ client.get(
+ headers={
+ 'Host': 'localhost',
+ 'Cookie': cookie,
+ 'Connection': 'close',
+ },
)['status']
== 200
)
- assert self.wait_for_record(r'^blah$') is not None
-
- def test_variables_dynamic_arguments(self):
- def check_arg(url, expect=None):
- expect = url if expect is None else expect
- reg = fr'^{re.escape(expect)}$'
-
- assert self.search_in_log(reg) is None
- assert self.get(url=url)['status'] == 200
- assert self.wait_for_record(reg) is not None
-
- def check_no_arg(url):
- assert self.get(url=url)['status'] == 200
- assert self.search_in_log(r'^0$') is None
-
- self.set_format('$arg_foo_bar')
- check_arg('/?foo_bar=1', '1')
- check_arg('/?foo_b%61r=2', '2')
- check_arg('/?bar&foo_bar=3&foo', '3')
- check_arg('/?foo_bar=l&foo_bar=4', '4')
- check_no_arg('/')
- check_no_arg('/?foo_bar=')
- check_no_arg('/?Foo_bar=0')
- check_no_arg('/?foo-bar=0')
- check_no_arg('/?foo_bar=0&foo_bar=l')
-
- self.set_format('$arg_foo_b%61r')
- check_no_arg('/?foo_b=0')
- check_no_arg('/?foo_bar=0')
-
- self.set_format('$arg_f!~')
- check_no_arg('/?f=0')
- check_no_arg('/?f!~=0')
-
- def test_variables_dynamic_headers(self):
- def check_header(header, value):
- reg = fr'^{value}$'
-
- assert self.search_in_log(reg) is None
- assert (
- self.get(headers={header: value, 'Connection': 'close'})[
- 'status'
- ]
- == 200
- )
- assert self.wait_for_record(reg) is not None
-
- def check_no_header(header):
- assert (
- self.get(headers={header: '0', 'Connection': 'close'})['status']
- == 200
- )
- assert self.search_in_log(r'^0$') is None
-
- self.set_format('$header_foo_bar')
- check_header('foo-bar', '1')
- check_header('Foo-Bar', '2')
- check_no_header('foo_bar')
- check_no_header('foobar')
-
- self.set_format('$header_Foo_Bar')
- check_header('Foo-Bar', '4')
- check_header('foo-bar', '5')
- check_no_header('foo_bar')
- check_no_header('foobar')
-
- def test_variables_dynamic_cookies(self):
- def check_no_cookie(cookie):
- assert (
- self.get(
- headers={
- 'Host': 'localhost',
- 'Cookie': cookie,
- 'Connection': 'close',
- },
- )['status']
- == 200
- )
- assert self.search_in_log(r'^0$') is None
-
- self.set_format('$cookie_foo_bar')
-
- reg = r'^1$'
- assert self.search_in_log(reg) is None
- self.get(
+ assert search_in_file(r'^0$', 'access.log') is None
+
+ set_format('$cookie_foo_bar')
+
+ reg = r'^1$'
+ assert search_in_file(reg, 'access.log') is None
+ assert (
+ client.get(
headers={
'Host': 'localhost',
'Cookie': 'foo_bar=1',
'Connection': 'close',
},
- )['status'] == 200
- assert self.wait_for_record(reg) is not None
+ )['status']
+ == 200
+ )
+ assert wait_for_record(reg, 'access.log') is not None
+
+ check_no_cookie('fOo_bar=0')
+ check_no_cookie('foo_bar=')
+
+
+def test_variables_response_header(temp_dir, wait_for_record):
+ # If the response has two headers with the same name, only the first
+ # value is stored in the variable.
+ # $response_header_transfer_encoding can only be 'chunked' or null.
+
+ # return
+
+ set_format(
+ 'return@$response_header_server@$response_header_date@'
+ '$response_header_content_length@$response_header_connection'
+ )
+
+ assert client.get()['status'] == 200
+ assert (
+ wait_for_record(r'return@Unit/.*@.*GMT@0@close', 'access.log')
+ is not None
+ )
+
+ # share
- check_no_cookie('fOo_bar=0')
- check_no_cookie('foo_bar=')
+ Path(f'{temp_dir}/foo').mkdir()
+ Path(f'{temp_dir}/foo/index.html').write_text('index')
- def test_variables_invalid(self):
- def check_variables(format):
- assert 'error' in self.conf(
+ assert 'success' in client.conf(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
{
- 'path': f'{option.temp_dir}/access.log',
- 'format': format,
- },
- 'access_log',
- ), 'access_log format'
-
- check_variables("$")
- check_variables("${")
- check_variables("${}")
- check_variables("$ur")
- check_variables("$uri$$host")
- check_variables("$uriblah")
- check_variables("${uri")
- check_variables("${{uri}")
- check_variables("$ar")
- check_variables("$arg")
- check_variables("$arg_")
- check_variables("$cookie")
- check_variables("$cookie_")
- check_variables("$header")
- check_variables("$header_")
+ "action": {
+ "share": f'{temp_dir}$uri',
+ }
+ }
+ ],
+ }
+ )
+
+ set_format(
+ 'share@$response_header_last_modified@$response_header_etag@'
+ '$response_header_content_type@$response_header_server@'
+ '$response_header_date@$response_header_content_length@'
+ '$response_header_connection'
+ )
+
+ assert client.get(url='/foo/index.html')['status'] == 200
+ assert (
+ wait_for_record(
+ r'share@.*GMT@".*"@text/html@Unit/.*@.*GMT@5@close', 'access.log'
+ )
+ is not None
+ )
+
+ # redirect
+
+ set_format(
+ 'redirect@$response_header_location@$response_header_server@'
+ '$response_header_date@$response_header_content_length@'
+ '$response_header_connection'
+ )
+
+ assert client.get(url='/foo')['status'] == 301
+ assert (
+ wait_for_record(r'redirect@/foo/@Unit/.*@.*GMT@0@close', 'access.log')
+ is not None
+ )
+
+ # error
+
+ set_format(
+ 'error@$response_header_content_type@$response_header_server@'
+ '$response_header_date@$response_header_content_length@'
+ '$response_header_connection'
+ )
+
+ assert client.get(url='/blah')['status'] == 404
+ assert (
+ wait_for_record(r'error@text/html@Unit/.*@.*GMT@54@close', 'access.log')
+ is not None
+ )
+
+
+def test_variables_response_header_application(require, wait_for_record):
+ require({'modules': {'python': 'any'}})
+
+ client_python.load('chunked')
+
+ set_format('$uri@$response_header_transfer_encoding')
+
+ assert client_python.get(url='/1')['status'] == 200
+ assert wait_for_record(r'/1@chunked', 'access.log') is not None
+
+
+def test_variables_invalid(temp_dir):
+ def check_variables(format):
+ assert 'error' in client.conf(
+ {
+ 'path': f'{temp_dir}/access.log',
+ 'format': format,
+ },
+ 'access_log',
+ ), 'access_log format'
+
+ check_variables("$")
+ check_variables("${")
+ check_variables("${}")
+ check_variables("$ur")
+ check_variables("$uri$$host")
+ check_variables("$uriblah")
+ check_variables("${uri")
+ check_variables("${{uri}")
+ check_variables("$ar")
+ check_variables("$arg")
+ check_variables("$arg_")
+ check_variables("$cookie")
+ check_variables("$cookie_")
+ check_variables("$header")
+ check_variables("$header_")
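
For context, the `set_format()` helper called throughout the rewritten tests is defined earlier in this test module and is not part of this hunk. The sketch below is a hypothetical reconstruction of what such a helper presumably does, built only from the `client.conf()` call and `temp_dir` fixture visible above; the real helper likely closes over those fixtures instead of taking them as parameters.

```python
# Hypothetical sketch only: the real set_format() lives outside this hunk
# and may differ.  It points the access log at a file inside the test's
# temp_dir and applies the requested format string via the control API.
def set_format(fmt, client, temp_dir):
    assert 'success' in client.conf(
        {
            'path': f'{temp_dir}/access.log',
            'format': fmt,  # e.g. '$uri@$response_header_transfer_encoding'
        },
        'access_log',
    )
```
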
diff --git a/test/unit/applications/lang/go.py b/test/unit/applications/lang/go.py
index 557753a4..93e0738b 100644
--- a/test/unit/applications/lang/go.py
+++ b/test/unit/applications/lang/go.py
@@ -2,11 +2,11 @@ import os
import shutil
import subprocess
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
-class TestApplicationGo(TestApplicationProto):
+class ApplicationGo(ApplicationProto):
@staticmethod
def prepare_env(script, name='app', static=False):
try:
@@ -88,7 +88,7 @@ replace unit.nginx.org/go => {replace_path}
executable = f"/go/{name}"
static_build = True
- TestApplicationGo.prepare_env(script, name, static=static_build)
+ ApplicationGo.prepare_env(script, name, static=static_build)
conf = {
"listeners": {"*:7080": {"pass": f"applications/{script}"}},
diff --git a/test/unit/applications/lang/java.py b/test/unit/applications/lang/java.py
index b6382cfe..a253aea5 100644
--- a/test/unit/applications/lang/java.py
+++ b/test/unit/applications/lang/java.py
@@ -4,12 +4,13 @@ import shutil
import subprocess
import pytest
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
-class TestApplicationJava(TestApplicationProto):
- application_type = "java"
+class ApplicationJava(ApplicationProto):
+ def __init__(self, application_type='java'):
+ self.application_type = application_type
def prepare_env(self, script):
app_path = f'{option.temp_dir}/java'
@@ -52,7 +53,7 @@ class TestApplicationJava(TestApplicationProto):
os.makedirs(classes_path)
classpath = (
- f'{option.current_dir}/build/tomcat-servlet-api-9.0.70.jar'
+ f'{option.current_dir}/build/tomcat-servlet-api-9.0.75.jar'
)
ws_jars = glob.glob(
diff --git a/test/unit/applications/lang/node.py b/test/unit/applications/lang/node.py
index 87d5a19c..4f18c780 100644
--- a/test/unit/applications/lang/node.py
+++ b/test/unit/applications/lang/node.py
@@ -1,14 +1,15 @@
import shutil
from urllib.parse import quote
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
from unit.utils import public_dir
-class TestApplicationNode(TestApplicationProto):
- application_type = "node"
- es_modules = False
+class ApplicationNode(ApplicationProto):
+ def __init__(self, application_type='node', es_modules=False):
+ self.application_type = application_type
+ self.es_modules = es_modules
def prepare_env(self, script):
# copy application
diff --git a/test/unit/applications/lang/perl.py b/test/unit/applications/lang/perl.py
index 19852363..037e98e8 100644
--- a/test/unit/applications/lang/perl.py
+++ b/test/unit/applications/lang/perl.py
@@ -1,9 +1,10 @@
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
-class TestApplicationPerl(TestApplicationProto):
- application_type = "perl"
+class ApplicationPerl(ApplicationProto):
+ def __init__(self, application_type='perl'):
+ self.application_type = application_type
def load(self, script, name='psgi.pl', **kwargs):
script_path = f'{option.test_dir}/perl/{script}'
diff --git a/test/unit/applications/lang/php.py b/test/unit/applications/lang/php.py
index 1b94c3ae..b9b6dbf1 100644
--- a/test/unit/applications/lang/php.py
+++ b/test/unit/applications/lang/php.py
@@ -1,12 +1,13 @@
import os
import shutil
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
-class TestApplicationPHP(TestApplicationProto):
- application_type = "php"
+class ApplicationPHP(ApplicationProto):
+ def __init__(self, application_type='php'):
+ self.application_type = application_type
def load(self, script, index='index.php', **kwargs):
script_path = f'{option.test_dir}/php/{script}'
diff --git a/test/unit/applications/lang/python.py b/test/unit/applications/lang/python.py
index 0bb69992..4e1fd897 100644
--- a/test/unit/applications/lang/python.py
+++ b/test/unit/applications/lang/python.py
@@ -2,13 +2,14 @@ import os
import shutil
from urllib.parse import quote
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
-class TestApplicationPython(TestApplicationProto):
- application_type = "python"
- load_module = "wsgi"
+class ApplicationPython(ApplicationProto):
+ def __init__(self, application_type='python', load_module='wsgi'):
+ self.application_type = application_type
+ self.load_module = load_module
def load(self, script, name=None, module=None, **kwargs):
if name is None:
diff --git a/test/unit/applications/lang/ruby.py b/test/unit/applications/lang/ruby.py
index e0712fc6..f6c4f6c3 100644
--- a/test/unit/applications/lang/ruby.py
+++ b/test/unit/applications/lang/ruby.py
@@ -1,12 +1,13 @@
import shutil
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
from unit.utils import public_dir
-class TestApplicationRuby(TestApplicationProto):
- application_type = "ruby"
+class ApplicationRuby(ApplicationProto):
+ def __init__(self, application_type='ruby'):
+ self.application_type = application_type
def prepare_env(self, script):
shutil.copytree(
diff --git a/test/unit/applications/proto.py b/test/unit/applications/proto.py
index f04ee408..7a1636c6 100644
--- a/test/unit/applications/proto.py
+++ b/test/unit/applications/proto.py
@@ -1,41 +1,12 @@
import os
-import re
-import time
-from unit.control import TestControl
-from unit.log import Log
+from unit.control import Control
from unit.option import option
-class TestApplicationProto(TestControl):
+class ApplicationProto(Control):
application_type = None
- def sec_epoch(self):
- return time.mktime(time.gmtime())
-
- def date_to_sec_epoch(self, date, template='%a, %d %b %Y %X %Z'):
- return time.mktime(time.strptime(date, template))
-
- def findall(self, pattern, name='unit.log', flags=re.M):
- with Log.open(name) as f:
- return re.findall(pattern, f.read(), flags)
-
- def search_in_log(self, pattern, name='unit.log', flags=re.M):
- with Log.open(name) as f:
- return re.search(pattern, f.read(), flags)
-
- def wait_for_record(self, pattern, name='unit.log', wait=150, flags=re.M):
- with Log.open(name) as f:
- for i in range(wait):
- found = re.search(pattern, f.read(), flags)
-
- if found is not None:
- break
-
- time.sleep(0.1)
-
- return found
-
def get_application_type(self):
current_test = (
os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
diff --git a/test/unit/applications/tls.py b/test/unit/applications/tls.py
index e5813312..e9bcc514 100644
--- a/test/unit/applications/tls.py
+++ b/test/unit/applications/tls.py
@@ -2,15 +2,15 @@ import os
import ssl
import subprocess
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
from unit.option import option
-class TestApplicationTLS(TestApplicationProto):
- def setup_method(self):
- self.context = ssl.create_default_context()
- self.context.check_hostname = False
- self.context.verify_mode = ssl.CERT_NONE
+class ApplicationTLS(ApplicationProto):
+ def __init__(self):
+ self._default_context = ssl.create_default_context()
+ self._default_context.check_hostname = False
+ self._default_context.verify_mode = ssl.CERT_NONE
def certificate(self, name='default', load=True):
self.openssl_conf()
@@ -47,10 +47,12 @@ class TestApplicationTLS(TestApplicationProto):
return self.conf(k.read() + c.read(), f'/certificates/{crt}')
def get_ssl(self, **kwargs):
- return self.get(wrapper=self.context.wrap_socket, **kwargs)
+ context = kwargs.get('context', self._default_context)
+ return self.get(wrapper=context.wrap_socket, **kwargs)
def post_ssl(self, **kwargs):
- return self.post(wrapper=self.context.wrap_socket, **kwargs)
+ context = kwargs.get('context', self._default_context)
+ return self.post(wrapper=context.wrap_socket, **kwargs)
def openssl_conf(self, rewrite=False, alt_names=None):
alt_names = alt_names or []
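
The `get_ssl()`/`post_ssl()` change above replaces the single shared `self.context` with an optional per-call context that falls back to a default built in `__init__()`. A minimal usage sketch, assuming a Unit instance with a TLS listener on port 7080 has already been configured by the usual fixtures:

```python
# Minimal sketch of the new per-call context support in ApplicationTLS.
import ssl

from unit.applications.tls import ApplicationTLS

client = ApplicationTLS()

ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
ctx.minimum_version = ssl.TLSVersion.TLSv1_2  # per-call tweak, for illustration

# 'context' is picked up by get_ssl(); omit it to fall back to the shared
# default context created in __init__().
resp = client.get_ssl(context=ctx, port=7080)
```
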
diff --git a/test/unit/applications/websockets.py b/test/unit/applications/websockets.py
index a4b9287d..29725943 100644
--- a/test/unit/applications/websockets.py
+++ b/test/unit/applications/websockets.py
@@ -6,12 +6,12 @@ import select
import struct
import pytest
-from unit.applications.proto import TestApplicationProto
+from unit.applications.proto import ApplicationProto
GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
-class TestApplicationWebsocket(TestApplicationProto):
+class ApplicationWebsocket(ApplicationProto):
OP_CONT = 0x00
OP_TEXT = 0x01
diff --git a/test/unit/check/check_prerequisites.py b/test/unit/check/check_prerequisites.py
new file mode 100644
index 00000000..44c3f10f
--- /dev/null
+++ b/test/unit/check/check_prerequisites.py
@@ -0,0 +1,63 @@
+import pytest
+from unit.option import option
+
+
+def check_prerequisites(prerequisites):
+ if 'privileged_user' in prerequisites:
+ if prerequisites['privileged_user'] and not option.is_privileged:
+ pytest.skip(
+ 'privileged user required',
+ allow_module_level=True,
+ )
+ elif not prerequisites['privileged_user'] and option.is_privileged:
+ pytest.skip(
+ 'unprivileged user required',
+ allow_module_level=True,
+ )
+
+ missed = []
+
+ # check modules
+
+ if 'modules' in prerequisites:
+ available = option.available['modules']
+
+ for module in prerequisites['modules']:
+ if module in available and available[module]:
+ continue
+
+ missed.append(module)
+
+ if missed:
+ pytest.skip(
+ f'Unit has no {", ".join(missed)} module(s)',
+ allow_module_level=True,
+ )
+
+ # check features
+
+ if 'features' in prerequisites:
+ available = option.available['features']
+ require = prerequisites['features']
+
+ for feature in require:
+            avail_feature = available.get(feature)
+
+ if feature in available and avail_feature:
+ if isinstance(require[feature], list) and isinstance(
+ avail_feature, dict
+ ):
+ avail_keys = avail_feature.keys()
+
+ for key in require[feature]:
+ if key not in avail_keys:
+ missed.append(f'{feature}/{key}')
+ continue
+
+ missed.append(feature)
+
+ if missed:
+ pytest.skip(
+ f'{", ".join(missed)} feature(s) not supported',
+ allow_module_level=True,
+ )
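
The new `check_prerequisites()` consumes a plain dict with optional `privileged_user`, `modules`, and `features` keys. The example below is illustrative only; the concrete module and feature names are assumptions, not taken from a real test module.

```python
# Illustrative only: a prerequisites dict shaped the way check_prerequisites()
# above consumes it.
from unit.check.check_prerequisites import check_prerequisites

prerequisites = {
    'privileged_user': False,                    # skip when running as root
    'modules': {'python': 'any'},                # Unit must have these modules
    'features': {'isolation': ['user', 'pid']},  # required feature keys
}

# Skips the whole test module (allow_module_level=True) when something
# from the dict is missing from option.available.
check_prerequisites(prerequisites)
```
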
diff --git a/test/unit/check/chroot.py b/test/unit/check/chroot.py
index 1b7aae90..b749fab6 100644
--- a/test/unit/check/chroot.py
+++ b/test/unit/check/chroot.py
@@ -1,32 +1,30 @@
import json
-from unit.http import TestHTTP
+from unit.http import HTTP1
from unit.option import option
-http = TestHTTP()
+http = HTTP1()
def check_chroot():
- available = option.available
-
- resp = http.put(
- url='/config',
- sock_type='unix',
- addr=f'{option.temp_dir}/control.unit.sock',
- body=json.dumps(
- {
- "listeners": {"*:7080": {"pass": "routes"}},
- "routes": [
- {
- "action": {
- "share": option.temp_dir,
- "chroot": option.temp_dir,
+ return (
+ 'success'
+ in http.put(
+ url='/config',
+ sock_type='unix',
+ addr=f'{option.temp_dir}/control.unit.sock',
+ body=json.dumps(
+ {
+ "listeners": {"*:7080": {"pass": "routes"}},
+ "routes": [
+ {
+ "action": {
+ "share": option.temp_dir,
+ "chroot": option.temp_dir,
+ }
}
- }
- ],
- }
- ),
+ ],
+ }
+ ),
+ )['body']
)
-
- if 'success' in resp['body']:
- available['features']['chroot'] = True
diff --git a/test/unit/check/discover_available.py b/test/unit/check/discover_available.py
new file mode 100644
index 00000000..0942581b
--- /dev/null
+++ b/test/unit/check/discover_available.py
@@ -0,0 +1,47 @@
+import subprocess
+import sys
+
+from unit.check.chroot import check_chroot
+from unit.check.go import check_go
+from unit.check.isolation import check_isolation
+from unit.check.njs import check_njs
+from unit.check.node import check_node
+from unit.check.regex import check_regex
+from unit.check.tls import check_openssl
+from unit.check.unix_abstract import check_unix_abstract
+from unit.log import Log
+from unit.option import option
+
+
+def discover_available(unit):
+ output_version = subprocess.check_output(
+ [unit['unitd'], '--version'], stderr=subprocess.STDOUT
+ ).decode()
+
+ # wait for controller start
+
+ if Log.wait_for_record(r'controller started') is None:
+ Log.print_log()
+ sys.exit("controller didn't start")
+
+ # discover modules from log file
+
+ for module in Log.findall(r'module: ([a-zA-Z]+) (.*) ".*"$'):
+ versions = option.available['modules'].setdefault(module[0], [])
+ if module[1] not in versions:
+ versions.append(module[1])
+
+ # discover modules using check
+
+ option.available['modules']['go'] = check_go()
+ option.available['modules']['njs'] = check_njs(output_version)
+ option.available['modules']['node'] = check_node()
+ option.available['modules']['openssl'] = check_openssl(output_version)
+ option.available['modules']['regex'] = check_regex(output_version)
+
+ # Discover features using check. Features should be discovered after
+ # modules since some features can require modules.
+
+ option.available['features']['chroot'] = check_chroot()
+ option.available['features']['isolation'] = check_isolation()
+ option.available['features']['unix_abstract'] = check_unix_abstract()
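
`discover_available()` only fills in `option.available`, which `check_prerequisites()` later consults. Roughly, the resulting structure looks like the sketch below; the concrete versions and namespace values are made up for the example.

```python
# Illustrative shape of option.available after discover_available() runs.
available = {
    'modules': {
        'python': ['3.11.4'],    # versions parsed from "module: ..." log lines
        'node': ['20.5.0'],      # result of check_node()
        'go': True,              # truthy results of the other check_* helpers
        'njs': True,
        'regex': True,
        'openssl': True,
    },
    'features': {
        'chroot': True,
        'unix_abstract': True,
        'isolation': {           # dict returned by check_isolation()
            'user': ...,         # namespace values from getns()
            'pid': ...,
            'unprivileged_userns_clone': True,
        },
    },
}
```
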
diff --git a/test/unit/check/go.py b/test/unit/check/go.py
index 09ae641d..1ecd429b 100644
--- a/test/unit/check/go.py
+++ b/test/unit/check/go.py
@@ -1,6 +1,5 @@
-from unit.applications.lang.go import TestApplicationGo
+from unit.applications.lang.go import ApplicationGo
def check_go():
- if TestApplicationGo.prepare_env('empty') is not None:
- return True
+ return ApplicationGo.prepare_env('empty') is not None
diff --git a/test/unit/check/isolation.py b/test/unit/check/isolation.py
index 4ebce893..e4674f4d 100644
--- a/test/unit/check/isolation.py
+++ b/test/unit/check/isolation.py
@@ -1,25 +1,24 @@
import json
import os
-from unit.applications.lang.go import TestApplicationGo
-from unit.applications.lang.java import TestApplicationJava
-from unit.applications.lang.node import TestApplicationNode
-from unit.applications.lang.ruby import TestApplicationRuby
-from unit.http import TestHTTP
+from unit.applications.lang.go import ApplicationGo
+from unit.applications.lang.java import ApplicationJava
+from unit.applications.lang.node import ApplicationNode
+from unit.applications.lang.ruby import ApplicationRuby
+from unit.http import HTTP1
from unit.option import option
from unit.utils import getns
allns = ['pid', 'mnt', 'ipc', 'uts', 'cgroup', 'net']
-http = TestHTTP()
+http = HTTP1()
def check_isolation():
- test_conf = {"namespaces": {"credential": True}}
available = option.available
conf = ''
if 'go' in available['modules']:
- TestApplicationGo().prepare_env('empty', 'app')
+ ApplicationGo().prepare_env('empty', 'app')
conf = {
"listeners": {"*:7080": {"pass": "applications/empty"}},
@@ -65,7 +64,7 @@ def check_isolation():
}
elif 'ruby' in available['modules']:
- TestApplicationRuby().prepare_env('empty')
+ ApplicationRuby().prepare_env('empty')
conf = {
"listeners": {"*:7080": {"pass": "applications/empty"}},
@@ -81,7 +80,7 @@ def check_isolation():
}
elif 'java' in available['modules']:
- TestApplicationJava().prepare_env('empty')
+ ApplicationJava().prepare_env('empty')
conf = {
"listeners": {"*:7080": {"pass": "applications/empty"}},
@@ -98,7 +97,7 @@ def check_isolation():
}
elif 'node' in available['modules']:
- TestApplicationNode().prepare_env('basic')
+ ApplicationNode().prepare_env('basic')
conf = {
"listeners": {"*:7080": {"pass": "applications/basic"}},
@@ -128,7 +127,7 @@ def check_isolation():
}
else:
- return
+ return False
resp = http.put(
url='/config',
@@ -138,23 +137,23 @@ def check_isolation():
)
if 'success' not in resp['body']:
- return
+ return False
userns = getns('user')
if not userns:
- return
+ return False
- available['features']['isolation'] = {'user': userns}
+ isolation = {'user': userns}
unp_clone_path = '/proc/sys/kernel/unprivileged_userns_clone'
if os.path.exists(unp_clone_path):
with open(unp_clone_path, 'r') as f:
if str(f.read()).rstrip() == '1':
- available['features']['isolation'][
- 'unprivileged_userns_clone'
- ] = True
+ isolation['unprivileged_userns_clone'] = True
for ns in allns:
ns_value = getns(ns)
if ns_value:
- available['features']['isolation'][ns] = ns_value
+ isolation[ns] = ns_value
+
+ return isolation
diff --git a/test/unit/check/njs.py b/test/unit/check/njs.py
index 433473a1..363a1b62 100644
--- a/test/unit/check/njs.py
+++ b/test/unit/check/njs.py
@@ -2,5 +2,4 @@ import re
def check_njs(output_version):
- if re.search('--njs', output_version):
- return True
+ return re.search('--njs', output_version)
diff --git a/test/unit/check/node.py b/test/unit/check/node.py
index dd59e7a4..6a3d581f 100644
--- a/test/unit/check/node.py
+++ b/test/unit/check/node.py
@@ -1,10 +1,12 @@
import os
import subprocess
+from unit.option import option
-def check_node(current_dir):
- if not os.path.exists(f'{current_dir}/node/node_modules'):
- return None
+
+def check_node():
+ if not os.path.exists(f'{option.current_dir}/node/node_modules'):
+ return False
try:
v_bytes = subprocess.check_output(['/usr/bin/env', 'node', '-v'])
@@ -12,4 +14,4 @@ def check_node(current_dir):
return [str(v_bytes, 'utf-8').lstrip('v').rstrip()]
except subprocess.CalledProcessError:
- return None
+ return False
diff --git a/test/unit/check/regex.py b/test/unit/check/regex.py
index 51cf966b..83e93f2d 100644
--- a/test/unit/check/regex.py
+++ b/test/unit/check/regex.py
@@ -2,7 +2,4 @@ import re
def check_regex(output_version):
- if re.search('--no-regex', output_version):
- return False
-
- return True
+ return not re.search('--no-regex', output_version)
diff --git a/test/unit/check/tls.py b/test/unit/check/tls.py
index 53ce5ffc..9cc2a5f9 100644
--- a/test/unit/check/tls.py
+++ b/test/unit/check/tls.py
@@ -6,7 +6,6 @@ def check_openssl(output_version):
try:
subprocess.check_output(['which', 'openssl'])
except subprocess.CalledProcessError:
- return None
+ return False
- if re.search('--openssl', output_version):
- return True
+ return re.search('--openssl', output_version)
diff --git a/test/unit/check/unix_abstract.py b/test/unit/check/unix_abstract.py
index aadde43a..8fc7dd84 100644
--- a/test/unit/check/unix_abstract.py
+++ b/test/unit/check/unix_abstract.py
@@ -1,25 +1,25 @@
import json
-from unit.http import TestHTTP
+from unit.http import HTTP1
from unit.option import option
-http = TestHTTP()
+http = HTTP1()
def check_unix_abstract():
- available = option.available
-
- resp = http.put(
- url='/config',
- sock_type='unix',
- addr=f'{option.temp_dir}/control.unit.sock',
- body=json.dumps(
- {
- "listeners": {"unix:@sock": {"pass": "routes"}},
- "routes": [],
- }
- ),
+ return (
+ 'success'
+ in http.put(
+ url='/config',
+ sock_type='unix',
+ addr=f'{option.temp_dir}/control.unit.sock',
+ body=json.dumps(
+ {
+ "listeners": {
+ f'unix:@{option.temp_dir}/sock': {"pass": "routes"}
+ },
+ "routes": [],
+ }
+ ),
+ )['body']
)
-
- if 'success' in resp['body']:
- available['features']['unix_abstract'] = True
diff --git a/test/unit/control.py b/test/unit/control.py
index 61b6edf4..164d0e60 100644
--- a/test/unit/control.py
+++ b/test/unit/control.py
@@ -1,6 +1,6 @@
import json
-from unit.http import TestHTTP
+from unit.http import HTTP1
from unit.option import option
@@ -29,7 +29,7 @@ def args_handler(conf_func):
return args_wrapper
-class TestControl(TestHTTP):
+class Control(HTTP1):
@args_handler
def conf(self, conf, url):
return self.put(**self._get_args(url, conf))['body']
diff --git a/test/unit/http.py b/test/unit/http.py
index 6a267e26..347382f5 100644
--- a/test/unit/http.py
+++ b/test/unit/http.py
@@ -10,7 +10,7 @@ import pytest
from unit.option import option
-class TestHTTP:
+class HTTP1:
def http(self, start_str, **kwargs):
sock_type = kwargs.get('sock_type', 'ipv4')
port = kwargs.get('port', 7080)
diff --git a/test/unit/log.py b/test/unit/log.py
index f984d7a1..7d7e355a 100644
--- a/test/unit/log.py
+++ b/test/unit/log.py
@@ -1,23 +1,113 @@
+import os
+import re
+import sys
+import time
+
+from unit.option import option
+
UNIT_LOG = 'unit.log'
+def print_log_on_assert(func):
+ def inner_function(*args, **kwargs):
+ try:
+ func(*args, **kwargs)
+ except AssertionError as exception:
+ Log.print_log(*args, **kwargs)
+ raise exception
+
+ return inner_function
+
+
class Log:
- temp_dir = None
pos = {}
- def open(name=UNIT_LOG, encoding=None):
- f = open(Log.get_path(name), 'r', encoding=encoding, errors='ignore')
- f.seek(Log.pos.get(name, 0))
+ @staticmethod
+ @print_log_on_assert
+ def check_alerts(log=None):
+ if log is None:
+ log = Log.read()
+
+ found = False
+ alerts = re.findall(r'.+\[alert\].+', log)
+
+ if alerts:
+ found = True
- return f
+ if option.detailed:
+ print('\nAll alerts/sanitizer errors found in log:')
+ _ = [print(alert) for alert in alerts]
+ if option.skip_alerts:
+ for skip in option.skip_alerts:
+ alerts = [al for al in alerts if re.search(skip, al) is None]
+
+ assert not alerts, 'alert(s)'
+
+ if not option.skip_sanitizer:
+ sanitizer_errors = re.findall('.+Sanitizer.+', log)
+
+ assert not sanitizer_errors, 'sanitizer error(s)'
+
+ if found and option.detailed:
+ print('skipped.')
+
+ @staticmethod
+ def findall(pattern, name=UNIT_LOG, flags=re.M):
+ return re.findall(pattern, Log.read(name), flags)
+
+ @staticmethod
+ def get_path(name=UNIT_LOG):
+ return f'{option.temp_dir}/{name}'
+
+ @staticmethod
+ def open(name=UNIT_LOG, encoding='utf-8'):
+ file = open(Log.get_path(name), 'r', encoding=encoding, errors='ignore')
+ file.seek(Log.pos.get(name, 0))
+
+ return file
+
+ @staticmethod
+ def print_log(log=None):
+ Log.print_path()
+
+ if option.print_log:
+ os.set_blocking(sys.stdout.fileno(), True)
+ sys.stdout.flush()
+
+ if log is None:
+ log = Log.read()
+
+ sys.stdout.write(log)
+
+ @staticmethod
+ def print_path():
+ print(f'Path to {UNIT_LOG}:\n{Log.get_path()}\n')
+
+ @staticmethod
+ def read(*args, **kwargs):
+ with Log.open(*args, **kwargs) as file:
+ return file.read()
+
+ @staticmethod
def set_pos(pos, name=UNIT_LOG):
Log.pos[name] = pos
+ @staticmethod
def swap(name):
pos = Log.pos.get(UNIT_LOG, 0)
Log.pos[UNIT_LOG] = Log.pos.get(name, 0)
Log.pos[name] = pos
- def get_path(name=UNIT_LOG):
- return f'{Log.temp_dir}/{name}'
+ @staticmethod
+ def wait_for_record(pattern, name=UNIT_LOG, wait=150, flags=re.M):
+ with Log.open(name) as file:
+ for _ in range(wait):
+ found = re.search(pattern, file.read(), flags)
+
+ if found is not None:
+ break
+
+ time.sleep(0.1)
+
+ return found
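
The `Log` helpers above are plain static methods, so they can also be used directly outside the fixtures. A brief usage sketch, with patterns borrowed from `discover_available()` and shown purely for illustration:

```python
# Usage sketch for the static Log helpers defined above.
from unit.log import Log

# Poll unit.log for up to ~15s (wait=150 iterations of 0.1s) until the
# controller start is logged; dump the log on failure to aid debugging.
if Log.wait_for_record(r'controller started') is None:
    Log.print_log()

# Collect every "module: <name> <version> ..." line seen so far.
modules = Log.findall(r'module: ([a-zA-Z]+) (.*) ".*"$')
```
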
diff --git a/test/unit/option.py b/test/unit/option.py
index cb3803dc..ee1f46dd 100644
--- a/test/unit/option.py
+++ b/test/unit/option.py
@@ -1,7 +1,15 @@
+import os
+import platform
+
+
class Options:
_options = {
+ 'architecture': platform.architecture()[0],
+ 'available': {'modules': {}, 'features': {}},
+ 'is_privileged': os.geteuid() == 0,
'skip_alerts': [],
'skip_sanitizer': False,
+ 'system': platform.system(),
}
def __setattr__(self, name, value):
diff --git a/test/unit/status.py b/test/unit/status.py
index 17416f17..84c958a3 100644
--- a/test/unit/status.py
+++ b/test/unit/status.py
@@ -1,9 +1,9 @@
-from unit.control import TestControl
+from unit.control import Control
class Status:
_status = None
- control = TestControl()
+ control = Control()
def _check_zeros():
assert Status.control.conf_get('/status') == {
diff --git a/test/unit/utils.py b/test/unit/utils.py
index 985801e2..cd823e27 100644
--- a/test/unit/utils.py
+++ b/test/unit/utils.py
@@ -24,7 +24,7 @@ def public_dir(path):
def waitforfiles(*files, timeout=50):
- for i in range(timeout):
+ for _ in range(timeout):
wait = False
for f in files:
@@ -41,10 +41,10 @@ def waitforfiles(*files, timeout=50):
def waitforglob(pattern, count=1, timeout=50):
- for i in range(timeout):
+ for _ in range(timeout):
n = 0
- for f in glob.glob(pattern):
+ for _ in glob.glob(pattern):
n += 1
if n == count:
@@ -56,7 +56,7 @@ def waitforglob(pattern, count=1, timeout=50):
def waitforsocket(port):
- for i in range(50):
+ for _ in range(50):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
try:
sock.settimeout(5)
@@ -90,19 +90,8 @@ def findmnt():
return out
-def sysctl():
- try:
- out = subprocess.check_output(
- ['sysctl', '-a'], stderr=subprocess.STDOUT
- ).decode()
- except FileNotFoundError:
- pytest.skip('requires sysctl')
-
- return out
-
-
def waitformount(template, timeout=50):
- for i in range(timeout):
+ for _ in range(timeout):
if findmnt().find(template) != -1:
return True
@@ -112,7 +101,7 @@ def waitformount(template, timeout=50):
def waitforunmount(template, timeout=50):
- for i in range(timeout):
+ for _ in range(timeout):
if findmnt().find(template) == -1:
return True
diff --git a/tools/README.md b/tools/README.md
index f534aa1f..1a631e10 100644
--- a/tools/README.md
+++ b/tools/README.md
@@ -35,6 +35,7 @@ web page with NGINX Unit.
|---------|-|
| filename … | Read configuration data sequentially from the specified files instead of stdin.
| _HTTP method_ | It is usually not required to specify an HTTP method. `GET` is used to read the configuration. `PUT` is used when making configuration changes unless a specific method is provided.
+| `edit` | Opens **URI** in the default editor for interactive configuration. The [jq](https://stedolan.github.io/jq/) tool is required for this option.
| `INSERT` | A _virtual_ HTTP method that prepends data when the URI specifies an existing array. The [jq](https://stedolan.github.io/jq/) tool is required for this option.
| `-q` \| `--quiet` | No output to stdout.
diff --git a/tools/setup-unit b/tools/setup-unit
index de1d4f5f..38592fe3 100755
--- a/tools/setup-unit
+++ b/tools/setup-unit
@@ -30,16 +30,13 @@ test -v ZSH_VERSION \
export LC_ALL=C
-program_name="$0";
-prog_name="$(basename $program_name)";
-
dry_run='no';
help_unit()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name [-h] COMMAND [ARGS]
+ $0 [-h] COMMAND [ARGS]
Subcommands
├── repo-config [-hn] [PKG-MANAGER OS-NAME OS-VERSION]
@@ -49,7 +46,7 @@ DESCRIPTION
This script simplifies installing and configuring an NGINX Unit server
for first-time users.
- Run '$program_name COMMAND -h' for more information on a command.
+ Run '$0 COMMAND -h' for more information on a command.
COMMANDS
repo-config
@@ -75,11 +72,12 @@ help_more_unit()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name [-h] COMMAND [ARGS]
+ $0 [-h] COMMAND [ARGS]
Subcommands
├── cmd [-h]
├── ctl [-h] [-s SOCK] SUBCOMMAND [ARGS]
+ │   ├── edit [-h] PATH
│   ├── http [-h] [-c CURLOPT] METHOD PATH
│   └── insert [-h] PATH INDEX
├── freeport [-h]
@@ -87,6 +85,7 @@ SYNOPSIS
├── os-probe [-h]
├── ps [-h] [-t TYPE]
├── repo-config [-hn] [PKG-MANAGER OS-NAME OS-VERSION]
+ ├── restart [-hls]
├── sock [-h] SUBCOMMAND [ARGS]
│   ├── filter [-chs]
│   └── find [-h]
@@ -96,7 +95,7 @@ DESCRIPTION
This script simplifies installing and configuring
an NGINX Unit server for first-time users.
- Run '$program_name COMMAND -h' for more information on a command.
+ Run '$0 COMMAND -h' for more information on a command.
COMMANDS
cmd Print the invocation line of unitd(8).
@@ -137,12 +136,12 @@ __EOF__
warn()
{
- >&2 echo "$prog_name: error: $*";
+ >&2 echo "$(basename "$0"): error: $*";
}
err()
{
- >&2 echo "$prog_name: error: $*";
+ >&2 echo "$(basename "$0"): error: $*";
exit 1;
}
@@ -167,7 +166,7 @@ help_unit_cmd()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name cmd [-h]
+ $0 cmd [-h]
DESCRIPTION
Print the invocation line of running unitd(8) instances.
@@ -207,19 +206,22 @@ help_unit_ctl()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name ctl [-h] [-s SOCK] SUBCOMMAND [ARGS]
+ $0 ctl [-h] [-s SOCK] SUBCOMMAND [ARGS]
Subcommands
+ ├── edit [-h] PATH
├── http [-h] [-c CURLOPT] METHOD PATH
└── insert [-h] PATH INDEX
DESCRIPTION
Control a running unitd(8) instance through its control API socket.
- Run '$program_name ctl SUBCOMMAND -h' for more information on a
+ Run '$0 ctl SUBCOMMAND -h' for more information on a
subcommand.
SUBCOMMANDS
+ edit Edit the unitd(8) configuration with an editor.
+
http Send an HTTP request to the control API socket.
insert Insert an element at the specified index into an array in the
@@ -300,6 +302,10 @@ unit_ctl()
fi;
case $1 in
+ edit)
+ shift;
+ unit_ctl_edit ${remote:+ ---r $remote} ---s "$sock" $@;
+ ;;
http)
shift;
unit_ctl_http ${remote:+ ---r $remote} ---s "$sock" $@;
@@ -315,11 +321,115 @@ unit_ctl()
}
+help_unit_ctl_edit()
+{
+ cat <<__EOF__ ;
+SYNOPSIS
+ $0 ctl [CTL-OPTS] edit [-h] PATH
+
+DESCRIPTION
+ Edit the JSON configuration with an editor. The current configuration
+    is downloaded into a temporary file, opened with the editor, and then
+ sent back to the control API socket.
+
+ The following editors are tried in this order of preference: \$VISUAL,
+ \$EDITOR, editor(1), vi(1), vim(1), ed(1).
+
+
+OPTIONS
+ -h, --help
+ Print this help.
+
+ENVIRONMENT
+ VISUAL
+ EDITOR
+ See environ(7).
+
+SEE ALSO
+ $0 ctl http -h;
+
+ update-alternatives(1)
+
+__EOF__
+}
+
+
+unit_ctl_edit()
+{
+ while test $# -ge 1; do
+ case "$1" in
+ -h | --help)
+ help_unit_ctl_edit;
+ exit 0;
+ ;;
+ ---r | ----remote)
+ local remote="$2";
+ shift;
+ ;;
+ ---s | ----sock)
+ local sock="$2";
+ shift;
+ ;;
+ -*)
+ err "ctl: edit: $1: Unknown option.";
+ ;;
+ *)
+ break;
+ ;;
+ esac;
+ shift;
+ done;
+
+ if ! test $# -ge 1; then
+        err 'ctl: edit: PATH: Missing argument.';
+ fi;
+ local req_path="$1";
+
+ if test -v remote; then
+ local remote_sock="$(echo "$sock" | unit_sock_filter -s)";
+ local local_sock="$(mktemp -u -p /var/run/unit/)";
+ local ssh_ctrl="$(mktemp -u -p /var/run/unit/)";
+
+ mkdir -p /var/run/unit/;
+
+ ssh -fMNnT -S "$ssh_ctrl" \
+ -o 'ExitOnForwardFailure yes' \
+ -L "$local_sock:$remote_sock" "$remote";
+
+ sock="unix:$local_sock";
+ fi;
+
+ local tmp="$(mktemp ||:)";
+
+ unit_ctl_http ---s "$sock" -c --no-progress-meter GET "$req_path" \
+ </dev/null >"$tmp" \
+ ||:;
+
+ $(
+ ((test -v VISUAL && test -n "$VISUAL") && printf '%s\n' "$VISUAL") \
+ || ((test -v EDITOR && test -n "$EDITOR") && printf '%s\n' "$EDITOR") \
+ || command -v editor \
+ || command -v vi \
+ || command -v vim \
+ || echo ed;
+ ) "$tmp" \
+ ||:;
+
+ unit_ctl_http ---s "$sock" PUT "$req_path" <"$tmp" \
+ ||:;
+
+ if test -v remote; then
+ ssh -S "$ssh_ctrl" -O exit "$remote" 2>/dev/null;
+ unlink "$local_sock";
+ fi;
+}
+
+
help_unit_ctl_http()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name ctl [CTL-OPTS] http [-h] [-c CURLOPT] METHOD PATH
+ $0 ctl [CTL-OPTS] http [-h] [-c CURLOPT] METHOD PATH
DESCRIPTION
Send an HTTP request to the unitd(8) control API socket.
@@ -341,7 +451,7 @@ ENVIRONMENT
Equivalent to the option -c (--curl).
EXAMPLES
- $program_name ctl http -c --no-progress-meter GET /config >tmp;
+ $0 ctl http -c --no-progress-meter GET /config >tmp;
SEE ALSO
<https://unit.nginx.org/controlapi/#api-manipulation>
@@ -424,7 +534,7 @@ help_unit_ctl_insert()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name ctl [CTL-OPTS] insert [-h] PATH INDEX
+ $0 ctl [CTL-OPTS] insert [-h] PATH INDEX
DESCRIPTION
Insert an element at the specified position (INDEX) into the JSON array
@@ -437,7 +547,7 @@ OPTIONS
Print this help.
SEE ALSO
- $program_name ctl http -h;
+ $0 ctl http -h;
__EOF__
}
@@ -514,7 +624,7 @@ help_unit_ctl_welcome()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name welcome [-hn]
+ $0 welcome [-hn]
DESCRIPTION
This script tests an NGINX Unit installation by creating an initial
@@ -676,7 +786,7 @@ unit_ctl_welcome()
<hr>
<p><a href="https://unit.nginx.org/?referer=welcome">NGINX Unit &mdash; the universal web app server</a><br>
- NGINX, Inc. &copy; 2022</p>
+ NGINX, Inc. &copy; 2023</p>
</body>
</html>
__EOF__';
@@ -720,7 +830,7 @@ help_unit_freeport()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name freeport [-h]
+ $0 freeport [-h]
DESCRIPTION
Print an available TCP port.
@@ -828,7 +938,7 @@ help_unit_json_ins()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name json-ins [-hn] JSON INDEX
+ $0 json-ins [-hn] JSON INDEX
ARGUMENTS
JSON Path to a JSON file containing a top-level array.
@@ -901,7 +1011,7 @@ help_unit_os_probe()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name os-probe [-h]
+ $0 os-probe [-h]
DESCRIPTION
This script probes the OS and prints three fields, delimited by ':';
@@ -978,7 +1088,7 @@ help_unit_ps()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name ps [-h] [-t TYPE]
+ $0 ps [-h] [-t TYPE]
DESCRIPTION
List unitd(8) processes.
@@ -1034,7 +1144,7 @@ unit_ps()
shift;
done;
- ps ax \
+ ps awwx \
| if test -v type; then
grep ${type_c:+-e 'unit: controller'} \
${type_m:+-e 'unit: main'} \
@@ -1051,7 +1161,7 @@ help_unit_repo_config()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name repo-config [-hn] [PKG-MANAGER OS-NAME OS-VERSION]
+ $0 repo-config [-hn] [PKG-MANAGER OS-NAME OS-VERSION]
DESCRIPTION
This script configures the NGINX Unit repository for the system
@@ -1082,11 +1192,11 @@ OPTIONS
what it does.
EXAMPLES
- $ $prog_name repo-config apt debian bullseye;
- $ $prog_name repo-config apt ubuntu jammy;
- $ $prog_name repo-config dnf fedora 36;
- $ $prog_name repo-config dnf rhel 9;
- $ $prog_name repo-config yum amzn2 2;
+ $ $(basename "$0") repo-config apt debian bullseye;
+ $ $(basename "$0") repo-config apt ubuntu jammy;
+ $ $(basename "$0") repo-config dnf fedora 36;
+ $ $(basename "$0") repo-config dnf rhel 9;
+ $ $(basename "$0") repo-config yum amzn2 2;
__EOF__
}
@@ -1222,11 +1332,101 @@ __EOF__";
}
+help_unit_restart()
+{
+ cat <<__EOF__ ;
+SYNOPSIS
+ $0 restart [-hls]
+
+DESCRIPTION
+ Restart all running unitd(8) instances.
+
+OPTIONS
+ -h, --help
+ Print this help.
+
+ -l, --log
+ Reset log file.
+
+ -s, --statedir
+ Reset \$statedir.
+
+CAVEATS
+ This command will ask for confirmation before removing
+ directories; please review those prompts with care, as unknown
+ bugs in the command may attempt to wipe your file system.
+
+__EOF__
+}
+
+
+unit_restart()
+{
+ while test $# -ge 1; do
+ case "$1" in
+ -h | --help)
+ help_unit_restart;
+ exit 0;
+ ;;
+ -l | --log)
+ local log_flag='yes';
+ ;;
+ -s | --statedir)
+ local state_flag='yes';
+ ;;
+ -*)
+ err "restart: $1: Unknown option.";
+ ;;
+ *)
+ err "restart: $1: Unknown argument.";
+ ;;
+ esac;
+ shift;
+ done;
+
+ local cmds="$(unit_cmd)";
+
+ pkill -e unitd;
+
+ printf '%s\n' "$cmds" \
+ | while read -r cmd; do
+ if test -v log_flag; then
+ (
+ echo "$cmd" \
+ | grep '\--log' \
+ | sed 's/.*--log \+\([^ ]\+\).*/\1/' \
+ || eval $cmd --help \
+ | grep -A1 '\--log FILE' \
+ | grep 'default:' \
+ | sed 's/.*"\(.*\)".*/\1/';
+ ) \
+ | xargs rm -f;
+ fi;
+
+ if test -v state_flag; then
+ (
+ echo "$cmd" \
+ | grep '\--statedir' \
+ | sed 's/.*--statedir \+\([^ ]\+\).*/\1/' \
+ || eval $cmd --help \
+ | grep -A1 '\--statedir DIR' \
+ | grep 'default:' \
+ | sed 's/.*"\(.*\)".*/\1/';
+ ) \
+ | xargs -I {} find {} -mindepth 1 -maxdepth 1 \
+ | xargs rm -rfi;
+ fi;
+
+ eval $cmd;
+ done;
+}
+
+
help_unit_sock()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name sock [-h] SUBCOMMAND [ARGS]
+ $0 sock [-h] SUBCOMMAND [ARGS]
Subcommands
├── filter [-ch]
@@ -1236,7 +1436,7 @@ DESCRIPTION
Print the control API socket address of running unitd(8)
instances.
- Run '$program_name sock SUBCOMMAND -h' for more information on a
+ Run '$0 sock SUBCOMMAND -h' for more information on a
subcommand.
SUBCOMMANDS
@@ -1297,7 +1497,7 @@ help_unit_sock_filter()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name sock filter [-chs]
+ $0 sock filter [-chs]
DESCRIPTION
Filter the output of the 'sock find' command and transform it to
@@ -1376,7 +1576,7 @@ help_unit_sock_find()
{
cat <<__EOF__ ;
SYNOPSIS
- $program_name sock find [-h]
+ $0 sock find [-h]
DESCRIPTION
Find and print the control API socket address of running
@@ -1481,6 +1681,10 @@ repo-config)
shift;
unit_repo_config $@;
;;
+restart)
+ shift;
+ unit_restart $@;
+ ;;
sock)
shift;
unit_sock $@;
diff --git a/tools/unitc b/tools/unitc
index 9973e62d..877e11d4 100755
--- a/tools/unitc
+++ b/tools/unitc
@@ -1,7 +1,7 @@
#!/bin/bash
# unitc - a curl wrapper for configuring NGINX Unit
# https://github.com/nginx/unit/tree/master/tools
-# NGINX, Inc. (c) 2022
+# NGINX, Inc. (c) 2023
# Defaults
#
@@ -32,7 +32,7 @@ while [ $# -gt 0 ]; do
shift
;;
- "GET" | "PUT" | "POST" | "DELETE" | "INSERT")
+ "GET" | "PUT" | "POST" | "DELETE" | "INSERT" | "EDIT")
METHOD=$OPTION
shift
;;
@@ -71,6 +71,7 @@ USAGE: ${0##*/} [options] URI
General options
filename … # Read configuration data from files instead of stdin
HTTP method # Default=GET, or PUT with config data (case-insensitive)
+ EDIT # Opens the URI contents in \$EDITOR
INSERT # Virtual HTTP method to prepend data to an existing array
-q | --quiet # No output to stdout
@@ -129,12 +130,23 @@ if [ $REMOTE -eq 0 ]; then
exit 1
fi
- # Get control address
+ # Obtain any optional startup parameters from the 'unitd: main' process
+ # so we can get the actual control address and error log location.
+	# Command line options and the output of ps(1) are notoriously variable
+	# across different *nix/BSD platforms, so multiple attempts might be needed.
#
- PARAMS=$(ps $PID | grep unitd | cut -f2- -dv | tr '[]' ' ' | cut -f3- -d ' ' | sed -e 's/ --/\n--/g')
+ PARAMS=$((ps -wwo args=COMMAND -p $PID || ps $PID) 2> /dev/null | grep unit | tr '[]' ^ | cut -f2 -d^ | sed -e 's/ --/\n--/g')
+ if [ "$PARAMS" = "" ]; then
+ echo "${0##*/}: WARNING: unable to identify unitd command line parameters for PID $PID, assuming unitd defaults from \$PATH"
+ PARAMS=unitd
+ fi
CTRL_ADDR=$(echo "$PARAMS" | grep '\--control' | cut -f2 -d' ')
if [ "$CTRL_ADDR" = "" ]; then
- CTRL_ADDR=$($(echo "$PARAMS" | grep unitd) --help | grep -A1 '\--control' | tail -1 | cut -f2 -d\")
+ CTRL_ADDR=$($(echo "$PARAMS") --help | grep -A1 '\--control' | tail -1 | cut -f2 -d\")
+ fi
+ if [ "$CTRL_ADDR" = "" ]; then
+		echo "${0##*/}: ERROR: cannot detect control socket. Did you start unitd with a relative path? Try starting unitd with the --control option."
+ exit 2
fi
# Prepare for network or Unix socket addressing
@@ -156,7 +168,11 @@ if [ $REMOTE -eq 0 ]; then
#
ERROR_LOG=$(echo "$PARAMS" | grep '\--log' | cut -f2 -d' ')
if [ "$ERROR_LOG" = "" ]; then
- ERROR_LOG=$($(echo "$PARAMS" | grep unitd) --help | grep -A1 '\--log' | tail -1 | cut -f2 -d\")
+ ERROR_LOG=$($(echo "$PARAMS") --help | grep -A1 '\--log' | tail -1 | cut -f2 -d\")
+ fi
+ if [ "$ERROR_LOG" = "" ]; then
+ echo "${0##*/}: WARNING: cannot detect unit log file (will not be monitored). If you started unitd from a relative path then try using the --log option."
+ ERROR_LOG=/dev/null
fi
# Cache the discovery for this unit PID (and cleanup any old files)
@@ -190,6 +206,29 @@ fi
if [ -t 0 ] && [ ${#CONF_FILES[@]} -eq 0 ]; then
if [ "$METHOD" = "DELETE" ]; then
$SSH_CMD curl -X $METHOD $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT
+ elif [ "$METHOD" = "EDIT" ]; then
+ EDITOR=$(test "$EDITOR" && printf '%s' "$EDITOR" || command -v editor || command -v vim || echo vi)
+ EDIT_FILENAME=/tmp/${0##*/}.$$${URI//\//_}
+ $SSH_CMD curl -fsS $UNIT_CTRL$URI > $EDIT_FILENAME || exit 2
+ if [ "${URI:0:12}" = "/js_modules/" ]; then
+ if ! hash jq 2> /dev/null; then
+ echo "${0##*/}: ERROR: jq(1) is required to edit JavaScript modules; install at <https://stedolan.github.io/jq/>"
+ exit 1
+ fi
+ jq -r < $EDIT_FILENAME > $EDIT_FILENAME.js # Unescape linebreaks for a better editing experience
+ EDIT_FILE=$EDIT_FILENAME.js
+ $EDITOR $EDIT_FILENAME.js || exit 2
+ # Remove the references, delete old config, push new config+reference
+ $SSH_CMD curl -fsS $UNIT_CTRL/config/settings/js_module > /tmp/${0##*/}.$$_js_module && \
+ $SSH_CMD curl -X DELETE $UNIT_CTRL/config/settings/js_module && \
+ $SSH_CMD curl -fsSX DELETE $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ && \
+ printf "%s" "$(< $EDIT_FILENAME.js)" | $SSH_CMD curl -fX PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ && \
+ $SSH_CMD curl -X PUT --data-binary @/tmp/${0##*/}.$$_js_module $UNIT_CTRL/config/settings/js_module 2> /tmp/${0##*/}.$$
+ else
+ tr -d '\r' < $EDIT_FILENAME > $EDIT_FILENAME.json # Remove carriage-return from newlines
+ $EDITOR $EDIT_FILENAME.json || exit 2
+ $SSH_CMD curl -X PUT --data-binary @$EDIT_FILENAME.json $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT
+ fi
else
SHOW_LOG=$(echo $URI | grep -c ^/control/)
$SSH_CMD curl $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT
@@ -225,7 +264,7 @@ if [ $CURL_STATUS -ne 0 ]; then
fi
exit 4
fi
-rm -f /tmp/${0##*/}.$$ 2> /dev/null
+rm -f /tmp/${0##*/}.$$* 2> /dev/null
if [ $SHOW_LOG -gt 0 ] && [ $NOLOG -eq 0 ] && [ $QUIET -eq 0 ]; then
echo -n "${0##*/}: Waiting for log..."
diff --git a/version b/version
index 63edb5c3..c08b9fd5 100644
--- a/version
+++ b/version
@@ -1,5 +1,5 @@
# Copyright (C) NGINX, Inc.
-NXT_VERSION=1.30.0
-NXT_VERNUM=13000
+NXT_VERSION=1.31.0
+NXT_VERNUM=13100