281 files changed, 14048 insertions, 2698 deletions
diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..86ae7df5 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,31 @@ +root = true + +[{configure,{*.{c,cpp,h,go,java,js,py,rs}}] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true +indent_style = space +indent_size = 4 + +[Makefile] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true +indent_style = tab +indent_size = 8 + +[{auto/**,*.toml}] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*.yaml] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true +indent_style = space +indent_size = 2 diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..142cbb85 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily diff --git a/.github/workflows/check-whitespace.yaml b/.github/workflows/check-whitespace.yaml new file mode 100644 index 00000000..75f0afe4 --- /dev/null +++ b/.github/workflows/check-whitespace.yaml @@ -0,0 +1,48 @@ +name: Check Whitespace + +# Get the repo with the commits(+1) in the series. +# Process `git log --check` output to extract just the check errors. + +on: + pull_request: + types: [ opened, synchronize ] + +jobs: + check-whitespace: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: git log --check + id: check_out + run: | + log= + commit= + while read dash etc + do + case "${dash}" in + "---") + commit="${etc}" + ;; + "") + ;; + *) + if test -n "${commit}" + then + log="${log}\n${commit}" + echo "" + echo "--- ${commit}" + fi + commit= + log="${log}\n${dash} ${etc}" + echo "${dash} ${etc}" + ;; + esac + done <<< $(git log --check --pretty=format:"--- %h %s" ${{github.event.pull_request.base.sha}}..) 
+ + if test -n "${log}" + then + exit 2 + fi diff --git a/.github/workflows/ci-dev-distro-compiler.yaml b/.github/workflows/ci-dev-distro-compiler.yaml new file mode 100644 index 00000000..8b7f53b7 --- /dev/null +++ b/.github/workflows/ci-dev-distro-compiler.yaml @@ -0,0 +1,177 @@ +name: "CI - Fedora Rawhide / Alpine Edge / GCC / Clang" + +on: + push: + branches: master + paths: + - configure + - 'auto/**' + - 'src/**' + - 'test/**' + - '.github/workflows/ci-dev-distro-compiler.yaml' + pull_request: + branches: master + paths: + - configure + - 'auto/**' + - 'src/**' + - 'test/**' + - '.github/workflows/ci-dev-distro-compiler.yaml' + +jobs: + + fedora-rawhide: + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + compiler: [ 'gcc', 'clang' ] + + container: + image: fedora:rawhide + + steps: + - name: Install tools/deps + run: | + dnf -y update + dnf -y install --setopt=install_weak_deps=False \ + which wget git gcc make pcre2-devel openssl-devel \ + python-unversioned-command python3 python3-devel \ + php-devel php-embedded perl-devel perl-ExtUtils-Embed \ + ruby-devel java-devel nodejs-devel nodejs-npm golang + if [ "${{ matrix.compiler }}" = "clang" ]; then + dnf -y install --setopt=install_weak_deps=False clang + fi + npm install -g node-gyp + + - uses: actions/checkout@v4 + + - name: configure unit CC=${{ matrix.compiler }} + run: | + if [ "${{ matrix.compiler }}" = "clang" ]; then + ./configure --openssl --cc=clang + else + ./configure --openssl + fi + + - name: make unit + run: make -j 4 + + - name: configure unit-php + run: ./configure php + + - name: make unit-php + run: make -j 4 php + + - name: configure unit-python + run: ./configure python + + - name: make unit-python + run: make -j 4 python + + - name: configure unit-perl + run: ./configure perl + if: matrix.compiler == 'gcc' + + - name: make unit-perl + run: make -j 4 perl + if: matrix.compiler == 'gcc' + + - name: configure unit-ruby + run: ./configure ruby + + - name: make unit-ruby + run: make -j 4 ruby + + - name: configure unit-java + run: ./configure java + + - name: make unit-java + run: make -j 4 java + + - name: configure unit-nodejs + run: ./configure nodejs + + - name: make unit-nodejs + run: make node-local-install DESTDIR=node + + - name: configure unit-go + run: ./configure go --go-path= + + - name: make unit-go + run: make go-install + + - name: Install wasmtime + run: | + wget -O- https://github.com/bytecodealliance/wasmtime/releases/download/v20.0.0/wasmtime-v20.0.0-x86_64-linux-c-api.tar.xz | tar -xJf - + + - name: configure unit-wasm + run: ./configure wasm --include-path=wasmtime-v20.0.0-x86_64-linux-c-api/include --lib-path=wasmtime-v20.0.0-x86_64-linux-c-api/lib --rpath + + - name: make unit-wasm + run: make wasm + + alpine-edge: + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + compiler: [ 'gcc', 'clang' ] + + container: + image: alpine:edge + + steps: + - name: Install tools/deps + run: | + apk update && apk upgrade + apk add gcc make musl-dev openssl-dev pcre2-dev curl \ + php83-dev php83-embed python3-dev perl-dev ruby-dev openjdk21-jdk + if [ "${{ matrix.compiler }}" = "clang" ]; then + apk add clang + fi + + - uses: actions/checkout@v4 + + - name: configure unit CC=${{ matrix.compiler }} + run: | + if [ "${{ matrix.compiler }}" = "clang" ]; then + ./configure --openssl --cc=clang + else + ./configure --openssl + fi + + - name: make unit + run: make -j 4 + + - name: configure unit-php + run: ln -s /usr/lib/libphp83.so /usr/lib/libphp.so && ./configure php + + - 
name: make unit-php + run: make -j 4 + + - name: configure unit-python + run: ./configure python + + - name: make unit-python + run: make -j 4 + + - name: configure unit-perl + run: ./configure perl + + - name: make unit-perl + run: make -j 4 perl + + - name: configure unit-ruby + run: ./configure ruby + + - name: make unit-ruby + run: make -j 4 ruby + + - name: configure unit-java + run: ./configure java + + - name: make unit-java + run: make -j 4 java diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b5368ae9..0f9bc699 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,9 +2,24 @@ name: ci on: pull_request: + paths: + - configure + - 'auto/**' + - 'go/**' + - 'src/**' + - 'test/**' + - 'pkg/contrib/**' + - '.github/workflows/ci.yml' push: - branches: - - master + branches: master + paths: + - configure + - 'auto/**' + - 'go/**' + - 'src/**' + - 'test/**' + - 'pkg/contrib/**' + - '.github/workflows/ci.yml' jobs: test: @@ -49,32 +64,22 @@ jobs: os: ubuntu-latest - build: wasm os: ubuntu-latest + - build: wasm-wasi-component + os: ubuntu-latest steps: - uses: actions/checkout@v4 - # Creates and outputs directories used by tests (/usr/local is unfriendly) - - name: Configure directories - id: dir - run: | - PREFIX=${HOME}/.unit - BIN=${PREFIX}/bin - VAR=${PREFIX}/var - mkdir -p $BIN - mkdir -p $VAR - - echo "prefix=${PREFIX}" >> "$GITHUB_OUTPUT" - echo "bin=${BIN}" >> "$GITHUB_OUTPUT" - echo "bin=${BIN}" >> "$GITHUB_PATH" - echo "var=${VAR}" >> "$GITHUB_OUTPUT" - cat "$GITHUB_OUTPUT" - # Provides module, language version and testpath from build name - name: Output build metadata id: metadata run: | - # Split the build name by '-' into module and version - IFS='-' read -r module version <<< "${{ matrix.build }}" + if [ "${{ matrix.build }}" = "wasm-wasi-component" ]; then + module="wasm-wasi-component" + else + # Split the build name by '-' into module and version + IFS='-' read -r module version <<< "${{ matrix.build }}" + fi testpath="test/test_${module}*" @@ -127,15 +132,6 @@ jobs: - name: Configure unit run: | ./configure \ - --prefix=${{ steps.dir.outputs.prefix }} \ - --sbindir=${{ steps.dir.outputs.bin }} \ - --logdir=${{ steps.dir.outputs.var }}/log \ - --log=${{ steps.dir.outputs.var }}/log/unit/unit.log \ - --runstatedir=${{ steps.dir.outputs.var }}/run \ - --pid=${{ steps.dir.outputs.var }}/run/unit/unit.pid \ - --control=unix:${{ steps.dir.outputs.var }}/run/unit/control.sock \ - --modules=${{ steps.dir.outputs.prefix }}/lib/unit/modules \ - --statedir=${{ steps.dir.outputs.var }}/state/unit \ --tests \ --openssl \ --njs \ @@ -179,12 +175,12 @@ jobs: - name: Configure java run: | - ./configure java + sudo ./configure java if: steps.metadata.outputs.module == 'java' - name: Make java run: | - make java + sudo make java if: steps.metadata.outputs.module == 'java' ## @@ -266,12 +262,12 @@ jobs: - name: Configure python3 run: | - ./configure python --config=python3-config + sudo ./configure python --config=python3-config if: steps.metadata.outputs.module == 'python' - name: Make python3 run: | - make python3 + sudo make python3 if: steps.metadata.outputs.module == 'python' ## @@ -309,7 +305,7 @@ jobs: - name: Configure wasm run: | - ./configure wasm --include-path=pkg/contrib/wasmtime/crates/c-api/include --lib-path=pkg/contrib/wasmtime/target/release + ./configure wasm --include-path=pkg/contrib/wasmtime/artifacts/include --lib-path=pkg/contrib/wasmtime/artifacts/lib if: steps.metadata.outputs.module == 'wasm' - name: Make wasm @@ -318,9 
+314,38 @@ jobs: if: steps.metadata.outputs.module == 'wasm' ## + ## wasm-wasi-component + ## + + - name: Setup rust + run: | + curl https://sh.rustup.rs | sh -s -- -y + cargo install cargo-component + if: steps.metadata.outputs.module == 'wasm-wasi-component' + + - name: Configure wasm-wasi-component + run: | + ./configure wasm-wasi-component + if: steps.metadata.outputs.module == 'wasm-wasi-component' + + - name: Make wasm-wasi-component + run: | + CLANG_PATH=/usr/bin/clang-15 \ + BINDGEN_EXTRA_CLANG_ARGS="-I../../njs/src -I../../njs/build" \ + make wasm-wasi-component + if: steps.metadata.outputs.module == 'wasm-wasi-component' + + ## ## Tests ## + # /home/runner will be root only after calling sudo above + # Ensure all users and processes can execute + - name: Fix permissions + run: | + sudo chmod -R +x /home/runner + namei -l ${{ github.workspace }} + # Install python3 if not present - uses: actions/setup-python@v5 with: @@ -329,11 +354,18 @@ jobs: - name: Install pytest run: | - pip install pytest + if [ "${{ matrix.build }}" == "wasm-wasi-component" ]; then + pip install pytest + else + sudo -H pip install pytest + fi if: steps.metadata.outputs.module != 'wasm' - name: Run ${{ steps.metadata.outputs.module }} tests run: | - pytest --print-log ${{ steps.metadata.outputs.testpath }} - # Skip pytest if wasm build, as there are no tests yet + if [ "${{ matrix.build }}" == "wasm-wasi-component" ]; then + pytest --print-log ${{ steps.metadata.outputs.testpath }} + else + sudo -E pytest --print-log ${{ steps.metadata.outputs.testpath }} + fi if: steps.metadata.outputs.module != 'wasm' diff --git a/.github/workflows/cifuzz.yml b/.github/workflows/cifuzz.yml new file mode 100644 index 00000000..dc89c0b2 --- /dev/null +++ b/.github/workflows/cifuzz.yml @@ -0,0 +1,41 @@ +name: CIFuzz +on: + pull_request: + paths: + - 'src/**' + - 'fuzzing/**' + - '.github/workflows/cifuzz.yml' + +permissions: {} +jobs: + Fuzzing: + runs-on: ubuntu-latest + permissions: + security-events: write + steps: + - name: Build Fuzzers + id: build + uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master + with: + oss-fuzz-project-name: 'unit' + language: c + - name: Run Fuzzers + uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master + with: + oss-fuzz-project-name: 'unit' + language: c + fuzz-seconds: 300 + output-sarif: true + - name: Upload Crash + uses: actions/upload-artifact@v3 + if: failure() && steps.build.outcome == 'success' + with: + name: artifacts + path: ./out/artifacts + - name: Upload Sarif + if: always() && steps.build.outcome == 'success' + uses: github/codeql-action/upload-sarif@v3 + with: + # Path to SARIF file relative to the root of the repository + sarif_file: cifuzz-sarif/results.sarif + checkout_path: cifuzz-sarif diff --git a/.github/workflows/unitctl.yml b/.github/workflows/unitctl.yml new file mode 100644 index 00000000..7664ab0b --- /dev/null +++ b/.github/workflows/unitctl.yml @@ -0,0 +1,205 @@ + +name: unitctl + +on: + pull_request: + paths: + - tools/unitctl/** + - docs/unit-openapi.yaml + push: + branches: + - master + tags: + - '[0-9]+.[0-9]+.[0-9]+' + workflow_dispatch: + inputs: + version: + type: string + description: "Semver tag" + required: true + +permissions: + contents: write + +jobs: + test: + runs-on: ${{ matrix.os }} + defaults: + run: + working-directory: tools/unitctl + env: + MAKE: make + CARGO: cargo + VERSION: + SHORT_VERSION: + strategy: + fail-fast: false + matrix: + include: + - build: linux-x86_64 + os: ubuntu-latest + target: x86_64-unknown-linux-gnu 
+ - build: macos-aarch64 + os: macos-latest + target: aarch64-apple-darwin + steps: + - uses: actions/checkout@v4 + + - run: rustup update stable + - run: rustup target add ${{ matrix.target }} + + - name: Install cross + if: matrix.target == 'aarch64-unknown-linux-gnu' + uses: taiki-e/install-action@v2 + with: + tool: cross + + - name: Install macOS depedencies + if: startsWith(matrix.os, 'macos') + run: | + brew install make gnu-sed grep gawk + echo "MAKE=gmake" >> $GITHUB_ENV + + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: 21 + + - run: ${{ env.MAKE }} list-targets + + - name: Generate openapi + run: ${{ env.MAKE }} openapi-generate + - name: Test ${{ matrix.os }} + run: ${{ env.MAKE }} test + + build: + runs-on: ${{ matrix.os }} + defaults: + run: + working-directory: tools/unitctl + env: + MAKE: make + CARGO: cargo + VERSION: + SHORT_VERSION: + strategy: + fail-fast: false + matrix: + include: + - build: linux-aarch64 + os: ubuntu-latest + target: aarch64-unknown-linux-gnu + - build: linux-x86_64 + os: ubuntu-latest + target: x86_64-unknown-linux-gnu + - build: macos-aarch64 + os: macos-latest + target: aarch64-apple-darwin + - build: macos-x86_64 + os: macos-latest + target: x86_64-apple-darwin + + steps: + - uses: actions/checkout@v4 + + - run: rustup update stable + - run: rustup target add ${{ matrix.target }} + + - name: Install cross + if: matrix.target == 'aarch64-unknown-linux-gnu' + uses: taiki-e/install-action@v2 + with: + tool: cross + + - uses: Swatinem/rust-cache@v2 + with: + prefix-key: rust-${{ matrix.build }} + workspaces: ./tools/unitctl -> target + save-if: ${{ github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/') }} + + - name: Configure linux arm dependencies + if: matrix.target == 'aarch64-unknown-linux-gnu' + run: | + cat <<EOF > Cross.toml + [target.aarch64-unknown-linux-gnu] + pre-build = [ + "dpkg --add-architecture \$CROSS_DEB_ARCH", + "apt-get update && apt-get install --assume-yes libssl-dev:\$CROSS_DEB_ARCH" + ] + EOF + + cat Cross.toml + echo "CARGO=cross" >> $GITHUB_ENV + + - name: Install macOS dependencies + if: startsWith(matrix.os, 'macos') + run: | + brew install make gnu-sed grep gawk + echo "MAKE=gmake" >> $GITHUB_ENV + + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: 21 + + - run: ${{ env.MAKE }} list-targets + + - name: Make unitctl (${{ env.MAKE }}, ${{ matrix.target }}) + run: ${{ env.MAKE }} ${{ matrix.target }} + + - name: Get the version from the tag + run: | + version=${version:=${{ github.ref_name }}} + short="${version#*/}" + echo $version; echo $short + echo "VERSION=$version" >> $GITHUB_ENV + echo "SHORT_VERSION=$short" >> $GITHUB_ENV + + - name: Generate sha256 sum + run: | + shasum -a 256 ./target/${{ matrix.target }}/release/unitctl > unitctl-${{ env.SHORT_VERSION }}-${{ matrix.target }}.sha256 + mv ./target/${{ matrix.target }}/release/unitctl unitctl-${{ env.SHORT_VERSION }}-${{ matrix.target }} + + - name: Upload sha256 sum + uses: actions/upload-artifact@v4 + with: + name: unitctl-${{ env.SHORT_VERSION }}-${{ matrix.target }}.sha256 + path: tools/unitctl/unitctl-${{ env.SHORT_VERSION }}-${{ matrix.target }}.sha256 + + - name: Upload unitctl + uses: actions/upload-artifact@v4 + with: + name: unitctl-${{ env.SHORT_VERSION }}-${{ matrix.target }} + path: tools/unitctl/unitctl-${{ env.SHORT_VERSION }}-${{ matrix.target }} + + release: + # Create a draft release if a tag + if: startsWith(github.ref, 'refs/tags/') || github.event_name == 
'workflow_dispatch' + needs: [build] + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v4 + with: + merge-multiple: true + + - name: Create GitHub release + uses: ncipollo/release-action@v1 + with: + artifacts: "unitctl-*" + prerelease: ${{ github.event_name == 'workflow_dispatch' }} + tag: ${{ inputs.version && format('unitctl/{0}', inputs.version) || github.ref_name }} + body: > + ## Unitctl + + This is a released binary of unitctl. + + Unitctl is an official command line tool for managing Unit installations. + + + ## Unit + + For the current release of the NGINX Unit application server check the + [Unit Installation Guide](https://unit.nginx.org/installation/) and the + [Unit Quickstart Guide](https://github.com/nginx/unit/). + + allowUpdates: true diff --git a/.hgignore b/.hgignore deleted file mode 100644 index d4f37fde..00000000 --- a/.hgignore +++ /dev/null @@ -1,6 +0,0 @@ -^build/ -^Makefile$ -\.pyc$ -\.cache$ -\.pytest_cache$ -__pycache__/ diff --git a/.hgtags b/.hgtags deleted file mode 100644 index f5ecba72..00000000 --- a/.hgtags +++ /dev/null @@ -1,76 +0,0 @@ -f9d308f3fceba164f2326fb2ad242339157cb162 0.1 -b09757e4984e70439168af3501ac1787d0276368 0.2 -b8400e8feb36f29a6b3f5f58b1473b040720490c 0.3 -6071f4300f76e2b879e9cc7fd431e5e119077f16 0.4 -1ba4d13d222bc62caa5b1b42cf5ad50a9cf47f64 0.5 -88831b81e384982223fa01f2f29cbb8642846a80 0.6 -d2fcec5b0fa3b0e8da1945aa240d23be3bf94309 0.7 -5870dd420309fe2584abc4ca8d63168ad9141274 1.0 -3f710b55c226c7579d4e737198c2d8a9b7d67518 1.1 -fbe7f5a3867e9559ef0884786fe5150aa60414e6 1.2 -b3cf22b8a17e0e35ca80decb03ed2cceb662c3de 1.3 -8f4524a9cf87fbddf626302da071f5055cf33f28 1.4 -99d69d59aa5edee3405d304886d9319291e392ba 1.4-2 -b3dee0cc5a4edd046345511769b5cfec49044f1c 1.5 -e507438883ef0044c278f1accfc7bc7f90c0ffb6 1.5-1 -d411e7fdee9e03036adb652f8d9f4c45a420bdd5 1.6 -01160bbced577121cb14d0b86ec1f8bb764cfab2 1.6-1 -784b45adb0fe8bdd707510f59ed18309087e5c21 1.7 -abb8cfb421f608df1c23f5c333c5f049a79a681a 1.7-1 -0f04ef991fbc1dadbc590ab7fb229d4f3d6357bc 1.7.1 -fe0d5eb09b66e77a2b66455faa51d3fa04146d3d 1.7.1-1 -0a18a14d169f156f8e2daca35aa86d5a6dd9b1ae 1.8.0 -f47fc64d3d9e3dedb95042e93c7f73b31f458338 1.8.0-1 -dda6319de785dc2d225d818349aba56fc48d47f6 1.9.0 -c927a739754ade5ecf7be8da30f6c42446e72d8c 1.9.0-1 -cdbba3c3e3762eacc308a5407877c3665a05058d 1.10.0 -01322f5ec4e37fd0fd8d9d3e491e8539341160f3 1.10.0-1 -b651ff72ffe080835f884a1ace8fa24eb33e3569 1.10.0-2 -3b1601ac0f2f53fed4cae01b9db0e4e070665cae 1.11.0 -7d54dfd170985c2ae8281219cfe7aa19de6d0de0 1.11.0-1 -c27c08b0deeee58676f3880f0f315388ce5d9322 1.11.0-2 -b391df5f0102aa6afe660cfc863729c1b1111c9e 1.12.0 -c1625c52dd6444ed613348719fbb54c7abcc6619 1.12.0-1 -3313bf222e6e0a91213946dfcbd70bb5079f4cef 1.13.0 -439bd957eeb48dbbffa4cd7eecf3829d497f69b0 1.13.0-1 -6e28966ed1f26e119bf333229ea5e6686c60a469 1.14.0 -7b483cf5cb0971067a4960e5e8a20a81de14f337 1.14.0-1 -801ac82f80fb2b2333f2c03ac9c3df6b7cec130a 1.15.0 -a3ea27be5ebde3d54e06cf6fe613cb4b53ab76d2 1.15.0-1 -8bab088952dd9d7caa3d04fd4b3026cef26fcf7d 1.16.0 -0361f3eda67a17295ab324c831cb9d8c560286ed 1.16.0-1 -4b13438632bc37ca599113be90af64f6e2f09d83 1.17.0 -e0658022962c0d5b0a32a1b0e3090bdec328e3da 1.17.0-1 -9e14c63773be52613dd47dea9fd113037f15a3eb 1.18.0 -de07e42484ecc595050fcbd3581a64cc6b1c1de5 1.18.0-1 -86cdf66f82745d8db35345368dcdb38c79a4f03a 1.19.0 -79f364e9aa907b1d7768e0e6686ce0a80fe61f44 1.19.0-1 -0e985b30067380782125f1c479eda4ef909418df 1.20.0 -29efab062b4e5d8a34b13724feb05c1890d738e8 1.20.0-1 -f804aaf7eee10a7d8116820840d6312dd4914a41 1.21.0 
-e3f504b6082ee97ed0d6c8660890585ef6a5796f 1.21.0-1 -331bdadeca30a49dd11b86af99124c2ffeb22d05 1.22.0 -86b359acc93fe53f1f21b22abc8d1b40ca26158c 1.22.0-1 -49ee24c03f5749f8a1b69dc1c600ad48517d9d7a 1.23.0 -ad6aad2450c256d4f1a3c32f7091a78dbbc4a6d1 1.23.0-1 -847c88d10f26765b45149c14f88c2274adfc3f42 1.24.0 -5c7ce0da580ef6e83c729dd012e976f22acbac27 1.24.0-1 -54ffe5ce4fb3c4304faf6d342d9b17dee2c745ac 1.25.0 -aa207ced9712132040e6153ceccdaf04c112d02c 1.25.0-1 -2be7b623fbfafdb470d832a28abb1cd55c76e04f 1.26.0 -1613ff17671df2273d48be57115f5fe4827f8cd5 1.26.0-1 -1a08f884b24effa8b843d6aeeaf016b6354d1256 1.26.1 -069c16dd4ed34d49584028b25f5cba4a4a2eded6 1.26.1-1 -8a9055cbe4ffd450fac4d7a849c00e0db5485ad3 1.27.0 -e7df1cfec95945265a33f7085fbd313b4dd207fd 1.27.0-1 -ea073fb3cb75abfb4be5dc12402de73e0c20da60 1.28.0 -16e01c5fead470d8f630613b973a50a33fd0d0f2 1.28.0-1 -37cac7fec92e5656d8a03a8594ade131c3391f45 1.29.0 -2927b72847d44913233fa303438a14ec313d860e 1.29.0-1 -fa0227b7f62691a186d752ace475868de49e9fce 1.29.1 -e7b7f2bb04e8c6f4cbe6374fd6960d4465654215 1.29.1-1 -2692a5823c403a4e209681943e32a4907317d14b 1.30.0 -8a0b4338a15648792bcad47edb53f1b1c0badeb4 1.30.0-1 -3a9046dca2a6c51ee2df2cabdf69cb9a83e7a1e6 1.31.0 -ef8ddca63f2c1a3e1758968740837b921dd953dc 1.31.0-1 -25aafe2ff61e0424b3245f4e3d40eb1fa7611063 1.31.1 @@ -6,9 +6,11 @@ Andrei Zeliankou <zelenkov@nginx.com> <xim.andrew@gmail.com> Andrew Clayton <a.clayton@nginx.com> <andrew@digital-domain.net> Andrew Clayton <a.clayton@nginx.com> <a.clayton@f5.com> Artem Konev <artem.konev@nginx.com> <41629299+artemkonev@users.noreply.github.com> +Ava Hahn <a.hahn@f5.com> <110854134+avahahn@users.noreply.github.com> Dan Callahan <d.callahan@f5.com> <dan.callahan@gmail.com> Danielle De Leo <d.deleo@f5.com> <danielle@fastmail.net> Dylan Arbour <d.arbour@f5.com> <arbourd@users.noreply.github.com> +Dylan Arbour <d.arbour@f5.com> <7211830+arbourd@users.noreply.github.com> Konstantin Pavlov <thresh@nginx.com> <thresh@videolan.org> Konstantin Pavlov <thresh@nginx.com> <pavlov.konstantin@gmail.com> Max Romanov <max.romanov@gmail.com> <max.romanov@nginx.com> @@ -1,4 +1,61 @@ +Changes with Unit 1.33.0 17 Sep 2024 + + *) Feature: make the number of router threads configurable. + + *) Feature: make the listen(2) backlog configurable. + + *) Feature: add Python application factory support. + + *) Feature: add experimental chunked request body support. (Disabled by + default). + + *) Feature: add fuzzing via oss-fuzz. + + *) Feature: add "if" option to the "match" object. + + *) Feature: show list of loaded language modules in the /status + endpoint. + + *) Feature: Unit ships with a new Rust based CLI application "unitctl". + + *) Feature: the wasm-wasi-component language module now inherits the + processes environment. + + *) Change: under systemd unit runs in forking mode (once again). + + *) Change: if building with njs, version 0.8.3 or later is now required. + + *) Change: Unit now builds with -std=gnu11 (C11 with GNU extensions). + + *) Change: Unit now creates the full directory path for the PID file and + control socket. + + *) Change: build system improvements, including pretty printing the make + output and enabling various make variables to influence the build + process (see: make help). + + *) Change: better detection of available runnable CPUs on Linux. + + *) Change: default listen(2) backlog on Linux now defaults to Kernel + default. + + *) Bugfix: don't modify REQUEST_URI. + + *) Bugfix: fix a crash when interrupting a download via a proxy. 
+ + *) Bugfix: wasm-wasi-component application process hangs after receiving + restart signal from the control endpoint. + + *) Bugfix: njs variables accessed with a JS template literal should not + be cacheable. + + *) Bugfix: properly handle deleting arrays of certificates. + + *) Bugfix: don't create the $runstatedir directory which triggered an + Alpine packaging error. + + Changes with Unit 1.32.1 26 Mar 2024 *) Bugfix: NJS variables in templates may have incorrect values due to @@ -15,7 +72,7 @@ Changes with Unit 1.32.0 27 Feb 2024 *) Feature: conditional access logging. - *) Feature: NJS variables access. + *) Feature: njs variables access. *) Feature: $request_id variable contains a string that is formed using random data and can be used as a unique request identifier. @@ -114,13 +171,13 @@ Changes with Unit 1.30.0 10 May 2023 *) Feature: basic URI rewrite support. - *) Feature: NJS loadable modules support. + *) Feature: njs loadable modules support. *) Feature: per-application logging. *) Feature: conditional logging of route selection. - *) Feature: support the keys API on the request objects in NJS. + *) Feature: support the keys API on the request objects in njs. *) Feature: default values for 'make install' pathnames such as prefix; this allows to './configure && make && sudo make install'. @@ -140,7 +197,7 @@ Changes with Unit 1.29.1 28 Feb 2023 *) Bugfix: stop creating world-writeable directories. - *) Bugfix: memory leak related to NJS. + *) Bugfix: memory leak related to njs. *) Bugfix: path parsing in PHP applications. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 77343271..eeee56cb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -14,15 +14,16 @@ appreciate that you are considering contributing! ## Getting Started Check out the [Quick Installation](README.md#quick-installation) and -[Howto](https://unit.nginx.org/howto/) guides to get NGINX Unit up and running. +[Howto](https://unit.nginx.org/howto/) guides to get NGINX Unit up and +running. ## Ask a Question -Please open an [issue](https://github.com/nginx/unit/issues/new) on GitHub with -the label `question`. You can also ask a question on -[GitHub Discussions](https://github.com/nginx/unit/discussions) or the NGINX Unit mailing list, -unit@nginx.org (subscribe +Please open an [issue](https://github.com/nginx/unit/issues/new) on GitHub +with the label `question`. You can also ask a question on +[GitHub Discussions](https://github.com/nginx/unit/discussions) or the NGINX +Unit mailing list, unit@nginx.org (subscribe [here](https://mailman.nginx.org/mailman3/lists/unit.nginx.org/)). @@ -47,44 +48,47 @@ the expected behavior that doesn't occur. To suggest an enhancement, open an [issue](https://github.com/nginx/unit/issues/new) on GitHub with the label -`enhancement`. Please do this before implementing a new feature to discuss the -feature first. +`enhancement`. Please do this before implementing a new feature to discuss +the feature first. ### Open a Pull Request -Before submitting a PR, please read the NGINX Unit code guidelines to know more -about coding conventions and benchmarks. Fork the repo, create a branch, and -submit a PR when your changes are tested and ready for review. Again, if you'd -like to implement a new feature, please consider creating a feature request -issue first to start a discussion about the feature. +Before submitting a PR, please read the NGINX Unit code guidelines to know +more about coding conventions and benchmarks. 
Fork the repo, create a branch, +and submit a PR when your changes are tested and ready for review. Again, if +you'd like to implement a new feature, please consider creating a feature +request issue first to start a discussion about the feature. ## Git Style Guide -- Keep a clean, concise and meaningful `git commit` history on your branch, - rebasing locally and squashing before submitting a PR +- Create atomic commits. A commit should do just one thing, i.e. you + shouldn't mix refactoring with functional code changes. Do the + refactoring in one or more commits first. -- For any user-visible changes, updates, and bugfixes, add a note to - `docs/changes.xml` under the section for the upcoming release, using `<change - type="feature">` for new functionality, `<change type="change">` for changed - behavior, and `<change type="bugfix">` for bug fixes. + Ideally you should rebase locally and force push new commits up. -- In the subject line, use the past tense ("Added feature", not "Add feature"); - also, use past tense to describe past scenarios, and present tense for - current behavior +- In the subject line, use the imperative mood. I.e. write the subject like + you're giving git a command, e.g. "Free memory before exiting". Do not + terminate the subject with a `.` -- Limit the subject line to 67 characters, and the rest of the commit message - to 80 characters +- Try to limit the subject line to around 50 characters, but try not to + exceed 72. -- Use subject line prefixes for commits that affect a specific portion of the - code; examples include "Tests:", "Packages:", or "Docker:", and also - individual languages such as "Java:" or "Ruby:" +- Wrap the body of the commit message after 72 characters. -- Reference issues and PRs liberally after the subject line; if the commit - remedies a GitHub issue, [name - it](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue) - accordingly +- Use lowercase subject line prefixes for commits that affect a specific + portion of the code; examples include "tests:", "ci:", or "http:", and + also individual languages such as "python:" or "php:". If multiple areas + are affected you can specify multiple prefixes, e.g. "auto, perl:" -- Don't rely on command-line commit messages with `-m`; use the editor instead +- If the commit fixes an open issue then you can use the "Closes:" + tag/trailer to reference it and have GitHub automatically close it once + it's been merged. E.g.: + + `Closes: https://github.com/nginx/unit/issues/9999` + + That should go at the end of the commit message, separated by a blank line, + along with any other tags. @@ -1,5 +1,9 @@ # NGINX Unit +[![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) +[![CI](https://github.com/nginx/unit/actions/workflows/ci.yml/badge.svg)](https://github.com/nginx/unit/actions/workflows/ci.yml "GitHub workflow CI") +[![GitHub Discussions](https://img.shields.io/badge/GitHub-discussions-009639)](https://github.com/nginx/unit/discussions "GitHub Discussions") + ## Universal Web App Server ![NGINX Unit Logo](docs/unitlogo.svg) @@ -8,7 +12,7 @@ NGINX Unit is a lightweight and versatile open-source server that has two primary capabilities: - serves static media assets, -- runs application code in seven languages. +- runs application code in eight languages. 
Unit compresses several layers of the modern application stack into a potent, coherent solution with a focus on performance, low latency, and scalability. It @@ -22,11 +26,12 @@ achieve that with a complex, asynchronous, multithreading architecture comprising multiple processes to ensure security and robustness while getting the most out of today's computing platforms. - -## Quick Installation +## Installation ### macOS +Run the following command to install both `unitd` (the Unit daemon) and `unitctl` (the control tool). + ``` console $ brew install nginx/unit/unit ``` @@ -34,47 +39,136 @@ For details and available language packages, see the [docs](https://unit.nginx.org/installation/#homebrew). - ### Docker ``` console -$ docker pull unit +$ docker pull unit:<TAG> +$ mkdir /tmp/unit-control # customize as needed. +$ docker run -d \ + --mount type=bind,src=/tmp/unit-control,dst=/var/run \ + --mount type=bind,src=.,dst=/www \ + --network host \ + unit ``` For a description of image tags, see the [docs](https://unit.nginx.org/installation/#docker-images). +WARNING: the latest image tag may not provide support for specific language +modules; *do* check the available image tags from the link above before +pulling your image. + +Your current working directory will now be mounted to the Unit image at `/www`. +You can reach its socket at `/tmp/unit-control/control.unit.sock` assuming no +further customizations have been made. -### Amazon Linux, Fedora, Red Hat +### Debian, Ubuntu, Amazon Linux, Fedora, Red Hat +This helper script configures the correct package repositories for your system. ``` console $ wget https://raw.githubusercontent.com/nginx/unit/master/tools/setup-unit && chmod +x setup-unit -# ./setup-unit repo-config && yum install unit -# ./setup-unit welcome +# ./setup-unit repo-config +``` + +Debian derivatives: +``` console +# apt install unit +``` + +Fedora derivatives: +``` console +# yum install unit ``` For details and available language packages, see the [docs](https://unit.nginx.org/installation/#official-packages). +## Getting Started with `unitctl` + +[`unitctl`](tools/README.md) streamlines the management of NGINX Unit processes +through an easy-to-use command line interface. To get started with `unitctl`, +download it from the +[official GitHub releases](https://github.com/nginx/unit/releases) +or [Homebrew](#macos). + +### Installation -### Debian, Ubuntu +> [!NOTE] +> If you installed Unit with [Homebrew](#macos), you can skip this step +> as `unitctl` is included by default. + +Download the appropriate `unitctl` binary for your system from the +[NGINX Unit releases](https://github.com/nginx/unit/releases/). ``` console -$ wget https://raw.githubusercontent.com/nginx/unit/master/tools/setup-unit && chmod +x setup-unit -# ./setup-unit repo-config && apt install unit -# ./setup-unit welcome +$ tar xzvf unitctl-master-x86_64-unknown-linux-gnu.tar.gz +# mv unitctl /usr/local/bin/ ``` -For details and available language packages, see the -[docs](https://unit.nginx.org/installation/#official-packages). +## Launch Unit using Docker +If you have [Docker installed](https://docs.docker.com/engine/install/) on +your machine, you can effortlessly spin up one of +[Unit's official Docker images](https://hub.docker.com/_/unit) +alongside your application.
+ +> [!TIP] +> How-to and configuration guides are available on +[unit.nginx.org](https://unit.nginx.org/howto/) for web application frameworks +built with Python, PHP, WebAssembly, Node.js, Ruby, and more. -## Running a Hello World App +Here's an example using the `unit:python` Docker image: +``` console +$ unitctl instances new 127.0.0.1:8001 /path/to/app 'unit:python' +``` + +`/path/to/app` will mount to `/www` in the Docker filesystem. + +Save this to `/path/to/app/wsgi.py`: +```python +def application(environ, start_response): + start_response("200 OK", [("Content-Type", "text/plain")]) + return [b"Hello, Python on Unit!"] +``` + +You can then interactively edit the currently active configuration: +``` console +$ unitctl edit +``` +```jsonc +{ + "listeners": { + "*:8000": { + // Point listener to new application + "pass": "applications/python" + } + }, + + // Add an application definition + "applications": { + "python": { + "type": "python", + "path": "/www/", + "module": "wsgi" + } + } +} +``` +Valid configurations will be applied upon save and close. + +``` console +$ curl localhost:8000 + +Hello, Python on Unit! +``` +More Python configuration examples can be found in the +[Unit docs](https://unit.nginx.org/howto/samples/#python). + +## Hello World with PHP and curl Unit runs apps in a [variety of languages](https://unit.nginx.org/howto/samples/). -Let's consider a basic example, -choosing PHP for no particular reason. +Let's explore the configuration of a simple PHP app on Unit with `curl`. Suppose you saved a PHP script as `/www/helloworld/index.php`: ``` php @@ -104,7 +198,6 @@ usually available by default via a Unix domain socket: /path/to/control.unit.sock http://localhost/config/applications ``` ``` json - { "success": "Reconfiguration done." } @@ -158,8 +251,10 @@ Unit's output should contain both snippets, neatly organized: } ``` -For full details of configuration management, see the -[docs](https://unit.nginx.org/configuration/#configuration-management). +## WebAssembly +Unit supports running WebAssembly Components (WASI 0.2). +For more information, see the +[Unit Configuration Docs](https://unit.nginx.org/configuration/#configuration-wasm). ## OpenAPI Specification @@ -167,11 +262,6 @@ Our [OpenAPI specification](docs/unit-openapi.yaml) aims to simplify configuring and integrating NGINX Unit deployments and provide an authoritative source of knowledge about the control API. -Although the specification is still in the early beta stage, it is a promising -step forward for the NGINX Unit community. While working on it, we kindly ask -you to experiment and provide feedback to help improve its functionality and -usability. - ## Community - The go-to place to start asking questions and share your thoughts is @@ -192,7 +282,6 @@ usability. - To reach the team directly, subscribe to the [mailing list](https://mailman.nginx.org/mailman/listinfo/unit). -- For security issues, [email us](security-alert@nginx.org), mentioning - NGINX Unit in the subject and following the [CVSS +- For security issues, [email us](mailto:security-alert@nginx.org), + mentioning NGINX Unit in the subject and following the [CVSS v3.1](https://www.first.org/cvss/v3.1/specification-document) spec. - diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..450f989d --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,21 @@ +# Security Policy + +## Latest Versions + +We advise users to run or update to the most recent release of this +project.
Older versions of this project may not have all enhancements +and/or bug fixes applied to them. + +## Reporting a Vulnerability + +The F5 Security Incident Response Team (F5 SIRT) has an email alias that +makes it easy to report potential security vulnerabilities: + +- If you’re an F5 customer with an active support contract, please +contact [F5 Technical Support](https://www.f5.com/services/support). +- If you aren’t an F5 customer, please report any potential or current +instances of security vulnerabilities with any F5 product to the +F5 Security Incident Response Team at <F5SIRT@f5.com>. + +For more information please read the F5 SIRT vulnerability reporting +guidelines available at [https://www.f5.com/services/support/report-a-vulnerability](https://www.f5.com/services/support/report-a-vulnerability).
\ No newline at end of file diff --git a/SECURITY.txt b/SECURITY.txt deleted file mode 100644 index 2d2ad2b1..00000000 --- a/SECURITY.txt +++ /dev/null @@ -1,30 +0,0 @@ ------BEGIN PGP SIGNED MESSAGE----- -Hash: SHA256 - -# -# Please report security issues as below, specifically -# mentioning NGINX Unit in the subject. -# -Canonical: https://unit.nginx.org/.well-known/security.txt -Contact: mailto:security-alert@nginx.org -Encryption: https://nginx.org/keys/maxim.key -Encryption: https://nginx.org/keys/sb.key -Encryption: https://nginx.org/keys/thresh.key -Expires: 2025-01-01T00:00:00.000Z -Policy: https://www.first.org/cvss/v3.1/specification-document -Preferred-Languages: en - ------BEGIN PGP SIGNATURE----- - -iQGzBAEBCAAdFiEEE8gqY7YDV2FW4wpOoOqYG2aw2WcFAmWdrbkACgkQoOqYG2aw -2WdAIQv/UpQXSYWboNMq9DnXZsMNdeCdAg8nv1PdNYfDzr21YavHsdP3upEg2NUX -M/9WiyO5HwV7FAWKQ8J6T0vg8EZITij5Dxhia4Z/h9QE6bXTH4rD/UViJ+/RTtwF -3WvaMNGUSlTQUNCRQ0QGTAb/jXUQCE8OwFz2UM0ZgqyUmIdkuxMEhsNd4AfAUS4A -OOhM6qfXXAulPNVFZ65Lx7NIner37OyNuzhyuQxIFsnbGagMEIvptkevNIMEy8WO -BeseYx/fp1gHdLTIUKl+nvKR7as5O+fFZSm/eG3VpkS6Fall54WX6zzalhZN7Pie -pze8YdbUukdMUV6wQ/pQH4e/QyEEI8RCk95cZE9mSfxygpbIfBypj66GTaOUC/2z -iTv2tX/DXiGQbSpkNLzwntVvuN5P9BebxmSKdspwfszccPzNhhCVQMkkhzvNVeQ6 -UTorp2O3xvi5fBIUWQU5xkrKqwAmZBYHMPDA97H9hiTmHkytd7YYkvPmJKNDksSa -ui3gNrJe -=yDJD ------END PGP SIGNATURE----- diff --git a/auto/atomic b/auto/atomic index f99adf7e..08061e9a 100644 --- a/auto/atomic +++ b/auto/atomic @@ -33,87 +33,6 @@ nxt_feature_test="int main(void) { . auto/feature -# Solaris 10 builtin atomic operations. - -if [ $nxt_found = no ]; then - - nxt_feature="Solaris builtin atomic operations" - nxt_feature_name=NXT_HAVE_SOLARIS_ATOMIC - nxt_feature_run=yes - nxt_feature_incs= - nxt_feature_libs= - nxt_feature_test="#include <atomic.h> - - int main(void) { - ulong_t n = 0; - - if (atomic_cas_ulong(&n, 0, 3) != 0) - return 1; - if (atomic_add_long_nv(&n, 1) != 4) - return 1; - if (atomic_swap_ulong(&n, 5) != 4) - return 1; - if (n != 5) - return 1; - if (atomic_or_ulong_nv(&n, 2) != 7) - return 1; - if (atomic_and_ulong_nv(&n, 5) != 5) - return 1; - return 0; - }" - . auto/feature -fi - - -# AIX xlC builtin atomic operations. - -if [ $nxt_found = no ]; then - - if [ $NXT_64BIT = 1 ]; then - nxt_feature_test="int main(void) { - long n = 0; - long o = 0; - - if (!__compare_and_swaplp(&n, &o, 3)) - return 1; - if (__fetch_and_addlp(&n, 1) != 3) - return 1; - if (__fetch_and_swaplp(&n, 5) != 4) - return 1; - if (n != 5) - return 1; - __isync(); - __lwsync(); - return 0; - }" - else - nxt_feature_test="int main(void) { - int n = 0; - int o = 0; - - if (!__compare_and_swap(&n, &o, 3)) - return 1; - if (__fetch_and_add(&n, 1) != 3) - return 1; - if (__fetch_and_swap(&n, 5) != 4) - return 1; - if (n != 5) - return 1; - __isync(); - __lwsync(); - return 0; - }" - fi - - nxt_feature="xlC builtin atomic operations" - nxt_feature_name=NXT_HAVE_XLC_ATOMIC - nxt_feature_run=yes - nxt_feature_incs= - nxt_feature_libs= - . auto/feature -fi - - if [ $nxt_found = no ]; then $echo $echo $0: error: no atomic operations found. 
diff --git a/auto/cc/deps b/auto/cc/deps index 11429788..8b4deca7 100644 --- a/auto/cc/deps +++ b/auto/cc/deps @@ -1,18 +1,6 @@ case "$NXT_CC_NAME" in - SunC): - nxt_gen_dep_flags() { - $echo "-xMMD -xMF $NXT_BUILD_DIR/$nxt_dep.tmp" - } - - nxt_gen_dep_post() { - $echo -n "@sed -e 's#^.*:#$NXT_BUILD_DIR/$nxt_obj:#' " - $echo -n "$NXT_BUILD_DIR/$nxt_dep.tmp > $NXT_BUILD_DIR/$nxt_dep" - $echo " && rm -f $NXT_BUILD_DIR/$nxt_dep.tmp" - } - ;; - *) nxt_gen_dep_flags() { $echo "-MMD -MF $NXT_BUILD_DIR/$nxt_dep -MT $NXT_BUILD_DIR/$nxt_obj" diff --git a/auto/cc/test b/auto/cc/test index 9c9602db..34e4379e 100644 --- a/auto/cc/test +++ b/auto/cc/test @@ -46,45 +46,9 @@ then $echo " + $NXT_CC_VERSION" else -if `/bin/sh -c "($CC -V)" 2>&1 | grep "Sun C" >> $NXT_AUTOCONF_ERR 2>&1` -then - NXT_CC_NAME=SunC - $echo " + using Sun C compiler" - NXT_CC_VERSION=`/bin/sh -c "($CC -V)" 2>&1 | grep "Sun C" 2>&1` - $echo " + $NXT_CC_VERSION" - -else -if `/bin/sh -c "($CC -qversion)" 2>&1 \ - | grep "^IBM XL" >> $NXT_AUTOCONF_ERR 2>&1` -then - NXT_CC_NAME=xlC - $echo " + using AIX xlC compiler" - NXT_CC_VERSION=`/bin/sh -c "($CC -qversion)" 2>&1 | grep "IBM XL" 2>&1` - $echo " + $NXT_CC_VERSION" - -else -if `/bin/sh -c "($CC -V)" 2>&1 | grep "Intel(R) C" >> $NXT_AUTOCONF_ERR 2>&1` -then - NXT_CC_NAME=ICC - $echo " + using Intel C++ compiler" - NXT_CC_VERSION=ICC - -else -if `/bin/sh -c "($CC -v)" 2>&1 \ - | grep "Microsoft (R) 32-bit C/C" >> $NXT_AUTOCONF_ERR 2>&1` -then - NXT_CC_NAME=MSVC - $echo " + using MS Visual C++ compiler" - NXT_CC_VERSION=MSVC - -else NXT_CC_NAME=cc NXT_CC_VERSION=cc -fi # MSVC -fi # ICC -fi # xlC -fi # SunC fi # Apple LLVM clang fi # clang fi # gcc @@ -101,12 +65,12 @@ case $NXT_CC_NAME in # Do not export symbols except explicitly marked with NXT_EXPORT. NXT_CFLAGS="$NXT_CFLAGS -fvisibility=hidden" - # c99/gnu99 conflict with Solaris XOPEN. - #NXT_CFLAGS="$NXT_CFLAGS -std=gnu99" + NXT_CFLAGS="$NXT_CFLAGS -fno-strict-overflow" + + NXT_CFLAGS="$NXT_CFLAGS -std=gnu11" NXT_CFLAGS="$NXT_CFLAGS -O" - #NXT_CFLAGS="$NXT_CFLAGS -O0" - NXT_CFLAGS="$NXT_CFLAGS -W -Wall -Wextra" + NXT_CFLAGS="$NXT_CFLAGS -Wall -Wextra" #NXT_CFLAGS="$NXT_CFLAGS -Wunused-result" NXT_CFLAGS="$NXT_CFLAGS -Wno-unused-parameter" @@ -115,12 +79,14 @@ case $NXT_CC_NAME in # -O2 enables -fstrict-aliasing and -fstrict-overflow. #NXT_CFLAGS="$NXT_CFLAGS -O2" - #NXT_CFLAGS="$NXT_CFLAGS -Wno-strict-aliasing" + NXT_CFLAGS="$NXT_CFLAGS -fno-strict-aliasing" #NXT_CFLAGS="$NXT_CFLAGS -fomit-frame-pointer" #NXT_CFLAGS="$NXT_CFLAGS -momit-leaf-frame-pointer" - # -Wstrict-overflow is supported by GCC 4.2+. + # While -Wstrict-overflow is supported by GCC 4.2+, + # at level 3 and above it is prone to give false + # positives. #NXT_CFLAGS="$NXT_CFLAGS -Wstrict-overflow=5" NXT_CFLAGS="$NXT_CFLAGS -Wmissing-prototypes" @@ -141,17 +107,22 @@ case $NXT_CC_NAME in # Do not export symbols except explicitly marked with NXT_EXPORT. 
NXT_CFLAGS="$NXT_CFLAGS -fvisibility=hidden" + NXT_CFLAGS="$NXT_CFLAGS -fno-strict-overflow" + + NXT_CFLAGS="$NXT_CFLAGS -std=gnu11" + NXT_CFLAGS="$NXT_CFLAGS -O" - #NXT_CFLAGS="$NXT_CFLAGS -O0" - NXT_CFLAGS="$NXT_CFLAGS -W -Wall -Wextra" + NXT_CFLAGS="$NXT_CFLAGS -Wall -Wextra" #NXT_CFLAGS="$NXT_CFLAGS -Wunused-result" NXT_CFLAGS="$NXT_CFLAGS -Wno-unused-parameter" #NXT_CFLAGS="$NXT_CFLAGS -Wshorten-64-to-32" NXT_CFLAGS="$NXT_CFLAGS -Wwrite-strings" #NXT_CFLAGS="$NXT_CFLAGS -O2" + # strict-aliasing is always enabled by default in clang + NXT_CFLAGS="$NXT_CFLAGS -fno-strict-aliasing" + #NXT_CFLAGS="$NXT_CFLAGS -fomit-frame-pointer" - NXT_CFLAGS="$NXT_CFLAGS -fstrict-aliasing" NXT_CFLAGS="$NXT_CFLAGS -Wstrict-overflow=5" NXT_CFLAGS="$NXT_CFLAGS -Wmissing-prototypes" @@ -168,38 +139,6 @@ case $NXT_CC_NAME in fi ;; - SunC) - nxt_have=NXT_SUNC . auto/have - - NXT_CFLAGS="$NXT_CFLAGS -fPIC" - # Optimization. - NXT_CFLAGS="$NXT_CFLAGS -O -fast" - # Stop on warning. - NXT_CFLAGS="$NXT_CFLAGS -errwarn=%all" - # Debug. - NXT_CFLAGS="$NXT_CFLAGS -g" - ;; - - xlC) - nxt_have=NXT_XLC . auto/have - - #NXT_CFLAGS="$NXT_CFLAGS -qalloca" - # alloca support. - NXT_CFLAGS="$NXT_CFLAGS -qlanglvl=extc99" - # __thread support. - NXT_CFLAGS="$NXT_CFLAGS -qtls" - # Suppress warning - # 1506-159 (E) Bit field type specified for XXX is not valid. - # Type unsigned assumed. - NXT_CFLAGS="$NXT_CFLAGS -qsuppress=1506-159" - ;; - - ICC) - ;; - - MSVC) - ;; - *) ;; diff --git a/auto/fuzzing b/auto/fuzzing new file mode 100644 index 00000000..f792941b --- /dev/null +++ b/auto/fuzzing @@ -0,0 +1,75 @@ +# Copyright (C) NGINX, Inc. + + +if [ -n "$NXT_FUZZ" ]; then + + # Fuzz-Test object files list. + + $echo "NXT_FUZZ_OBJS = \\" >> $NXT_MAKEFILE + + for nxt_src in $NXT_FUZZ_SRCS + do + nxt_obj=${nxt_src%.c}.o + $echo " $NXT_BUILD_DIR/$nxt_obj \\" >> $NXT_MAKEFILE + done + + + # Fuzz-Test executables. 
+ + cat << END >> $NXT_MAKEFILE + +.PHONY: fuzz +fuzz: $NXT_BUILD_DIR/fuzz_basic \\ + $NXT_BUILD_DIR/fuzz_http_controller \\ + $NXT_BUILD_DIR/fuzz_http_h1p \\ + $NXT_BUILD_DIR/fuzz_http_h1p_peer \\ + $NXT_BUILD_DIR/fuzz_json + +$NXT_BUILD_DIR/fuzz_basic: \$(NXT_FUZZ_OBJS) \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/fuzz_basic \\ + \$(CFLAGS) $NXT_BUILD_DIR/fuzzing/nxt_basic_fuzz.o \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ + $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS \\ + $NXT_FUZZ + +$NXT_BUILD_DIR/fuzz_http_controller: \$(NXT_FUZZ_OBJS) \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/fuzz_http_controller \\ + \$(CFLAGS) $NXT_BUILD_DIR/fuzzing/nxt_http_controller_fuzz.o \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ + $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS \\ + $NXT_FUZZ + +$NXT_BUILD_DIR/fuzz_http_h1p: \$(NXT_FUZZ_OBJS) \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/fuzz_http_h1p \\ + \$(CFLAGS) $NXT_BUILD_DIR/fuzzing/nxt_http_h1p_fuzz.o \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ + $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS \\ + $NXT_FUZZ + +$NXT_BUILD_DIR/fuzz_http_h1p_peer: \$(NXT_FUZZ_OBJS) \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/fuzz_http_h1p_peer \\ + \$(CFLAGS) $NXT_BUILD_DIR/fuzzing/nxt_http_h1p_peer_fuzz.o \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ + $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS \\ + $NXT_FUZZ + +$NXT_BUILD_DIR/fuzz_json: \$(NXT_FUZZ_OBJS) \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/fuzz_json \\ + \$(CFLAGS) $NXT_BUILD_DIR/fuzzing/nxt_json_fuzz.o \\ + $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ + $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS \\ + $NXT_FUZZ + +END + +fi @@ -50,10 +50,12 @@ cat << END --openssl enable OpenSSL library usage - --njs enable NJS library usage + --njs enable njs library usage --debug enable debug logging + --fuzz=ENGINE enable fuzz testing + python OPTIONS configure Python module run "./configure python --help" to see available options @@ -10,10 +10,18 @@ $echo "creating $NXT_MAKEFILE" cat << END > $NXT_MAKEFILE +# Pretty print compiler etc actions... +PP_CC := @echo ' CC ' +PP_AR := @echo ' AR ' +PP_LD := @echo ' LD ' +PP_VER := @echo ' VER ' +PP_SED := @echo ' SED ' + CC = $CC AR = $AR -CFLAGS = $NXT_CFLAGS $NXT_CC_OPT $CFLAGS +EXTRA_CFLAGS = +CFLAGS = $NXT_CFLAGS $NXT_CC_OPT $CFLAGS \$(EXTRA_CFLAGS) NXT_EXEC_LINK = $NXT_EXEC_LINK $NXT_LD_OPT NXT_SHARED_LOCAL_LINK = $NXT_SHARED_LOCAL_LINK $NXT_LD_OPT @@ -28,6 +36,47 @@ manpage: $NXT_BUILD_DIR/share/man/man8/unitd.8 END +NXT_OS=$(uname -s) +NXT_GNU_MAKE=$(make --version | grep GNU || true) + +# Requires GNU make. On OpenIndiana at least we have to use gmake +if [ -n "$NXT_GNU_MAKE" ] || [ $NXT_OS = "SunOS" ]; then + + cat << END >> $NXT_MAKEFILE +# By default compiler etc output is hidden, use +# make V=1 ... +# to show it. +V := 0 + +v := @ +ifeq (\$V,1) + v := +endif + +# Optionally enable debugging builds with +# make D=1 ... +# -g is always used, this just changes the optimisation level. +# On GCC this would be -Og, however according to the clang(1) +# man page, -O0 'generates the most debuggable code'. +D := 0 + +ifeq (\$D,1) + CFLAGS += -O0 +endif + +# Optionally disable -Werror with +# make E=0 ... 
+E := 1 + +ifeq (\$E,0) + CFLAGS += -Wno-error +endif + +END + +fi + + # The include paths list. $echo -n "NXT_LIB_INCS =" >> $NXT_MAKEFILE @@ -75,8 +124,10 @@ cat << END >> $NXT_MAKEFILE include version $NXT_VERSION_H: version - $echo '#define NXT_VERSION "\$(NXT_VERSION)"' > $NXT_VERSION_H - $echo '#define NXT_VERNUM \$(NXT_VERNUM)' >> $NXT_VERSION_H + \$(PP_VER) \$@ \(NXT_VERSION\) + \$(v)$echo '#define NXT_VERSION "\$(NXT_VERSION)"' > $NXT_VERSION_H + \$(PP_VER) \$@ \(NXT_VERNUM\) + \$(v)$echo '#define NXT_VERNUM \$(NXT_VERNUM)' >> $NXT_VERSION_H END @@ -88,23 +139,26 @@ cat << END >> $NXT_MAKEFILE libnxt: $NXT_BUILD_DIR/lib/$NXT_LIB_SHARED $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC $NXT_BUILD_DIR/lib/$NXT_LIB_SHARED: \$(NXT_LIB_OBJS) - \$(NXT_SHARED_LOCAL_LINK) -o \$@ \$(NXT_LIB_OBJS) \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_SHARED_LOCAL_LINK) -o \$@ \$(NXT_LIB_OBJS) \\ $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC: \$(NXT_LIB_OBJS) - $NXT_STATIC_LINK \$@ \$(NXT_LIB_OBJS) + \$(PP_AR) \$@ + \$(v)$NXT_STATIC_LINK \$@ \$(NXT_LIB_OBJS) $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC: \$(NXT_LIB_UNIT_OBJS) \\ $NXT_BUILD_DIR/share/pkgconfig/unit.pc \\ $NXT_BUILD_DIR/share/pkgconfig/unit-uninstalled.pc - $NXT_STATIC_LINK \$@ \$(NXT_LIB_UNIT_OBJS) + \$(PP_AR) \$@ + \$(v)$NXT_STATIC_LINK \$@ \$(NXT_LIB_UNIT_OBJS) END # Object files. -for nxt_src in $NXT_LIB_SRCS $NXT_TEST_SRCS $NXT_LIB_UNIT_SRCS \ +for nxt_src in $NXT_LIB_SRCS $NXT_TEST_SRCS $NXT_FUZZ_SRCS $NXT_LIB_UNIT_SRCS \ src/test/nxt_unit_app_test.c \ src/test/nxt_unit_websocket_chat.c \ src/test/nxt_unit_websocket_echo.c @@ -116,7 +170,8 @@ do cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - \$(CC) -c \$(CFLAGS) \$(NXT_LIB_INCS) $NXT_LIB_AUX_CFLAGS \\ + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) \$(NXT_LIB_INCS) $NXT_LIB_AUX_CFLAGS \\ -o $NXT_BUILD_DIR/$nxt_obj \\ $nxt_dep_flags \\ $nxt_src @@ -136,7 +191,8 @@ nxt_dep_post=`nxt_gen_dep_post` cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - \$(CC) -c \$(CFLAGS) -DNXT_NCQ_TEST=1 \$(NXT_LIB_INCS) $NXT_LIB_AUX_CFLAGS \\ + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) -DNXT_NCQ_TEST=1 \$(NXT_LIB_INCS) $NXT_LIB_AUX_CFLAGS \\ -o $NXT_BUILD_DIR/$nxt_obj \\ $nxt_dep_flags \\ $nxt_src @@ -154,7 +210,8 @@ nxt_dep_post=`nxt_gen_dep_post` cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - \$(CC) -c \$(CFLAGS) -DNXT_NCQ_TEST=0 \$(NXT_LIB_INCS) $NXT_LIB_AUX_CFLAGS \\ + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) -DNXT_NCQ_TEST=0 \$(NXT_LIB_INCS) $NXT_LIB_AUX_CFLAGS \\ -o $NXT_BUILD_DIR/$nxt_obj \\ $nxt_dep_flags \\ $nxt_src @@ -192,14 +249,16 @@ tests: $NXT_BUILD_DIR/tests $NXT_BUILD_DIR/utf8_file_name_test \\ $NXT_BUILD_DIR/tests: \$(NXT_TEST_OBJS) \\ $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC - \$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/tests \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/tests \\ \$(CFLAGS) \$(NXT_TEST_OBJS) \\ $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS $NXT_BUILD_DIR/utf8_file_name_test: $NXT_LIB_UTF8_FILE_NAME_TEST_SRCS \\ $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC - \$(CC) \$(CFLAGS) \$(NXT_LIB_INCS) $NXT_LIB_AUX_CFLAGS \\ + \$(PP_LD) \$@ + \$(v)\$(CC) \$(CFLAGS) \$(NXT_LIB_INCS) $NXT_LIB_AUX_CFLAGS \\ -o $NXT_BUILD_DIR/utf8_file_name_test \\ $NXT_LIB_UTF8_FILE_NAME_TEST_SRCS \\ $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ @@ -207,21 +266,24 @@ $NXT_BUILD_DIR/utf8_file_name_test: $NXT_LIB_UTF8_FILE_NAME_TEST_SRCS \\ $NXT_BUILD_DIR/ncq_test: 
$NXT_BUILD_DIR/src/test/nxt_ncq_test.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC - \$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/ncq_test \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/ncq_test \\ \$(CFLAGS) $NXT_BUILD_DIR/src/test/nxt_ncq_test.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS $NXT_BUILD_DIR/vbcq_test: $NXT_BUILD_DIR/src/test/nxt_vbcq_test.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC - \$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/vbcq_test \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/vbcq_test \\ \$(CFLAGS) $NXT_BUILD_DIR/src/test/nxt_vbcq_test.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS $NXT_BUILD_DIR/unit_app_test: $NXT_BUILD_DIR/src/test/nxt_unit_app_test.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC - \$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/unit_app_test \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/unit_app_test \\ \$(CFLAGS) $NXT_BUILD_DIR/src/test/nxt_unit_app_test.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC \\ $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS @@ -229,7 +291,8 @@ $NXT_BUILD_DIR/unit_app_test: $NXT_BUILD_DIR/src/test/nxt_unit_app_test.o \\ $NXT_BUILD_DIR/unit_websocket_chat: \\ $NXT_BUILD_DIR/src/test/nxt_unit_websocket_chat.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC - \$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/unit_websocket_chat \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/unit_websocket_chat \\ \$(CFLAGS) $NXT_BUILD_DIR/src/test/nxt_unit_websocket_chat.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC \\ $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS @@ -237,7 +300,8 @@ $NXT_BUILD_DIR/unit_websocket_chat: \\ $NXT_BUILD_DIR/unit_websocket_echo: \\ $NXT_BUILD_DIR/src/test/nxt_unit_websocket_echo.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC - \$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/unit_websocket_echo \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o $NXT_BUILD_DIR/unit_websocket_echo \\ \$(CFLAGS) $NXT_BUILD_DIR/src/test/nxt_unit_websocket_echo.o \\ $NXT_BUILD_DIR/lib/$NXT_LIB_UNIT_STATIC \\ $NXT_LD_OPT $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS @@ -296,7 +360,8 @@ cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/sbin/$NXT_DAEMON: $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ \$(NXT_OBJS) - \$(NXT_EXEC_LINK) -o \$@ \$(CFLAGS) \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_EXEC_LINK) -o \$@ \$(CFLAGS) \\ \$(NXT_OBJS) $NXT_BUILD_DIR/lib/$NXT_LIB_STATIC \\ $NXT_LIBM $NXT_LIBS $NXT_LIB_AUX_LIBS @@ -309,7 +374,8 @@ cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/share/man/man8/unitd.8: docs/man/man8/unitd.8.in \\ $NXT_BUILD_DIR/include/nxt_auto_config.h - sed -e "s|%%ERROR_LOG_PATH%%|$NXT_LOG|" \\ + \$(PP_SED) \$@ + \$(v)sed -e "s|%%ERROR_LOG_PATH%%|$NXT_LOG|" \\ -e "s|%%PID_PATH%%|$NXT_PID|" \\ -e "s|%%SOCKET_PATH%%|$NXT_CONTROL|" \\ < docs/man/man8/unitd.8.in > \$@ @@ -328,7 +394,8 @@ do cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - \$(CC) -c \$(CFLAGS) \$(NXT_INCS) \\ + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) \$(NXT_INCS) \\ $NXT_LIB_AUX_CFLAGS \\ -o $NXT_BUILD_DIR/$nxt_obj \\ $nxt_dep_flags \\ @@ -367,8 +434,6 @@ ${NXT_DAEMON}-install: $NXT_DAEMON install-check || install -d \$(DESTDIR)$NXT_STATEDIR test -d \$(DESTDIR)$NXT_LOGDIR \ || install -d \$(DESTDIR)$NXT_LOGDIR - test -d \$(DESTDIR)$NXT_RUNSTATEDIR \ - || install -d \$(DESTDIR)$NXT_RUNSTATEDIR manpage-install: manpage install-check test -d \$(DESTDIR)$NXT_MANDIR/man8 \ @@ -441,7 +506,8 @@ END cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/share/pkgconfig/unit.pc: src/unit.pc.in - sed -e 
"s|@PREFIX@|$NXT_PREFIX|" \\ + \$(PP_SED) \$@ + \$(v)sed -e "s|@PREFIX@|$NXT_PREFIX|" \\ -e "s|@LIBDIR@|$NXT_LIBDIR|" \\ -e "s|@CFLAGS@|-I$NXT_INCLUDEDIR|" \\ -e "s|@VERSION@|\$(NXT_VERSION)|" \\ @@ -451,7 +517,8 @@ $NXT_BUILD_DIR/share/pkgconfig/unit.pc: src/unit.pc.in < src/unit.pc.in > \$@ $NXT_BUILD_DIR/share/pkgconfig/unit-uninstalled.pc: src/unit.pc.in - sed -e "s|@PREFIX@|$(pwd)/$NXT_BUILD_DIR|" \\ + \$(PP_SED) \$@ + \$(v)sed -e "s|@PREFIX@|$(pwd)/$NXT_BUILD_DIR|" \\ -e "s|@LIBDIR@|$(pwd)/$NXT_BUILD_DIR/lib|" \\ -e "s|@CFLAGS@|-I$(pwd)/src -I$(pwd)$NXT_BUILD_DIR/include|" \\ -e "s|@VERSION@|\$(NXT_VERSION)|" \\ @@ -472,4 +539,16 @@ include $NXT_MAKEFILE clean: rm -rf $NXT_BUILD_DIR *.dSYM Makefile +.PHONY: help +help: + @echo "Variables to control make/build behaviour:" + @echo + @echo " make V=1 ... - Enables verbose output" + @echo " make D=1 ... - Enables debug builds (-O0)" + @echo " make E=0 ... - Disables -Werror" + @echo + @echo " make EXTRA_CFLAGS= ... - Add extra compiler options" + @echo + @echo " Variables can be combined." + END diff --git a/auto/modules/java b/auto/modules/java index b5c8d70a..4bb34ae7 100644 --- a/auto/modules/java +++ b/auto/modules/java @@ -238,7 +238,7 @@ cat << END > $NXT_JAVA_JARS static const char *nxt_java_system_jars[] = { END -NXT_TOMCAT_VERSION=9.0.86 +NXT_TOMCAT_VERSION=9.0.93 NXT_JAR_VERSION=$NXT_TOMCAT_VERSION @@ -284,7 +284,7 @@ static const char *nxt_java_unit_jars[] = { "$NXT_UNIT_JAR", END -NXT_JAR_VERSION=9.4.54.v20240208 +NXT_JAR_VERSION=9.4.56.v20240826 NXT_JAR_NAMESPACE=org/eclipse/jetty/ NXT_JAR_NAME=jetty-util @@ -297,7 +297,7 @@ NXT_JAR_NAME=jetty-http . auto/modules/java_get_jar NXT_JAR_NAME=classgraph -NXT_JAR_VERSION=4.8.165 +NXT_JAR_VERSION=4.8.175 NXT_JAR_NAMESPACE=io/github/classgraph/ . 
auto/modules/java_get_jar @@ -371,8 +371,9 @@ for nxt_src in $NXT_JAVA_MODULE_SRCS; do cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - mkdir -p $NXT_BUILD_DIR/src/java - \$(CC) -c \$(CFLAGS) -DNXT_JAVA_MOUNTS_H=\"$NXT_JAVA_MOUNTS_HEADER\" \\ + \$(v)mkdir -p $NXT_BUILD_DIR/src/java + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) -DNXT_JAVA_MOUNTS_H=\"$NXT_JAVA_MOUNTS_HEADER\" \\ \$(NXT_INCS) $NXT_JAVA_INCLUDE \\ $nxt_dep_flags \\ -o $NXT_BUILD_DIR/$nxt_obj $nxt_src @@ -525,7 +526,8 @@ ${NXT_JAVA_MODULE}: $NXT_BUILD_DIR/lib/unit/modules/${NXT_JAVA_MODULE}.unit.so \ $NXT_BUILD_DIR/$NXT_WS_API_JAR $NXT_BUILD_DIR/lib/unit/modules/${NXT_JAVA_MODULE}.unit.so: $nxt_objs - \$(NXT_MODULE_LINK) -o \$@ $nxt_objs $NXT_JAVA_LDFLAGS $NXT_LD_OPT + \$(PP_LD) \$@ + \$(v)\$(NXT_MODULE_LINK) -o \$@ $nxt_objs $NXT_JAVA_LDFLAGS $NXT_LD_OPT install: ${NXT_JAVA_MODULE}-install diff --git a/auto/modules/java_jar.sha512 b/auto/modules/java_jar.sha512 index a689b901..14516b8c 100644 --- a/auto/modules/java_jar.sha512 +++ b/auto/modules/java_jar.sha512 @@ -1,14 +1,14 @@ -d0c17607eee55e181baa03f1abb2cf77f50e5114c471c2031607206768d8549c74ebb0a276d87dd3f8ea44db5e54e56087311c229ba18ad6013c388fc861beed classgraph-4.8.165.jar +75eb34c016ce4eb4c7a8413ed06570b491f9f2fb852b19097d0e7692c746052da466d6e86810a9c899890a87f49d5c94cffd724b15d47b006e34e7c334437efc classgraph-4.8.175.jar ab441acf5551a7dc81c353eaccb3b3df9e89a48987294d19e39acdb83a5b640fcdff7414cee29f5b96eaa8826647f1d5323e185018fe33a64c402d69c73c9158 ecj-3.26.0.jar -6e1d6fdffcb2acf8daa9ce5b3ad973526a30b3556dc8e950254c68c64cd70e101b28a8acac41b3bd74de6b9c8eac10676afdc3c58ccb1f61a74323721592e0b5 jetty-http-9.4.54.v20240208.jar -780ee47a8722bdfb4b159f440acbfb69afdb73cc329906392b10eba8d30c564aa6377fab129e61b85a56945f01c4403913c80b6ce3158d108d88a3ad64527f06 jetty-server-9.4.54.v20240208.jar -fbe9cf7efeba9f29297f75de3f1c2d98f0e02816a1dc9e6eaddcabb84c3a699a9332218c532017a3707ec57f4f99066bc671708bde4ec84dd873b8403422d7e9 jetty-util-9.4.54.v20240208.jar -1aa9024f49f74b44252f7c90d00bbfdd6aae4e96866708a0c2325def0314c8b7e5ad2fd17bb6b4b135eb2c513fe74b5b591d4b0fe3d1921192cfecadf140b7fa tomcat-api-9.0.86.jar -60a9991ff7b95ef4edfac57cd7c18b6c5177d9aee4f775b5794b5833246b928e1a685b80785babd2f450e3cd18383c58b843b0b5e742252a37044494bc90d608 tomcat-el-api-9.0.86.jar -b09cbfb834564cc7025ffad7bf069569989d3efa3bd176696045aea08bfb53622aa1aece5c84ea4371f0193d4fd477b9179999399e75d04205b219a3ab19bb66 tomcat-jasper-9.0.86.jar -1431469e91debc0ffcf820df2973782221be955dac0739a77d9030ac619cde96320970cb27eb2ff9de1e6bde3227a70b1645d1934da8e10fe2b32c069d33afec tomcat-jasper-el-9.0.86.jar -1ad9ebc1c49beb243c18ab2c459dbd54cab9514223c44b5c7e05d53d290c64c49990fc0fe276c66b1f6f6625acca651fdcb4b7df9e23fb0cc43bc05ad3900798 tomcat-jsp-api-9.0.86.jar -d30055aabf5d45ad350e01702ed0ff4bfbcdd14bee40e1e8a9a7690719816aff019ca961b7970234eaba673c3c13f5cea5dbf1bc0612ce4e8f7de795af2f170d tomcat-juli-9.0.86.jar -d7fa7d7bf35b35b7bb925cce6284e2f750e8e94ee54057daff4e369a32b361e6044e9011048a9dff54b12371ee785d34e82306b60ffae8add76602071e5fc9c5 tomcat-servlet-api-9.0.86.jar -b4a268b79fbfcd610ea5d14446ef71ad5f2ad3da247ae148669e3082ff5fd7f7256a2ecdf2529e4280ed393f53c3a7f6d09a5c38d5653b30b25ab3c4b74ed732 tomcat-util-9.0.86.jar -f2086356c8eca5cfe890232056ce30378422d3994c499845f52ec8641453af02041ae31ffdcb567bb998f0f24465d1ab65456b23e8f781058efdc01658c7252d tomcat-util-scan-9.0.86.jar +48d5512d378d58c3df93bbe1c68d5a4b097e2dc7466992393e0731414dc0fe449312e067d0874e503dd27cb9b6a1f2da0d1f1569a7b484c1f12bd533afbe2723 
jetty-http-9.4.56.v20240826.jar +dc0d1f05a7f8477e1f7f57adb307224aadcb1e1fed6f7e5df72d9b84437bf75276c3b0f13da9d079c541ad94601bdbd7314c61477b224dbbdc93a6d5b9f98478 jetty-server-9.4.56.v20240826.jar +025751c45cd8a23f1b436eaa1849af2421f78a2a82bc07c4175df8b4ec5dfc15247036194156897aafb8b0ec83406bbaa25e4092fea5ddd3b900da0ffdfaae19 jetty-util-9.4.56.v20240826.jar +9ea72285b395592dbee58339e3a2a9a8e9d9b623f9c37eace3cb45842107469880d3556a18023f870ef4fcad007705652ff300c12f57fa13af9a8cd666f545de tomcat-api-9.0.93.jar +3a91b9dda9d015aaf0c931847ba40a17bd77102ec0b596a1406142dfac556ab95fa5e0fe554743757d80f28436bcd05f28d2716ca62204bded81f0ed98ac2bf8 tomcat-el-api-9.0.93.jar +d50a0d26c6b8ee2b855df54e7a2647f423181f7fa45fe67daf33b8828122b891cacc7bd8312ae72c05a20708f2b405053045d71939427e1526796b02432db72d tomcat-jasper-9.0.93.jar +4886719cbe70ec7bd9190f691d0a85bc84a343ecdb969eb946b0718f0a8bd670f9bc771a309d2d90b23dae06be1f51d3049df7ed157c90b8f4d724a1064eb21b tomcat-jasper-el-9.0.93.jar +17696c91cb90c71dbebac36cebd5205c001a4ebd4df22eeef0762d818dde16f8b6e8ccfa28301ea2fa6efb001ea2f428a813a2aea1b593f08c7c62d31884571c tomcat-jsp-api-9.0.93.jar +cf9582e6fbc3dae8bdc55e4e687fd2dbc6f44d1bfce77e2117f8673b011d69c07ade7f715493e1d14e205d122284114edc2fe9f19888933645931a093dd26a01 tomcat-juli-9.0.93.jar +280c000ec81893a87feaf7db46702d0b13599e82aedf1540597f69c609bd20a48d04f887131bc464c7f715911aa8c3b44a3502e1c736e5c7575765c419fdb064 tomcat-servlet-api-9.0.93.jar +55a47b7bc9cc3dc47d93f7850d60375d900f7aac3a7b16bdd06e01e4523672ea93cdac8660825307c6699bda8bb5e4f668b2d1d42967194dddceeb56b1acc004 tomcat-util-9.0.93.jar +db933442c1cbbc1e277cce2a374b17b37aea14b3a7f1a42f1bf7152af8edc0d876ee25ae5087f31e0b06474d1e2a134de36f3e3eaca0f04dac734deeecfd773e tomcat-util-scan-9.0.93.jar diff --git a/auto/modules/perl b/auto/modules/perl index 3c88ef0e..86232adc 100644 --- a/auto/modules/perl +++ b/auto/modules/perl @@ -67,6 +67,12 @@ if /bin/sh -c "$NXT_PERL -MConfig -e 'print \"Perl version: \", | sed -e 's/-arch i386//' -e 's/-arch x86_64//'` fi + if [ "$NXT_CC_NAME" = "clang" ]; then + # Perl's CFLAGS has -fwrapv which under clang discards our + # -fno-strict-overflow resulting in an unused argument error + NXT_PERL_CFLAGS="$NXT_PERL_CFLAGS -Qunused-arguments" + fi + nxt_feature="Perl" nxt_feature_name="" nxt_feature_run=no @@ -165,8 +171,9 @@ for nxt_src in $NXT_PERL_MODULE_SRCS; do cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - mkdir -p $NXT_BUILD_DIR/src/perl - \$(CC) -c \$(CFLAGS) $NXT_PERL_CFLAGS \$(NXT_INCS) $NXT_PERL_INCLUDE \\ + \$(v)mkdir -p $NXT_BUILD_DIR/src/perl + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) $NXT_PERL_CFLAGS \$(NXT_INCS) $NXT_PERL_INCLUDE \\ $nxt_dep_flags \\ -o $NXT_BUILD_DIR/$nxt_obj $nxt_src $nxt_dep_post @@ -188,7 +195,8 @@ all: ${NXT_PERL_MODULE} ${NXT_PERL_MODULE}: $NXT_BUILD_DIR/lib/unit/modules/${NXT_PERL_MODULE}.unit.so $NXT_BUILD_DIR/lib/unit/modules/${NXT_PERL_MODULE}.unit.so: $nxt_objs - \$(NXT_MODULE_LINK) -o \$@ $nxt_objs $NXT_PERL_LDOPTS $NXT_LD_OPT + \$(PP_LD) \$@ + \$(v)\$(NXT_MODULE_LINK) -o \$@ $nxt_objs $NXT_PERL_LDOPTS $NXT_LD_OPT install: ${NXT_PERL_MODULE}-install diff --git a/auto/modules/php b/auto/modules/php index a0f5379c..5bc7401b 100644 --- a/auto/modules/php +++ b/auto/modules/php @@ -244,7 +244,8 @@ for nxt_src in $NXT_PHP_MODULE_SRCS; do cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - \$(CC) -c \$(CFLAGS) $NXT_PHP_ADDITIONAL_FLAGS \$(NXT_INCS) \\ + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) $NXT_PHP_ADDITIONAL_FLAGS 
\$(NXT_INCS) \\ $NXT_PHP_INCLUDE -DNXT_ZEND_SIGNAL_STARTUP=$NXT_ZEND_SIGNAL_STARTUP \\ $nxt_dep_flags \\ -o $NXT_BUILD_DIR/$nxt_obj $nxt_src @@ -268,7 +269,8 @@ all: ${NXT_PHP_MODULE} ${NXT_PHP_MODULE}: $NXT_BUILD_DIR/lib/unit/modules/${NXT_PHP_MODULE}.unit.so $NXT_BUILD_DIR/lib/unit/modules/${NXT_PHP_MODULE}.unit.so: $nxt_objs - \$(NXT_MODULE_LINK) -o \$@ \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_MODULE_LINK) -o \$@ \\ $nxt_objs ${NXT_PHP_LIB} ${NXT_PHP_LDFLAGS} $NXT_LD_OPT diff --git a/auto/modules/python b/auto/modules/python index dfd632a1..05dc5132 100644 --- a/auto/modules/python +++ b/auto/modules/python @@ -191,8 +191,9 @@ for nxt_src in $NXT_PYTHON_MODULE_SRCS; do cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - mkdir -p $NXT_BUILD_DIR/src/python - \$(CC) -c \$(CFLAGS) -DNXT_PYTHON_MOUNTS_H=\"$NXT_PYTHON_MOUNTS_HEADER\" \\ + \$(v)mkdir -p $NXT_BUILD_DIR/src/python + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) -DNXT_PYTHON_MOUNTS_H=\"$NXT_PYTHON_MOUNTS_HEADER\" \\ \$(NXT_INCS) $NXT_PYTHON_INCLUDE \\ $nxt_dep_flags \\ -o $NXT_BUILD_DIR/$nxt_obj $nxt_src @@ -216,7 +217,8 @@ all: ${NXT_PYTHON_MODULE} ${NXT_PYTHON_MODULE}: $NXT_BUILD_DIR/lib/unit/modules/${NXT_PYTHON_MODULE}.unit.so $NXT_BUILD_DIR/lib/unit/modules/${NXT_PYTHON_MODULE}.unit.so: $nxt_objs - \$(NXT_MODULE_LINK) -o \$@ \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_MODULE_LINK) -o \$@ \\ $nxt_objs $NXT_PYTHON_LIBS $NXT_PYTHON_LDFLAGS $NXT_LD_OPT diff --git a/auto/modules/ruby b/auto/modules/ruby index 7a7c9bd3..331e2085 100644 --- a/auto/modules/ruby +++ b/auto/modules/ruby @@ -230,8 +230,9 @@ for nxt_src in $NXT_RUBY_MODULE_SRCS; do cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - mkdir -p $NXT_BUILD_DIR/src/ruby - \$(CC) -c \$(CFLAGS) $NXT_RUBY_CFLAGS -DNXT_RUBY_MOUNTS_H=\"$NXT_RUBY_MOUNTS_HEADER\" \\ + \$(v)mkdir -p $NXT_BUILD_DIR/src/ruby + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) $NXT_RUBY_CFLAGS -DNXT_RUBY_MOUNTS_H=\"$NXT_RUBY_MOUNTS_HEADER\" \\ \$(NXT_INCS) $NXT_RUBY_INCPATH \\ $nxt_dep_flags \\ -o \$@ $nxt_src @@ -254,7 +255,8 @@ all: ${NXT_RUBY_MODULE} ${NXT_RUBY_MODULE}: $NXT_BUILD_DIR/lib/unit/modules/${NXT_RUBY_MODULE}.unit.so $NXT_BUILD_DIR/lib/unit/modules/${NXT_RUBY_MODULE}.unit.so: $nxt_objs - \$(NXT_MODULE_LINK) -o \$@ $nxt_objs $NXT_RUBY_LIBS $NXT_LD_OPT + \$(PP_LD) \$@ + \$(v)\$(NXT_MODULE_LINK) -o \$@ $nxt_objs $NXT_RUBY_LIBS $NXT_LD_OPT install: ${NXT_RUBY_MODULE}-install diff --git a/auto/modules/wasm b/auto/modules/wasm index 1f388de6..ad672a1e 100644 --- a/auto/modules/wasm +++ b/auto/modules/wasm @@ -63,8 +63,7 @@ NXT_WASM_LDFLAGS= if [ "$NXT_WASM_RUNTIME" = "wasmtime" ]; then NXT_WASM_LDFLAGS=-lwasmtime fi -NXT_WASM_ADDITIONAL_FLAGS="-fno-strict-aliasing \ - -Wno-missing-field-initializers \ +NXT_WASM_ADDITIONAL_FLAGS="-Wno-missing-field-initializers \ -DNXT_HAVE_WASM_$(echo ${NXT_WASM_RUNTIME} | tr 'a-z' 'A-Z') \ " @@ -161,8 +160,9 @@ for nxt_src in $NXT_WASM_MODULE_SRCS; do cat << END >> $NXT_MAKEFILE $NXT_BUILD_DIR/$nxt_obj: $nxt_src $NXT_VERSION_H - mkdir -p $NXT_BUILD_DIR/src/wasm - \$(CC) -c \$(CFLAGS) $NXT_WASM_ADDITIONAL_FLAGS \$(NXT_INCS) \\ + \$(v)mkdir -p $NXT_BUILD_DIR/src/wasm + \$(PP_CC) \$@ + \$(v)\$(CC) -c \$(CFLAGS) $NXT_WASM_ADDITIONAL_FLAGS \$(NXT_INCS) \\ -I$NXT_WASM_INCLUDE_PATH \\ $nxt_dep_flags \\ -o $NXT_BUILD_DIR/$nxt_obj $nxt_src @@ -186,7 +186,8 @@ all: ${NXT_WASM_MODULE} ${NXT_WASM_MODULE}: $NXT_BUILD_DIR/lib/unit/modules/${NXT_WASM_MODULE}.unit.so $NXT_BUILD_DIR/lib/unit/modules/${NXT_WASM_MODULE}.unit.so: $nxt_objs - 
\$(NXT_MODULE_LINK) -o \$@ \\ + \$(PP_LD) \$@ + \$(v)\$(NXT_MODULE_LINK) -o \$@ \\ $nxt_objs -L${NXT_WASM_LIB_PATH} ${NXT_WASM_LDFLAGS} $NXT_LD_OPT diff --git a/auto/modules/wasm-wasi-component b/auto/modules/wasm-wasi-component index bfb6ffcb..287d2707 100644 --- a/auto/modules/wasm-wasi-component +++ b/auto/modules/wasm-wasi-component @@ -82,15 +82,22 @@ fi $echo " + $NXT_WCM_MODULE module: $NXT_WCM_MOD_NAME" -NXT_OS=$(uname -o) +NXT_OS=$(uname -s) if [ $NXT_OS = "Darwin" ]; then - NXT_CARGO_CMD="cargo rustc --release --manifest-path src/wasm-wasi-component/Cargo.toml -- --emit link=target/release/libwasm_wasi_component.so -C link-args='-undefined dynamic_lookup'" + NXT_CARGO_CMD="cargo rustc --release --manifest-path src/wasm-wasi-component/Cargo.toml -- --emit link=target/release/libwasm_wasi_component.so -C link-args='-undefined dynamic_lookup'" else - NXT_CARGO_CMD="cargo build --release --manifest-path src/wasm-wasi-component/Cargo.toml" + NXT_CARGO_CMD="cargo build --release --manifest-path src/wasm-wasi-component/Cargo.toml" fi +NXT_WCM_DEPS=" \ + build/src/nxt_unit.o \ + src/wasm-wasi-component/build.rs \ + src/wasm-wasi-component/wrapper.h \ + src/wasm-wasi-component/src/lib.rs \ +" + cat << END >> $NXT_MAKEFILE .PHONY: ${NXT_WCM_MODULE} @@ -101,8 +108,10 @@ all: ${NXT_WCM_MODULE} ${NXT_WCM_MODULE}: ${NXT_WCM_MOD_CARGO} -${NXT_WCM_MOD_CARGO}: build/src/nxt_unit.o +${NXT_WCM_MOD_CARGO}: ${NXT_WCM_DEPS} $NXT_CARGO_CMD + @install -p $NXT_WCM_MOD_CARGO \\ + build/lib/unit/modules/${NXT_WCM_MOD_NAME} install: ${NXT_WCM_MODULE}-install @@ -18,15 +18,15 @@ else NXT_NJS_LIBS="-lnjs" fi -nxt_feature="NJS" +nxt_feature="njs" nxt_feature_name=NXT_HAVE_NJS nxt_feature_run=no nxt_feature_incs="$NXT_NJS_CFLAGS $NXT_NJS_AUX_CFLAGS" nxt_feature_libs="$NXT_NJS_LIBS $NXT_NJS_AUX_LIBS" nxt_feature_test="#include <njs.h> - #if NJS_VERSION_NUMBER < 0x000800 - # error NJS < 0.8.0 is not supported. + #if NJS_VERSION_NUMBER < 0x000803 + # error njs < 0.8.3 is not supported. #endif int main(void) { @@ -44,7 +44,7 @@ nxt_feature_test="#include <njs.h> if [ $nxt_found = no ]; then $echo - $echo $0: error: no NJS library \>= 0.8.0 found. + $echo $0: error: no njs library \>= 0.8.3 found. $echo exit 1; fi diff --git a/auto/options b/auto/options index 0550c699..5be1ebe1 100644 --- a/auto/options +++ b/auto/options @@ -42,6 +42,8 @@ NXT_TEST_BUILD_HPUX_SENDFILE=NO NXT_TESTS=NO +NXT_FUZZ= + NXT_HELP=NO for nxt_option @@ -125,6 +127,8 @@ do --tests) NXT_TESTS=YES ;; + --fuzz=*) NXT_FUZZ="$value" ;; + --help) . auto/help exit 0 diff --git a/auto/sched b/auto/sched new file mode 100644 index 00000000..a6a4499b --- /dev/null +++ b/auto/sched @@ -0,0 +1,19 @@ +# Copyright (C) Andrew Clayton +# Copyright (C) NGINX, Inc. + + +nxt_feature="Linux sched_getaffinity()" +nxt_feature_name=NXT_HAVE_LINUX_SCHED_GETAFFINITY +nxt_feature_run=no +nxt_feature_incs= +nxt_feature_libs= +nxt_feature_test="#define _GNU_SOURCE + #include <sched.h> + + int main(void) { + cpu_set_t set; + + sched_getaffinity(0, sizeof(set), &set); + return 0; + }" +. 
auto/feature @@ -24,8 +24,7 @@ nxt_feature_test="#include <sys/mman.h> shm_unlink(name); - int fd = shm_open(name, O_CREAT | O_EXCL | O_RDWR, - S_IRUSR | S_IWUSR); + int fd = shm_open(name, O_CREAT | O_EXCL | O_RDWR, 0600); if (fd == -1) return 1; @@ -68,7 +67,7 @@ if [ $nxt_found = no ]; then shm_unlink(name); int fd = shm_open(name, O_CREAT | O_EXCL | O_RDWR, - S_IRUSR | S_IWUSR); + 0600); if (fd == -1) return 1; @@ -95,7 +94,7 @@ nxt_feature_test="#include <sys/mman.h> #include <sys/stat.h> int main(void) { - int fd = shm_open(SHM_ANON, O_RDWR, S_IRUSR | S_IWUSR); + int fd = shm_open(SHM_ANON, O_RDWR, 0600); if (fd == -1) return 1; diff --git a/auto/sources b/auto/sources index 6ee4d87b..dfabf7cf 100644 --- a/auto/sources +++ b/auto/sources @@ -41,7 +41,6 @@ NXT_LIB_SRCS=" \ src/nxt_murmur_hash.c \ src/nxt_lvlhsh.c \ src/nxt_array.c \ - src/nxt_vector.c \ src/nxt_list.c \ src/nxt_buf.c \ src/nxt_buf_pool.c \ @@ -308,6 +307,15 @@ if [ $NXT_TESTS = YES ]; then fi +NXT_FUZZ_SRCS=" \ + fuzzing/nxt_basic_fuzz.c \ + fuzzing/nxt_http_controller_fuzz.c \ + fuzzing/nxt_http_h1p_fuzz.c \ + fuzzing/nxt_http_h1p_peer_fuzz.c \ + fuzzing/nxt_json_fuzz.c \ +" + + NXT_SRCS=" \ src/nxt_main.c \ " diff --git a/auto/summary b/auto/summary index 3aa41669..b6caee6c 100644 --- a/auto/summary +++ b/auto/summary @@ -29,11 +29,13 @@ Unit configuration summary: Unix domain sockets support: $NXT_UNIX_DOMAIN TLS support: ............... $NXT_OPENSSL Regex support: ............. $NXT_REGEX - NJS support: ............... $NXT_NJS + njs support: ............... $NXT_NJS process isolation: ......... $NXT_ISOLATION cgroupv2: .................. $NXT_HAVE_CGROUP debug logging: ............. $NXT_DEBUG + fuzz engine: ............... "$NXT_FUZZ" + END @@ -57,6 +57,7 @@ esac mkdir -p $NXT_BUILD_DIR mkdir -p $NXT_BUILD_DIR/bin +mkdir -p $NXT_BUILD_DIR/fuzzing mkdir -p $NXT_BUILD_DIR/include mkdir -p $NXT_BUILD_DIR/lib mkdir -p $NXT_BUILD_DIR/lib/unit/modules @@ -67,7 +68,6 @@ mkdir -p $NXT_BUILD_DIR/src mkdir -p $NXT_BUILD_DIR/src/test mkdir -p $NXT_BUILD_DIR/var/lib/unit mkdir -p $NXT_BUILD_DIR/var/log/unit -mkdir -p $NXT_BUILD_DIR/var/run/unit > $NXT_AUTOCONF_ERR @@ -135,6 +135,7 @@ fi . auto/cgroup . auto/isolation . auto/capability +. auto/sched case "$NXT_SYSTEM_PLATFORM" in @@ -179,4 +180,5 @@ if [ $NXT_NJS != NO ]; then fi . auto/make +. auto/fuzzing . auto/summary diff --git a/docs/changes.xml b/docs/changes.xml index e291bad2..511d37e1 100644 --- a/docs/changes.xml +++ b/docs/changes.xml @@ -17,6 +17,175 @@ unit-jsc14 unit-jsc15 unit-jsc16 unit-jsc17 unit-jsc18 unit-jsc19 unit-jsc20 unit-jsc21 unit-wasm" + ver="1.33.0" rev="1" + date="" time="" + packager="Nginx Packaging <nginx-packaging@f5.com>"> + +<change> +<para> +NGINX Unit updated to 1.33.0. +</para> +</change> + +</changes> + + +<changes apply="unit" ver="1.33.0" rev="1" + date="" time="" + packager="Nginx Packaging <nginx-packaging@f5.com>"> + +<change type="feature"> +<para> +make the number of router threads configurable. +</para> +</change> + +<change type="feature"> +<para> +make the listen(2) backlog configurable. +</para> +</change> + +<change type="feature"> +<para> +add Python application factory support. +</para> +</change> + +<change type="feature"> +<para> +add experimental chunked request body support. (Disabled by default). +</para> +</change> + +<change type="feature"> +<para> +add fuzzing via oss-fuzz. +</para> +</change> + +<change type="feature"> +<para> +add "if" option to the "match" object. 
+</para> +</change> + +<change type="feature"> +<para> +show list of loaded language modules in the /status endpoint. +</para> +</change> + +<change type="feature"> +<para> +Unit ships with a new Rust based CLI application "unitctl". +</para> +</change> + +<change type="feature"> +<para> +the wasm-wasi-component language module now inherits the processes +environment. +</para> +</change> + +<change type="change"> +<para> +under systemd unit runs in forking mode (once again). +</para> +</change> + +<change type="change"> +<para> +if building with njs, version 0.8.3 or later is now required. +</para> +</change> + +<change type="change"> +<para> +Unit now builds with -std=gnu11 (C11 with GNU extensions). +</para> +</change> + +<change type="change"> +<para> +Unit now creates the full directory path for the PID file and control socket. +</para> +</change> + +<change type="change"> +<para> +build system improvements, including pretty printing the make output and +enabling various make variables to influence the build process (see: +make help). +</para> +</change> + +<change type="change"> +<para> +better detection of available runnable CPUs on Linux. +</para> +</change> + +<change type="change"> +<para> +default listen(2) backlog on Linux now defaults to Kernel default. +</para> +</change> + +<change type="bugfix"> +<para> +don't modify REQUEST_URI. +</para> +</change> + +<change type="bugfix"> +<para> +fix a crash when interrupting a download via a proxy. +</para> +</change> + +<change type="bugfix"> +<para> +wasm-wasi-component application process hangs after receiving restart signal +from the control endpoint. +</para> +</change> + +<change type="bugfix"> +<para> +njs variables accessed with a JS template literal should not be cacheable. +</para> +</change> + +<change type="bugfix"> +<para> +properly handle deleting arrays of certificates. +</para> +</change> + +<change type="bugfix"> +<para> +don't create the $runstatedir directory which triggered an Alpine packaging +error. +</para> +</change> + +</changes> + + +<changes apply="unit-php + unit-python unit-python2.7 + unit-python3.4 unit-python3.5 unit-python3.6 unit-python3.7 + unit-python3.8 unit-python3.9 unit-python3.10 unit-python3.11 + unit-python3.12 + unit-go + unit-perl + unit-ruby + unit-jsc-common unit-jsc8 unit-jsc10 unit-jsc11 unit-jsc13 + unit-jsc14 unit-jsc15 unit-jsc16 unit-jsc17 unit-jsc18 + unit-jsc19 unit-jsc20 unit-jsc21 + unit-wasm" ver="1.32.1" rev="1" date="2024-03-26" time="18:00:00 +0000" packager="Nginx Packaging <nginx-packaging@f5.com>"> @@ -120,7 +289,7 @@ conditional access logging. <change type="feature"> <para> -NJS variables access. +njs variables access. </para> </change> @@ -457,7 +626,7 @@ basic URI rewrite support. <change type="feature"> <para> -NJS loadable modules support. +njs loadable modules support. </para> </change> @@ -475,7 +644,7 @@ conditional logging of route selection. <change type="feature"> <para> -support the keys API on the request objects in NJS. +support the keys API on the request objects in njs. </para> </change> @@ -549,7 +718,7 @@ stop creating world-writeable directories. <change type="bugfix"> <para> -memory leak related to NJS. +memory leak related to njs. </para> </change> @@ -2503,26 +2672,6 @@ segmentation fault might have occurred if an irregular file was requested. 
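Several of the 1.33.0 changes listed above (the new `listen_threads` setting, the per-listener `backlog` option, and the list of loaded language modules under `/status`) are exposed through control-API endpoints documented in the `docs/unit-openapi.yaml` changes further below. A minimal sketch of driving them with curl, assuming a typical but install-specific control socket path and an example listener name:

```shell
# The control socket path varies between installs; adjust as needed.
UNIT_SOCK=/var/run/control.unit.sock

# Set the number of listen threads (integer value, see settings/listen_threads).
curl -X PUT -d '4' --unix-socket "$UNIT_SOCK" \
    http://localhost/config/settings/listen_threads

# Set the listen(2) backlog on an existing listener
# ("127.0.0.1:8080" is only an example listener name).
curl -X PUT -d '1024' --unix-socket "$UNIT_SOCK" \
    'http://localhost/config/listeners/127.0.0.1:8080/backlog'

# Show the loaded language modules reported under /status.
curl --unix-socket "$UNIT_SOCK" http://localhost/status/modules
```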
</changes> -<changes apply="unit - unit-php - unit-python unit-python2.7 - unit-python3.4 unit-python3.5 unit-python3.6 unit-python3.7 - unit-go unit-go1.7 unit-go1.8 unit-go1.9 unit-go1.10 unit-go1.11 - unit-perl - unit-ruby - unit-jsc-common unit-jsc8 unit-jsc10 unit-jsc11" - ver="1.11.0" rev="2" - date="2019-09-29" time="13:30:00 +0300" - packager="Konstantin Pavlov <thresh@nginx.com>"> - -<change> -<para> -Bump packaging version to fix repositories inconsistencies. -</para> -</change> - -</changes> - <changes apply="unit-php unit-python unit-python2.7 diff --git a/docs/unit-openapi.yaml b/docs/unit-openapi.yaml index 2eeb9cbb..37fca500 100644 --- a/docs/unit-openapi.yaml +++ b/docs/unit-openapi.yaml @@ -2791,6 +2791,72 @@ paths: "404": $ref: "#/components/responses/responseNotFound" + /config/listeners/{listenerName}/backlog: + summary: "Endpoint for the `listeners/{listenerName}/backlog` option" + get: + operationId: getListenerBacklog + summary: "Retrieve the backlog option in a listener" + description: "Retrieves the `backlog` option that configures the + listen(2) backlog parameter where the `{listenerName}` listener object + [passes its requests](https://unit.nginx.org/configuration/#listeners)." + + tags: + - listeners + - config + + parameters: + - $ref: "#/components/parameters/listenerName" + + responses: + "200": + description: "OK; the `backlog` option exists in the configuration." + content: + application/json: + schema: + type: integer + + examples: + example1: + $ref: "#/components/examples/configListenerBacklog" + + "404": + $ref: "#/components/responses/responseNotFound" + + put: + operationId: updateListenerBacklog + summary: "Update the backlog option in a listener" + description: "Overwrites the `backlog` option." + tags: + - listeners + - config + + parameters: + - $ref: "#/components/parameters/listenerName" + + requestBody: + required: true + content: + application/json: + schema: + type: integer + + examples: + example1: + $ref: "#/components/examples/configListenerBacklog" + + responses: + "200": + $ref: "#/components/responses/responseOkUpdated" + + "400": + $ref: "#/components/responses/responseBadRequest" + + "404": + $ref: "#/components/responses/responseNotFound" + + "500": + $ref: "#/components/responses/responseInternalError" + /config/routes: summary: "Endpoint for the `routes` entity in the configuration" get: @@ -2943,6 +3009,87 @@ paths: "404": $ref: "#/components/responses/responseNotFound" + /config/settings/listen_threads: + summary: "Endpoint for the `listen_threads` option in `settings`" + get: + operationId: getSettingsListenThreads + summary: "Retrieve the listen_threads option from settings" + description: "Retrieves the `listen_threads` option that represents + Unit's [listen threads] + (https://unit.nginx.org/configuration/#settings)." + + tags: + - settings + - config + + responses: + "200": + description: "OK; the `listen_threads` option exists in the + configuration." + + content: + application/json: + schema: + type: integer + + examples: + ListenThreads: + value: 16 + + "404": + $ref: "#/components/responses/responseNotFound" + + put: + operationId: updateSettingsListenThreads + summary: "Create or overwrite the listen_threads option" + description: "Creates or overwrites the `listen_threads` option in + the configuration." 
+ + tags: + - settings + - config + + requestBody: + required: true + content: + application/json: + schema: + type: integer + + examples: + ListenThreads: + value: 30 + + responses: + "200": + $ref: "#/components/responses/responseOkUpdated" + + "400": + $ref: "#/components/responses/responseBadRequest" + + "404": + $ref: "#/components/responses/responseNotFound" + + "500": + $ref: "#/components/responses/responseInternalError" + + delete: + operationId: deleteSettingsListenThreads + summary: "Delete the listen_threads option" + description: "Deletes the `listen_threads` option from the + configuration." + + tags: + - settings + - config + + responses: + "200": + $ref: "#/components/responses/responseOkDeleted" + + "404": + $ref: "#/components/responses/responseNotFound" + /config/settings/http: summary: "Endpoint for the `http` object in `settings`" @@ -3906,7 +4053,7 @@ paths: operationId: getStatus summary: "Retrieve the status object" description: "Retrieves the entire `/status` section that represents - Unit's [usage statistics](https://unit.nginx.org/usagestats/)." + Unit's [usage statistics and list of loaded modules](https://unit.nginx.org/statusapi/)." tags: - status @@ -3924,13 +4071,130 @@ paths: example1: $ref: "#/components/examples/status" + /status/modules: + summary: "Endpoint for the `modules` status object" + get: + operationId: getStatusModules + summary: "Retrieve the modules status object" + description: "Retrieves the `modules` status object that represents + Unit's [loaded language modules](https://unit.nginx.org/statusapi/)." + + tags: + - status + + responses: + "200": + description: "OK; the `modules` object exists in the configuration." + + content: + application/json: + schema: + $ref: "#/components/schemas/statusModules" + + examples: + example1: + $ref: "#/components/examples/statusModules" + + example2: + $ref: "#/components/examples/statusModulesArray" + + /status/modules/{langMod}: + summary: "Endpoint for the loaded language `module` object" + get: + operationId: getStatusModulesLang + summary: "Retrieve the language module object" + description: "Retrieves the language `module` object that represents a + currently loaded language module." + + tags: + - status + + parameters: + - $ref: "#/components/parameters/langMod" + + responses: + "200": + description: "OK; the language `module` object exists." + + content: + application/json: + schema: + $ref: "#/components/schemas/statusModulesLang" + + examples: + example1: + $ref: "#/components/examples/statusModulesLang" + + "404": + $ref: "#/components/responses/responseNotFound" + + /status/modules/{langMod}/version: + summary: "Endpoint for the loaded language module `version` object" + get: + operationId: getStatusModulesLangVersion + summary: "Retrieve the language module version object" + description: "Retrieves the language module `version` object that + represents the version of a currently loaded language module." + + tags: + - status + + parameters: + - $ref: "#/components/parameters/langMod" + + responses: + "200": + description: "OK; the language module `version` object exists." 
+ + content: + application/json: + schema: + $ref: "#/components/schemas/statusModulesLangVersion" + + examples: + example1: + $ref: "#/components/examples/statusModulesLangVersion" + + "404": + $ref: "#/components/responses/responseNotFound" + + /status/modules/{langMod}/lib: + summary: "Endpoint for the loaded language module `lib` object" + get: + operationId: getStatusModulesLangLib + summary: "Retrieves the language module lib object" + description: "Retrieves the language module `lib` object that represents + the file path to the loaded language module." + + tags: + - status + + parameters: + - $ref: "#/components/parameters/langMod" + + responses: + "200": + description: "OK; the language module `lib` object exists." + + content: + application/json: + schema: + $ref: "#/components/schemas/statusModulesLangLib" + + examples: + example1: + $ref: "#/components/examples/statusModulesLangLib" + + "404": + $ref: "#/components/responses/responseNotFound" + /status/connections: summary: "Endpoint for the `connections` status object" get: operationId: getStatusConnections summary: "Retrieve the connections status object" description: "Retrieves the `connections` status object that represents - Unit's [connection statistics](https://unit.nginx.org/usagestats/)." + Unit's [connection statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -3955,7 +4219,7 @@ paths: operationId: getStatusConnectionsAccepted summary: "Retrieve the accepted connections number" description: "Retrieves the `accepted` connections number that represents - Unit's [connection statistics](https://unit.nginx.org/usagestats/)." + Unit's [connection statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -3979,7 +4243,7 @@ paths: operationId: getStatusConnectionsActive summary: "Retrieve the active connections number" description: "Retrieves the `active` connections number that represents - Unit's [connection statistics](https://unit.nginx.org/usagestats/)." + Unit's [connection statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4003,7 +4267,7 @@ paths: operationId: getStatusConnectionsIdle summary: "Retrieve the idle connections number" description: "Retrieves the `idle` connections number that represents - Unit's [connection statistics](https://unit.nginx.org/usagestats/)." + Unit's [connection statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4026,7 +4290,7 @@ paths: operationId: getStatusConnectionsClosed summary: "Retrieve the closed connections number" description: "Retrieves the `closed` connections number that represents - Unit's [connection statistics](https://unit.nginx.org/usagestats/)." + Unit's [connection statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4051,7 +4315,7 @@ paths: summary: "Retrieve the requests status object" description: "Retrieves the `requests` status object that represents Unit's instance [request statistics] - (https://unit.nginx.org/usagestats/)." + (https://unit.nginx.org/statusapi/)." tags: - status @@ -4075,7 +4339,7 @@ paths: operationId: getStatusRequestsTotal summary: "Retrieve the total requests number" description: "Retrieves the `total` requests number that represents Unit's - instance [request statistics](https://unit.nginx.org/usagestats/)." + instance [request statistics](https://unit.nginx.org/statusapi/)." 
tags: - status @@ -4100,7 +4364,7 @@ paths: summary: "Retrieve the applications status object" description: "Retrieves the `applications` status object that represents Unit's per-app - [process and request statistics](https://unit.nginx.org/usagestats/)." + [process and request statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4125,7 +4389,7 @@ paths: summary: "Retrieve the app status object" description: "Retrieves the app status object that represents Unit's per-app - [process and request statistics](https://unit.nginx.org/usagestats/)." + [process and request statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4156,7 +4420,7 @@ paths: summary: "Retrieve the processes app status object" description: "Retrieves the `processes` app status object that represents Unit's per-app - [process statistics](https://unit.nginx.org/usagestats/)." + [process statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4186,7 +4450,7 @@ paths: summary: "Retrieve the running processes app status number" description: "Retrieves the `running` processes number that represents Unit's per-app - [process statistics](https://unit.nginx.org/usagestats/)." + [process statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4217,7 +4481,7 @@ paths: summary: "Retrieve the starting processes app status number" description: "Retrieves the `starting` processes number that represents Unit's per-app - [process statistics](https://unit.nginx.org/usagestats/)." + [process statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4248,7 +4512,7 @@ paths: summary: "Retrieve the idle processes app status number" description: "Retrieves the `idle` processes number that represents Unit's per-app - [process statistics](https://unit.nginx.org/usagestats/)." + [process statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4278,7 +4542,7 @@ paths: summary: "Retrieve the requests app status object" description: "Retrieves the `requests` app status object that represents Unit's per-app - [request statistics](https://unit.nginx.org/usagestats/)." + [request statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4306,7 +4570,7 @@ paths: summary: "Retrieve the active requests app status number" description: "Retrieves the `active` requests number that represents Unit's per-app - [request statistics](https://unit.nginx.org/usagestats/)." + [request statistics](https://unit.nginx.org/statusapi/)." tags: - status @@ -4366,6 +4630,14 @@ components: schema: type: string + langMod: + in: path + description: "A language modules name in the modules status." 
+ name: langMod + required: true + schema: + type: string + listenerName: in: path description: "Listener name; a unique combination of a host IP address @@ -4730,6 +5002,9 @@ components: isolation: rootfs: "/www/" + wasiapp: + type: "wasm-wasi-component" + # /config/listeners configListeners: summary: "Multiple listeners" @@ -4833,6 +5108,11 @@ components: summary: "App target destination in a listener" value: "applications/php_app/index_target" + # /config/listeners/listenerName}/backlog + configListenerBacklog: + summary: "listen(2) backlog in a listener" + value: 1024 + # /config/listeners/{listenerName}/tls configListenerTls: summary: "TLS object in a listener" @@ -4990,6 +5270,10 @@ components: status: summary: "Regular status object" value: + modules: + php: + version: "8.3.4" + lib: "/opt/unit/modules/php.unit.so" connections: accepted: 1067 active: 13 @@ -5006,6 +5290,45 @@ components: requests: active: 15 + # /status/modules + statusModules: + summary: "Loaded language modules status object" + value: + php: + version: "8.3.4" + lib: "/opt/unit/modules/php.unit.so" + + statusModulesArray: + summary: "Loaded language modules status array" + value: + php: + version: "8.3.4" + lib: "/opt/unit/modules/php.unit.so" + python: + - { version: "3.12.3", lib: "/opt/unit/modules/python.unit.so" } + - { version: "3.11.1", lib: "/opt/unit/modules/python-3.11.1.unit.so" } + wasm: + version: "0.2" + lib: "/opt/unit/modules/wasm.unit.so" + + # /status/modules/{langMod} + statusModulesLang: + summary: "Object or array of objects of specified language module" + value: + python: + version: "3.12.3" + lib: "/opt/unit/modules/python.unit.so" + + # /status/modules/{langMod}/version + statusModulesLangVersion: + summary: "String describing the version of the language module" + value: "3.12.3" + + # /status/modules/{langMod}/lib + statusModulesLangLib: + summary: "String describing the path to the loaded language module" + value: "/opt/unit/modules/python.unit.so" + # /status/connections statusConnections: summary: "Regular connections status object" @@ -5304,6 +5627,8 @@ components: - $ref: "#/components/schemas/configApplicationPHP" - $ref: "#/components/schemas/configApplicationPython" - $ref: "#/components/schemas/configApplicationRuby" + - $ref: "#/components/schemas/configApplicationWasm" + - $ref: "#/components/schemas/configApplicationWasi" discriminator: propertyName: type @@ -5314,6 +5639,8 @@ components: php: "#/components/schemas/configApplicationPHP" python: "#/components/schemas/configApplicationPython" ruby: "#/components/schemas/configApplicationRuby" + wasm: "#/components/schemas/configApplicationWasm" + wasm-wasi-component: "#/components/schemas/configApplicationWasi" # ABSTRACT BASE SCHEMA, NOT PRESENT IN THE CONFIGURATION; STORES COMMON OPTIONS configApplicationCommon: @@ -5326,7 +5653,7 @@ components: type: type: string description: "Application type and language version." - enum: [external, java, perl, php, python, ruby] + enum: [external, java, perl, php, python, ruby, wasm, wasm-wasi-component] environment: type: object @@ -5592,6 +5919,82 @@ components: description: "Number of worker threads per app process." default: 1 + configApplicationWasm: + description: "WASM application on Unit." + allOf: + - $ref: "#/components/schemas/configApplicationCommon" + - type: object + required: + - module + - request_handler + - malloc_handler + - free_handler + + properties: + module: + type: string + description: "Path to WebAssembly module." 
+ + request_handler: + type: string + description: "Name of request handling function." + + malloc_handler: + type: string + description: "Name of memory allocator function." + + free_handler: + type: string + description: "Name of memory free function." + + access: + type: object + properties: + filesystem: + $ref: "#/components/schemas/stringArray" + description: "Host directories this application may have access to." + + module_init_handler: + type: string + description: "Name of function called to initialize module." + + module_end_handler: + type: string + description: "Name of function called to teardown module." + + request_init_handler: + type: string + description: "Name of function called to initialize request." + + request_end_handler: + type: string + description: "Name of function called to teardown request." + + response_end_handler: + type: string + description: "Name of function called to teardown response." + + + configApplicationWasi: + description: "WASI application on Unit." + allOf: + - $ref: "#/components/schemas/configApplicationCommon" + - type: object + required: + - component + + properties: + component: + type: string + description: "Path to wasm wasi component application." + + access: + type: object + properties: + filesystem: + $ref: "#/components/schemas/stringArray" + description: "Host directories this application may have access to." + configApplicationPHP: description: "PHP application on Unit." allOf: @@ -6236,9 +6639,13 @@ components: # /status status: - description: "Represents Unit's usage statistics." + description: "Represents Unit's loaded language modules and usage + statistics." type: object properties: + modules: + $ref: "#/components/schemas/statusModules" + connections: $ref: "#/components/schemas/statusConnections" @@ -6248,6 +6655,29 @@ components: applications: $ref: "#/components/schemas/statusApplications" + # /status/modules + statusModules: + description: "Lists currently loaded language modules." + type: object + + # /status/modules/{langMod} + statusModulesLang: + description: "Lists currently loaded versions of the specified language + module." + oneOf: + - type: string + - type: object + + # /status/modules/{langMod}/version + statusModulesLangVersion: + description: "Describes the version of the specified language module." + type: string + + # /status/modules/{langMod}/lib + statusModulesLangLib: + description: "Describes the path to the specified language module." + type: string + # /status/applications statusApplications: description: "Lists Unit's application process and request statistics." @@ -6372,7 +6802,7 @@ tags: - name: status description: Everything about the /status section in Unit's control API externalDocs: - url: https://unit.nginx.org/usagestats/ + url: https://unit.nginx.org/statusapi/ - name: tls description: Everything about SSL/TLS in Unit's control API diff --git a/fuzzing/.gitattributes b/fuzzing/.gitattributes new file mode 100644 index 00000000..70e0757b --- /dev/null +++ b/fuzzing/.gitattributes @@ -0,0 +1 @@ +*.bin whitespace=-blank-at-eol,-blank-at-eof diff --git a/fuzzing/README.md b/fuzzing/README.md new file mode 100644 index 00000000..9c70c801 --- /dev/null +++ b/fuzzing/README.md @@ -0,0 +1,68 @@ +# Fuzzing unit + +These tests are generally advised to run only on GNU/Linux. + +## Build fuzzers using libFuzzer. + +Running `sh fuzzing/build-fuzz.sh` can build all the fuzzers with standard +`ASan` and `UBSan`. + +### More comprehensive How-to Guide. 
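+
+Besides the default invocations shown below, the fuzz targets are plain
+libFuzzer binaries and accept the usual libFuzzer runtime options. As a small
+sketch (assuming the fuzzers have already been built as described in the
+following subsections), the HTTP fuzzers can be run with the dictionary from
+`fuzzing/fuzz_http.dict` and a time limit:
+
+```shell
+$ ./build/fuzz_http_h1p build/fuzz_http_h1p_seed fuzzing/fuzz_http_seed_corpus \
+      -dict=fuzzing/fuzz_http.dict -max_total_time=60
+```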
+
+#### Export the flags used to build Unit for fuzzing.
+
+Note that any of the following sanitizers can be added to `CFLAGS` and
+`CXXFLAGS`:
+
+- [AddressSanitizer](https://clang.llvm.org/docs/AddressSanitizer.html),
+  [ThreadSanitizer](https://clang.llvm.org/docs/ThreadSanitizer.html),
+  [MemorySanitizer](https://clang.llvm.org/docs/MemorySanitizer.html),
+  [UndefinedBehaviorSanitizer](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html),
+  [LeakSanitizer](https://clang.llvm.org/docs/LeakSanitizer.html).
+
+```shell
+$ export CC=clang
+$ export CXX=clang++
+$ export CFLAGS="-g -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION -fsanitize=fuzzer-no-link"
+$ export CXXFLAGS="-g -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION -fsanitize=fuzzer-no-link"
+$ export LIB_FUZZING_ENGINE="-fsanitize=fuzzer"
+```
+
+#### Build Unit for fuzzing.
+
+```shell
+$ ./configure --no-regex --no-pcre2 --fuzz=$LIB_FUZZING_ENGINE
+$ make fuzz -j$(nproc)
+```
+
+#### Run the fuzzers.
+
+```shell
+$ mkdir -p build/fuzz_basic_seed
+$ mkdir -p build/fuzz_http_controller_seed
+$ mkdir -p build/fuzz_http_h1p_seed
+$ mkdir -p build/fuzz_http_h1p_peer_seed
+$ mkdir -p build/fuzz_json_seed
+
+$ ./build/fuzz_basic build/fuzz_basic_seed fuzzing/fuzz_basic_seed_corpus
+$ ./build/fuzz_http_controller build/fuzz_http_controller_seed fuzzing/fuzz_http_seed_corpus
+$ ./build/fuzz_http_h1p build/fuzz_http_h1p_seed fuzzing/fuzz_http_seed_corpus
+$ ./build/fuzz_http_h1p_peer build/fuzz_http_h1p_peer_seed fuzzing/fuzz_http_seed_corpus
+$ ./build/fuzz_json build/fuzz_json_seed fuzzing/fuzz_json_seed_corpus
+```
+
+More information about [libFuzzer](https://llvm.org/docs/LibFuzzer.html) is
+available in the LLVM documentation.
+
+## Build fuzzers using other fuzzing engines.
+
+- [Honggfuzz](https://github.com/google/honggfuzz/blob/master/docs/PersistentFuzzing.md).
+- [AFLplusplus](https://github.com/AFLplusplus/AFLplusplus/blob/stable/utils/aflpp_driver/README.md).
+
+
+## Requirements.
+
+You will likely need at least the following packages installed (package names
+may vary).
+ +``` +clang, llvm & compiler-rt +``` diff --git a/fuzzing/build-fuzz.sh b/fuzzing/build-fuzz.sh new file mode 100644 index 00000000..62f7a676 --- /dev/null +++ b/fuzzing/build-fuzz.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +export CC=clang +export CXX=clang++ +export CFLAGS="-g -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION -fsanitize=address,undefined -fsanitize=fuzzer-no-link" +export CXXFLAGS="-g -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION -fsanitize=address,undefined -fsanitize=fuzzer-no-link" +export LIB_FUZZING_ENGINE="-fsanitize=fuzzer" + +./configure --no-regex --no-pcre2 --fuzz=$LIB_FUZZING_ENGINE +make fuzz -j$(nproc) + +mkdir -p build/fuzz_basic_seed +mkdir -p build/fuzz_http_controller_seed +mkdir -p build/fuzz_http_h1p_seed +mkdir -p build/fuzz_http_h1p_peer_seed +mkdir -p build/fuzz_json_seed + +echo "" +echo "Run: ./build/\${fuzzer} build/\${fuzzer}_seed fuzzing/\${fuzzer}_seed_corpus" +echo "" diff --git a/fuzzing/fuzz_basic_seed_corpus/base64_0.bin b/fuzzing/fuzz_basic_seed_corpus/base64_0.bin Binary files differnew file mode 100644 index 00000000..71501405 --- /dev/null +++ b/fuzzing/fuzz_basic_seed_corpus/base64_0.bin diff --git a/fuzzing/fuzz_basic_seed_corpus/term_0.bin b/fuzzing/fuzz_basic_seed_corpus/term_0.bin Binary files differnew file mode 100644 index 00000000..c7fff416 --- /dev/null +++ b/fuzzing/fuzz_basic_seed_corpus/term_0.bin diff --git a/fuzzing/fuzz_basic_seed_corpus/term_1.bin b/fuzzing/fuzz_basic_seed_corpus/term_1.bin Binary files differnew file mode 100644 index 00000000..bd03def0 --- /dev/null +++ b/fuzzing/fuzz_basic_seed_corpus/term_1.bin diff --git a/fuzzing/fuzz_basic_seed_corpus/utf8_0.bin b/fuzzing/fuzz_basic_seed_corpus/utf8_0.bin Binary files differnew file mode 100644 index 00000000..d395758e --- /dev/null +++ b/fuzzing/fuzz_basic_seed_corpus/utf8_0.bin diff --git a/fuzzing/fuzz_http.dict b/fuzzing/fuzz_http.dict new file mode 100644 index 00000000..0a198414 --- /dev/null +++ b/fuzzing/fuzz_http.dict @@ -0,0 +1,38 @@ +"Accept-Encoding" +"Accept-Language" +"Accept" +"Authorization" +"Cache-Control" +"Connection" +"Content-Length" +"Content-Range" +"Content-Type" +"Cookie" +"Date" +"Expect" +"Host" +"If-Match" +"If-Modified-Since" +"If-None-Match" +"If-Range" +"If-Unmodified-Since" +"Keep-Alive" +"Origin" +"Pragma" +"Range" +"Referer" +"Sec-WebSocket-Key" +"Sec-WebSocket-Version" +"Server" +"TE" +"Transfer-Encoding" +"Upgrade-Insecure-Requests" +"Upgrade" +"User-Agent" +"Via" +"X-Forwarded-For" +"X-Forwarded-Host" +"X-Forwarded-Proto" +"X-Http-Method-Override" +"X-Real-IP" +"X-Request-ID" diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_bench.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_bench.bin new file mode 100644 index 00000000..64e2f7e8 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_bench.bin @@ -0,0 +1,16 @@ +POST /path/to/very/interesting/article/on.this.site?arg1=value&arg2=value2&very_big_arg=even_bigger_value HTTP/1.1
+Host: www.example.com
+User-Agent: Mozilla/5.0 (X11; Gentoo Linux x86_64; rv:42.0) Firefox/42.0
+Accept: text/html,application/json,application/xml;q=0.9,*/*;q=0.8
+Accept-Language: ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4
+Accept-Encoding: gzip, deflate, br
+If-Modified-Since: Wed, 31 Dec 1986 16:00:00 GMT
+Referer: https://example.org/path/to/not-interesting/article.html
+Cookie: name=value; name2=value2; some_big_cookie=Olr+/9hoA0og/dAcHH1p8sEFAHAAAAAElFTkSuQmCC
+Connection: keep-alive
+Content-Length: 0
+Upgrade-Insecure-Requests: 1
+Pragma: no-cache
+Cache-Control: no-cache
+X-Forwarded-For: 192.0.2.0, 198.51.100.0, 203.0.113.0
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_0.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_0.bin new file mode 100644 index 00000000..00ff084d --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_0.bin @@ -0,0 +1,2 @@ +XXX-METHOD /d.ir/fi+le.ext?key=val HTTP/1.2 + diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_1.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_1.bin new file mode 100644 index 00000000..2f6c6149 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_1.bin @@ -0,0 +1,2 @@ +GEt / HTTP/1.0
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_10.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_10.bin new file mode 100644 index 00000000..03337016 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_10.bin @@ -0,0 +1,2 @@ +GET /na %20me.ext?args HTTP/1.0
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_11.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_11.bin new file mode 100644 index 00000000..bac5bc27 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_11.bin @@ -0,0 +1,2 @@ +GET / HTTP/1.0 HTTP/1.1
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_12.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_12.bin new file mode 100644 index 00000000..75bd72e2 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_12.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+Host:example.com
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_13.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_13.bin new file mode 100644 index 00000000..2216ec8d --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_13.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+Host:
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_14.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_14.bin new file mode 100644 index 00000000..12435096 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_14.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+:Host: example.com
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_15.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_15.bin new file mode 100644 index 00000000..88bb36f4 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_15.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+Ho_st: example.com
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_16.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_16.bin new file mode 100644 index 00000000..ce7453c2 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_16.bin @@ -0,0 +1,4 @@ +GET / HTTP/1.1
+Ho +st: example.com
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_17.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_17.bin new file mode 100644 index 00000000..5016e0df --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_17.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+Host: exa
mple.com
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_18.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_18.bin new file mode 100644 index 00000000..d2409a88 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_18.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+Host: example.com
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_19.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_19.bin new file mode 100644 index 00000000..40e39921 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_19.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+!#$%&'*+.^_`|~: allowed
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_2.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_2.bin new file mode 100644 index 00000000..cfc0d81a --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_2.bin @@ -0,0 +1,3 @@ +GET / + HTTP/1.0
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_20.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_20.bin new file mode 100644 index 00000000..b1deb571 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_20.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+Host: xn--e1afmkfd.xn--80akhbyknj4f
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_21.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_21.bin new file mode 100644 index 00000000..89565fd8 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_21.bin @@ -0,0 +1,4 @@ +GET / HTTP/1.1
+Host: exa +mple.com
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_22.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_22.bin new file mode 100644 index 00000000..3e0f8f6a --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_22.bin @@ -0,0 +1,3 @@ +GET / HTTP/1.1
+Host: exa mple.com
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_23.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_23.bin new file mode 100644 index 00000000..da0661e5 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_23.bin @@ -0,0 +1,5 @@ +GET / HTTP/1.1
+X-Unknown-Header: value
+X-Good-Header: value
+!#$%&'*+.^_`|~: skipped
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_24.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_24.bin new file mode 100644 index 00000000..6b5232e3 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_24.bin @@ -0,0 +1,5 @@ +GET / HTTP/1.1
+X-Good-Header: value
+X-Unknown-Header: value
+X-Bad-Header: value
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_3.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_3.bin new file mode 100644 index 00000000..20afdfb4 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_3.bin @@ -0,0 +1 @@ +GET / HTTP/1.0
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_4.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_4.bin new file mode 100644 index 00000000..22b52346 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_4.bin @@ -0,0 +1 @@ +GET / HTTP/2.0
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_5.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_5.bin new file mode 100644 index 00000000..2da59689 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_5.bin @@ -0,0 +1,2 @@ +GET /. HTTP/1.0
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_6.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_6.bin new file mode 100644 index 00000000..9cf4c094 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_6.bin @@ -0,0 +1,2 @@ +GET /# HTTP/1.0
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_7.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_7.bin new file mode 100644 index 00000000..d02576e0 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_7.bin @@ -0,0 +1,2 @@ +GET /?# HTTP/1.0
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_8.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_8.bin new file mode 100644 index 00000000..fa246dc4 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_8.bin @@ -0,0 +1,2 @@ +GET // HTTP/1.0
+
diff --git a/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_9.bin b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_9.bin new file mode 100644 index 00000000..2668f283 --- /dev/null +++ b/fuzzing/fuzz_http_seed_corpus/nxt_http_test_run_9.bin @@ -0,0 +1,2 @@ +GET /%20 HTTP/1.0
+
diff --git a/fuzzing/fuzz_json_seed_corpus/json_0.bin b/fuzzing/fuzz_json_seed_corpus/json_0.bin new file mode 100644 index 00000000..450e2283 --- /dev/null +++ b/fuzzing/fuzz_json_seed_corpus/json_0.bin @@ -0,0 +1 @@ +[{"container": 1000, "host": 0, "size": 1},{"container": 10000, "host": 10000, "size": 1}, {"container": 60000, "host": 60000, "size": 1}]
\ No newline at end of file diff --git a/fuzzing/fuzz_json_seed_corpus/json_1.bin b/fuzzing/fuzz_json_seed_corpus/json_1.bin new file mode 100644 index 00000000..0637a088 --- /dev/null +++ b/fuzzing/fuzz_json_seed_corpus/json_1.bin @@ -0,0 +1 @@ +[]
\ No newline at end of file diff --git a/fuzzing/fuzz_json_seed_corpus/json_2.bin b/fuzzing/fuzz_json_seed_corpus/json_2.bin new file mode 100644 index 00000000..5c1201b8 --- /dev/null +++ b/fuzzing/fuzz_json_seed_corpus/json_2.bin @@ -0,0 +1 @@ +[{"container": 0, "host": 0, "size": 1}]
\ No newline at end of file diff --git a/fuzzing/fuzz_json_seed_corpus/json_3.bin b/fuzzing/fuzz_json_seed_corpus/json_3.bin new file mode 100644 index 00000000..e040c9b7 --- /dev/null +++ b/fuzzing/fuzz_json_seed_corpus/json_3.bin @@ -0,0 +1 @@ +[{"container": 1000, "host": 0, "size": 1}]
\ No newline at end of file diff --git a/fuzzing/fuzz_json_seed_corpus/json_4.bin b/fuzzing/fuzz_json_seed_corpus/json_4.bin new file mode 100644 index 00000000..30a2bc50 --- /dev/null +++ b/fuzzing/fuzz_json_seed_corpus/json_4.bin @@ -0,0 +1 @@ +[{"container": 0, "host": 1000, "size": 1}, {"container": 1000, "host": 2000, "size": 1}]
\ No newline at end of file diff --git a/fuzzing/nxt_basic_fuzz.c b/fuzzing/nxt_basic_fuzz.c new file mode 100644 index 00000000..5f71a909 --- /dev/null +++ b/fuzzing/nxt_basic_fuzz.c @@ -0,0 +1,236 @@ +/* + * Copyright (C) NGINX, Inc. + */ + +#include <nxt_main.h> +#include <nxt_sha1.h> +#include <nxt_websocket.h> +#include <nxt_websocket_header.h> + +/* DO NOT TRY THIS AT HOME! */ +#include <nxt_websocket_accept.c> + + +#define KMININPUTLENGTH 4 +#define KMAXINPUTLENGTH 128 + + +extern int LLVMFuzzerInitialize(int *argc, char ***argv); +extern int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size); + +void nxt_base64_fuzz(const u_char *data, size_t size); +void nxt_djb_hash_fuzz(const u_char *data, size_t size); +void nxt_murmur_hash2_fuzz(const u_char *data, size_t size); +void nxt_parse_fuzz(const u_char *data, size_t size); +void nxt_sha1_fuzz(const u_char *data, size_t size); +void nxt_sha1_update_fuzz(const u_char *data, size_t size); +void nxt_term_fuzz(const u_char *data, size_t size); +void nxt_time_fuzz(const u_char *data, size_t size); +void nxt_uri_fuzz(const u_char *data, size_t size); +void nxt_utf8_fuzz(const u_char *data, size_t size); +void nxt_websocket_base64_fuzz(const u_char *data, size_t size); +void nxt_websocket_frame_fuzz(const u_char *data, size_t size); + + +extern char **environ; + + +int +LLVMFuzzerInitialize(int *argc, char ***argv) +{ + if (nxt_lib_start("fuzzing", NULL, &environ) != NXT_OK) { + return NXT_ERROR; + } + + return 0; +} + + +int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + if (size < KMININPUTLENGTH || size > KMAXINPUTLENGTH) { + return 0; + } + + nxt_base64_fuzz(data, size); + nxt_djb_hash_fuzz(data, size); + nxt_murmur_hash2_fuzz(data, size); + nxt_parse_fuzz(data, size); + nxt_sha1_fuzz(data, size); + nxt_sha1_update_fuzz(data, size); + nxt_term_fuzz(data, size); + nxt_time_fuzz(data, size); + nxt_uri_fuzz(data, size); + nxt_utf8_fuzz(data, size); + nxt_websocket_base64_fuzz(data, size); + nxt_websocket_frame_fuzz(data, size); + + return 0; +} + + +void +nxt_base64_fuzz(const u_char *data, size_t size) +{ + u_char buf[256]; + ssize_t ret; + + /* + * Validate base64 data before decoding. 
+ */ + ret = nxt_base64_decode(NULL, (u_char *)data, size); + if (ret == NXT_ERROR) { + return; + } + + nxt_base64_decode(buf, (u_char *)data, size); +} + + +void +nxt_djb_hash_fuzz(const u_char *data, size_t size) +{ + nxt_djb_hash(data, size); + nxt_djb_hash_lowcase(data, size); +} + + +void +nxt_murmur_hash2_fuzz(const u_char *data, size_t size) +{ + nxt_murmur_hash2(data, size); + nxt_murmur_hash2_uint32(data); +} + + +void +nxt_parse_fuzz(const u_char *data, size_t size) +{ + nxt_str_t input; + + input.start = (u_char *)data; + input.length = size; + + nxt_int_parse(data, size); + nxt_size_t_parse(data, size); + nxt_size_parse(data, size); + nxt_off_t_parse(data, size); + nxt_str_int_parse(&input); + nxt_number_parse(&data, data + size); +} + + +void +nxt_sha1_fuzz(const u_char *data, size_t size) +{ + u_char bin_accept[20]; + nxt_sha1_t ctx; + + nxt_sha1_init(&ctx); + nxt_sha1_update(&ctx, data, size); + nxt_sha1_final(bin_accept, &ctx); +} + + +void +nxt_sha1_update_fuzz(const u_char *data, size_t size) +{ + u_char bin_accept[20]; + nxt_sha1_t ctx; + + nxt_sha1_init(&ctx); + nxt_sha1_update(&ctx, data, size); + nxt_sha1_update(&ctx, data, size); + nxt_sha1_final(bin_accept, &ctx); +} + + +void +nxt_term_fuzz(const u_char *data, size_t size) +{ + nxt_term_parse(data, size, 0); + nxt_term_parse(data, size, 1); +} + + +void +nxt_time_fuzz(const u_char *data, size_t size) +{ + nxt_time_parse(data, size); +} + + +void +nxt_uri_fuzz(const u_char *data, size_t size) +{ + u_char *dst; + + dst = nxt_zalloc(size * 3); + if (dst == NULL) { + return; + } + + nxt_decode_uri(dst, (u_char *)data, size); + nxt_decode_uri_plus(dst, (u_char *)data, size); + + nxt_memzero(dst, size * 3); + nxt_encode_uri(NULL, (u_char *)data, size); + nxt_encode_uri(dst, (u_char *)data, size); + + nxt_free(dst); +} + + +void +nxt_utf8_fuzz(const u_char *data, size_t size) +{ + const u_char *in; + + in = data; + nxt_utf8_decode(&in, data + size); + + nxt_utf8_casecmp((const u_char *)"ABC АБВ ΑΒΓ", + data, + nxt_length("ABC АБВ ΑΒΓ"), + size); +} + + +void +nxt_websocket_base64_fuzz(const u_char *data, size_t size) +{ + u_char *out; + + out = nxt_zalloc(size * 2); + if (out == NULL) { + return; + } + + nxt_websocket_base64_encode(out, data, size); + + nxt_free(out); +} + + +void +nxt_websocket_frame_fuzz(const u_char *data, size_t size) +{ + u_char *input; + + /* + * Resolve overwrites-const-input by using a copy of the data. + */ + input = nxt_malloc(size); + if (input == NULL) { + return; + } + + nxt_memcpy(input, data, size); + + nxt_websocket_frame_init(input, 0); + nxt_websocket_frame_header_size(input); + nxt_websocket_frame_payload_len(input); + + nxt_free(input); +} diff --git a/fuzzing/nxt_http_controller_fuzz.c b/fuzzing/nxt_http_controller_fuzz.c new file mode 100644 index 00000000..25527ae1 --- /dev/null +++ b/fuzzing/nxt_http_controller_fuzz.c @@ -0,0 +1,98 @@ +/* + * Copyright (C) NGINX, Inc. + */ + +#include <nxt_main.h> + +/* DO NOT TRY THIS AT HOME! 
*/ +#include "nxt_controller.c" + + +#define KMININPUTLENGTH 2 +#define KMAXINPUTLENGTH 1024 + + +extern int LLVMFuzzerInitialize(int *argc, char ***argv); +extern int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size); + + +extern char **environ; + + +int +LLVMFuzzerInitialize(int *argc, char ***argv) +{ + nxt_int_t ret; + + if (nxt_lib_start("fuzzing", NULL, &environ) != NXT_OK) { + return NXT_ERROR; + } + + ret = nxt_http_fields_hash(&nxt_controller_fields_hash, + nxt_controller_request_fields, + nxt_nitems(nxt_controller_request_fields)); + if (ret != NXT_OK) { + return NXT_ERROR; + } + + return 0; +} + + +int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + nxt_mp_t *mp; + nxt_int_t rc; + nxt_buf_mem_t buf; + nxt_controller_request_t *req; + nxt_http_request_parse_t rp; + + if (size < KMININPUTLENGTH || size > KMAXINPUTLENGTH) { + return 0; + } + + mp = nxt_mp_create(1024, 128, 256, 32); + if (mp == NULL) { + return 0; + } + + req = nxt_mp_zget(mp, sizeof(nxt_controller_request_t)); + if (req == NULL) { + goto failed; + } + + req->conn = nxt_mp_zget(mp, sizeof(nxt_conn_t)); + if (req->conn == NULL) { + goto failed; + } + + buf.start = (u_char *)data; + buf.end = (u_char *)data + size; + buf.pos = buf.start; + buf.free = buf.end; + + nxt_main_log.level = NXT_LOG_ALERT; + req->conn->log = nxt_main_log; + + nxt_memzero(&rp, sizeof(nxt_http_request_parse_t)); + + rc = nxt_http_parse_request_init(&rp, mp); + if (rc != NXT_OK) { + goto failed; + } + + rc = nxt_http_parse_request(&rp, &buf); + if (rc != NXT_DONE) { + goto failed; + } + + nxt_http_fields_process(rp.fields, &nxt_controller_fields_hash, + req); + +failed: + + nxt_mp_destroy(mp); + + return 0; +} diff --git a/fuzzing/nxt_http_h1p_fuzz.c b/fuzzing/nxt_http_h1p_fuzz.c new file mode 100644 index 00000000..b8893ad6 --- /dev/null +++ b/fuzzing/nxt_http_h1p_fuzz.c @@ -0,0 +1,106 @@ +/* + * Copyright (C) NGINX, Inc. + */ + +#include <nxt_main.h> + +/* DO NOT TRY THIS AT HOME! 
*/ +#include "nxt_h1proto.c" + + +#define KMININPUTLENGTH 2 +#define KMAXINPUTLENGTH 1024 + + +extern int LLVMFuzzerInitialize(int *argc, char ***argv); +extern int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size); + + +extern char **environ; + + +int +LLVMFuzzerInitialize(int *argc, char ***argv) +{ + nxt_int_t ret; + + if (nxt_lib_start("fuzzing", NULL, &environ) != NXT_OK) { + return NXT_ERROR; + } + + ret = nxt_http_fields_hash(&nxt_h1p_fields_hash, + nxt_h1p_fields, nxt_nitems(nxt_h1p_fields)); + if (ret != NXT_OK) { + return NXT_ERROR; + } + + return 0; +} + + +int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + nxt_mp_t *mp; + nxt_int_t rc; + nxt_buf_mem_t buf; + nxt_http_request_t *req; + nxt_http_request_parse_t rp; + + if (size < KMININPUTLENGTH || size > KMAXINPUTLENGTH) { + return 0; + } + + mp = nxt_mp_create(1024, 128, 256, 32); + if (mp == NULL) { + return 0; + } + + req = nxt_mp_zget(mp, sizeof(nxt_http_request_t)); + if (req == NULL) { + goto failed; + } + + req->proto.h1 = nxt_mp_zget(mp, sizeof(nxt_h1proto_t)); + if (req->proto.h1 == NULL) { + goto failed; + } + + req->conf = nxt_mp_zget(mp, sizeof(nxt_socket_conf_joint_t)); + if (req->conf == NULL) { + goto failed; + } + + req->conf->socket_conf = nxt_mp_zget(mp, sizeof(nxt_socket_conf_t)); + if (req->conf->socket_conf == NULL) { + goto failed; + } + + buf.start = (u_char *)data; + buf.end = (u_char *)data + size; + buf.pos = buf.start; + buf.free = buf.end; + + req->mem_pool = mp; + req->conf->socket_conf->max_body_size = 8 * 1024 * 1024; + + nxt_memzero(&rp, sizeof(nxt_http_request_parse_t)); + + rc = nxt_http_parse_request_init(&rp, mp); + if (rc != NXT_OK) { + goto failed; + } + + rc = nxt_http_parse_request(&rp, &buf); + if (rc != NXT_DONE) { + goto failed; + } + + nxt_http_fields_process(rp.fields, &nxt_h1p_fields_hash, req); + +failed: + + nxt_mp_destroy(mp); + + return 0; +} diff --git a/fuzzing/nxt_http_h1p_peer_fuzz.c b/fuzzing/nxt_http_h1p_peer_fuzz.c new file mode 100644 index 00000000..43786d0c --- /dev/null +++ b/fuzzing/nxt_http_h1p_peer_fuzz.c @@ -0,0 +1,99 @@ +/* + * Copyright (C) NGINX, Inc. + */ + +#include <nxt_main.h> + +/* DO NOT TRY THIS AT HOME! 
*/ +#include "nxt_h1proto.c" + + +#define KMININPUTLENGTH 2 +#define KMAXINPUTLENGTH 1024 + + +extern int LLVMFuzzerInitialize(int *argc, char ***argv); +extern int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size); + + +extern char **environ; + + +int +LLVMFuzzerInitialize(int *argc, char ***argv) +{ + nxt_int_t ret; + + if (nxt_lib_start("fuzzing", NULL, &environ) != NXT_OK) { + return NXT_ERROR; + } + + ret = nxt_http_fields_hash(&nxt_h1p_peer_fields_hash, + nxt_h1p_peer_fields, + nxt_nitems(nxt_h1p_peer_fields)); + if (ret != NXT_OK) { + return NXT_ERROR; + } + + return 0; +} + + +int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + nxt_mp_t *mp; + nxt_int_t rc; + nxt_buf_mem_t buf; + nxt_http_request_t *req; + nxt_http_request_parse_t rp; + + if (size < KMININPUTLENGTH || size > KMAXINPUTLENGTH) { + return 0; + } + + mp = nxt_mp_create(1024, 128, 256, 32); + if (mp == NULL) { + return 0; + } + + req = nxt_mp_zget(mp, sizeof(nxt_http_request_t)); + if (req == NULL) { + goto failed; + } + + req->peer = nxt_mp_zalloc(mp, sizeof(nxt_http_peer_t)); + if (req->peer == NULL) { + goto failed; + } + + req->peer->proto.h1 = nxt_mp_zalloc(mp, sizeof(nxt_h1proto_t)); + if (req->peer->proto.h1 == NULL) { + goto failed; + } + + buf.start = (u_char *)data; + buf.end = (u_char *)data + size; + buf.pos = buf.start; + buf.free = buf.end; + + nxt_memzero(&rp, sizeof(nxt_http_request_parse_t)); + + rc = nxt_http_parse_request_init(&rp, mp); + if (rc != NXT_OK) { + goto failed; + } + + rc = nxt_http_parse_request(&rp, &buf); + if (rc != NXT_DONE) { + goto failed; + } + + nxt_http_fields_process(rp.fields, &nxt_h1p_peer_fields_hash, req); + +failed: + + nxt_mp_destroy(mp); + + return 0; +} diff --git a/fuzzing/nxt_json_fuzz.c b/fuzzing/nxt_json_fuzz.c new file mode 100644 index 00000000..fa222988 --- /dev/null +++ b/fuzzing/nxt_json_fuzz.c @@ -0,0 +1,97 @@ +/* + * Copyright (C) NGINX, Inc. 
+ */ + +#include <nxt_main.h> +#include <nxt_conf.h> +#include <nxt_router.h> + +#define KMININPUTLENGTH 2 +#define KMAXINPUTLENGTH 1024 + + +extern int LLVMFuzzerInitialize(int *argc, char ***argv); +extern int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size); + + +extern char **environ; + + +int +LLVMFuzzerInitialize(int *argc, char ***argv) +{ + if (nxt_lib_start("fuzzing", NULL, &environ) != NXT_OK) { + return NXT_ERROR; + } + + return 0; +} + + +int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + nxt_mp_t *mp; + nxt_str_t input; + nxt_thread_t *thr; + nxt_runtime_t *rt; + nxt_conf_value_t *conf; + nxt_conf_validation_t vldt; + nxt_conf_json_pretty_t pretty; + + if (size < KMININPUTLENGTH || size > KMAXINPUTLENGTH) { + return 0; + } + + thr = nxt_thread(); + + mp = nxt_mp_create(1024, 128, 256, 32); + if (mp == NULL) { + return 0; + } + + rt = nxt_mp_zget(mp, sizeof(nxt_runtime_t)); + if (rt == NULL) { + goto failed; + } + + rt->languages = nxt_array_create(mp, 1, sizeof(nxt_app_lang_module_t)); + if (rt->languages == NULL) { + goto failed; + } + + input.start = (u_char *)data; + input.length = size; + + thr->runtime = rt; + rt->mem_pool = mp; + + nxt_memzero(&pretty, sizeof(nxt_conf_json_pretty_t)); + nxt_memzero(&vldt, sizeof(nxt_conf_validation_t)); + + conf = nxt_conf_json_parse_str(mp, &input); + if (conf == NULL) { + goto failed; + } + + nxt_conf_json_length(conf, NULL); + nxt_conf_json_length(conf, &pretty); + + vldt.pool = nxt_mp_create(1024, 128, 256, 32); + if (vldt.pool == NULL) { + goto failed; + } + + vldt.conf = conf; + vldt.conf_pool = mp; + vldt.ver = NXT_VERNUM; + + nxt_conf_validate(&vldt); + nxt_mp_destroy(vldt.pool); + +failed: + + nxt_mp_destroy(mp); + + return 0; +} diff --git a/fuzzing/oss-fuzz.sh b/fuzzing/oss-fuzz.sh new file mode 100644 index 00000000..1fe5ecda --- /dev/null +++ b/fuzzing/oss-fuzz.sh @@ -0,0 +1,29 @@ +#!/bin/bash -eu + +# Build unit +./configure --no-regex --no-pcre2 --fuzz="$LIB_FUZZING_ENGINE" +make fuzz -j"$(nproc)" + +# Copy all fuzzers. +cp build/fuzz_* $OUT/ + +# cd into fuzzing dir +pushd fuzzing/ +cp fuzz_http.dict $OUT/fuzz_http_controller.dict +cp fuzz_http.dict $OUT/fuzz_http_h1p.dict +cp fuzz_http.dict $OUT/fuzz_http_h1p_peer.dict + +# Create temporary directories. +cp -r fuzz_http_seed_corpus/ fuzz_http_controller_seed_corpus/ +cp -r fuzz_http_seed_corpus/ fuzz_http_h1p_seed_corpus/ +cp -r fuzz_http_seed_corpus/ fuzz_http_h1p_peer_seed_corpus/ + +zip -r $OUT/fuzz_basic_seed_corpus.zip fuzz_basic_seed_corpus/ +zip -r $OUT/fuzz_http_controller_seed_corpus.zip fuzz_http_controller_seed_corpus/ +zip -r $OUT/fuzz_http_h1p_seed_corpus.zip fuzz_http_h1p_seed_corpus/ +zip -r $OUT/fuzz_http_h1p_peer_seed_corpus.zip fuzz_http_h1p_peer_seed_corpus/ +zip -r $OUT/fuzz_json_seed_corpus.zip fuzz_json_seed_corpus/ + +# Delete temporary directories. 
+rm -r fuzz_http_controller_seed_corpus/ fuzz_http_h1p_seed_corpus/ fuzz_http_h1p_peer_seed_corpus/ +popd diff --git a/pkg/contrib/Makefile b/pkg/contrib/Makefile index 7e3b8b97..0f412b20 100644 --- a/pkg/contrib/Makefile +++ b/pkg/contrib/Makefile @@ -51,7 +51,7 @@ XZ ?= $(error XZ (LZMA) compressor not found) endif ifeq ($(shell sha512sum --version >/dev/null 2>&1 || echo FAIL),) -SHA512SUM = sha512sum --check +SHA512SUM = sha512sum -c - else ifeq ($(shell shasum --version >/dev/null 2>&1 || echo FAIL),) SHA512SUM = shasum -a 512 --check else ifeq ($(shell openssl version >/dev/null 2>&1 || echo FAIL),) @@ -89,10 +89,10 @@ checksum = \ (cd $(TARBALLS) && $(1)) CHECK_SHA512 = $(call checksum,$(SHA512SUM),SHA512,.sum-) UNPACK = $(RM) -R $@ \ - $(foreach f,$(filter %.tar.gz %.tgz,$^), && tar xvzfo $(f)) \ - $(foreach f,$(filter %.tar.bz2,$^), && tar xvjfo $(f)) \ - $(foreach f,$(filter %.tar.xz,$^), && tar xvJfo $(f)) \ - $(foreach f,$(filter %.zip,$^), && unzip $(f)) + $(foreach f,$(filter %.tar.gz %.tgz,$^), && tar xzfo $(f)) \ + $(foreach f,$(filter %.tar.bz2,$^), && tar xjfo $(f)) \ + $(foreach f,$(filter %.tar.xz,$^), && tar xJfo $(f)) \ + $(foreach f,$(filter %.zip,$^), && unzip -q $(f)) UNPACK_DIR = $(patsubst %.tar,%,$(basename $(notdir $<))) APPLY = (cd $(UNPACK_DIR) && patch -fp1) < MOVE = mv $(UNPACK_DIR) $@ && touch $@ diff --git a/pkg/contrib/src/njs/Makefile b/pkg/contrib/src/njs/Makefile index 6a4fdf9d..643ae703 100644 --- a/pkg/contrib/src/njs/Makefile +++ b/pkg/contrib/src/njs/Makefile @@ -1,7 +1,7 @@ # njs include $(dir $(abspath $(lastword $(MAKEFILE_LIST))))/version -NJS_URL := https://hg.nginx.org/njs/archive/$(NJS_VERSION).tar.gz +NJS_URL := https://github.com/nginx/njs/archive/$(NJS_VERSION).tar.gz PKGS += njs diff --git a/pkg/contrib/src/njs/SHA512SUMS b/pkg/contrib/src/njs/SHA512SUMS index 43766487..175c46ee 100644 --- a/pkg/contrib/src/njs/SHA512SUMS +++ b/pkg/contrib/src/njs/SHA512SUMS @@ -1 +1 @@ -cc3110a0c6866dfc03d19c58745e5b75aa9792999db45bc55a752f7b04db8ae51322bfe0156b873109c8477c6c1a030c851c770697cf6791c6e89fb2fed0a2c5 njs-0.8.2.tar.gz +09fb37d609f5cb97b0af5eb097a017233af2eacb2d38071346b49f5e03b5e37280eebb360fc824acba0c600c44d234e2d11fa55f4bc913319491d7789a94171c njs-0.8.5.tar.gz diff --git a/pkg/contrib/src/njs/version b/pkg/contrib/src/njs/version index 00453419..ebfb5987 100644 --- a/pkg/contrib/src/njs/version +++ b/pkg/contrib/src/njs/version @@ -1 +1 @@ -NJS_VERSION := 0.8.2 +NJS_VERSION := 0.8.5 diff --git a/pkg/contrib/src/wasi-sysroot/Makefile b/pkg/contrib/src/wasi-sysroot/Makefile index fcfb8df3..a02a6591 100644 --- a/pkg/contrib/src/wasi-sysroot/Makefile +++ b/pkg/contrib/src/wasi-sysroot/Makefile @@ -12,6 +12,7 @@ $(TARBALLS)/wasi-sysroot-$(WASI_SYSROOT_VERSION_MAJOR).$(WASI_SYSROOT_VERSION_MI wasi-sysroot: wasi-sysroot-$(WASI_SYSROOT_VERSION_MAJOR).$(WASI_SYSROOT_VERSION_MINOR).tar.gz .sum-wasi-sysroot $(UNPACK) + $(MOVE) .wasi-sysroot: wasi-sysroot touch $@ diff --git a/pkg/contrib/src/wasi-sysroot/SHA512SUMS b/pkg/contrib/src/wasi-sysroot/SHA512SUMS index a1e71fff..ffb6e23f 100644 --- a/pkg/contrib/src/wasi-sysroot/SHA512SUMS +++ b/pkg/contrib/src/wasi-sysroot/SHA512SUMS @@ -1 +1 @@ -ad4ad629d02f01f3d2eb977dd0bc43091b0f11ed1b5dd9fdb3580e4cf49c132f6cb4982ae80eabf638f0d08d0c4c7df40cceb2be8f9d2c29abc35b8564ffda42 wasi-sysroot-20.0.tar.gz +6bf138fc90feccc0cfa2683d164a0c8cfb973b5105675ff53b87628d8775676a8eb383d225ca4b55d6f0f800d167a605ee569978a2048b3dab8d01672a408d7a wasi-sysroot-24.0.tar.gz diff --git a/pkg/contrib/src/wasi-sysroot/version 
b/pkg/contrib/src/wasi-sysroot/version index 919c7098..ed18dd4b 100644 --- a/pkg/contrib/src/wasi-sysroot/version +++ b/pkg/contrib/src/wasi-sysroot/version @@ -1,2 +1,2 @@ -WASI_SYSROOT_VERSION_MAJOR := 20 +WASI_SYSROOT_VERSION_MAJOR := 24 WASI_SYSROOT_VERSION_MINOR := 0 diff --git a/pkg/contrib/src/wasmtime/Makefile b/pkg/contrib/src/wasmtime/Makefile index 11797fee..2a6e8abf 100644 --- a/pkg/contrib/src/wasmtime/Makefile +++ b/pkg/contrib/src/wasmtime/Makefile @@ -11,10 +11,6 @@ else CARGO = $(error Cargo (Rust package manager) not found) endif -ifeq ($(shell uname -s),Linux) -WASMTIME_ARGS=-Clink-arg=-Wl,-soname,libwasmtime.so -endif - $(TARBALLS)/wasmtime-v$(WASMTIME_VERSION)-src.tar.gz: $(call download_pkg,$(WASMTIME_URL),wasmtime) @@ -25,6 +21,11 @@ wasmtime: wasmtime-v$(WASMTIME_VERSION)-src.tar.gz .sum-wasmtime $(MOVE) .wasmtime: wasmtime - cd $< && $(CARGO) rustc --release -p wasmtime-c-api -- $(WASMTIME_ARGS) - cp $</crates/c-api/wasm-c-api/include/wasm.h $</crates/c-api/include/ + cd $< && cmake \ + -DCMAKE_INSTALL_LIBDIR=lib \ + -S crates/c-api \ + -B target/c-api \ + --install-prefix "$(TOPSRC)/wasmtime/artifacts" + cd $< && cmake --build target/c-api + cd $< && cmake --install target/c-api touch $@ diff --git a/pkg/contrib/src/wasmtime/SHA512SUMS b/pkg/contrib/src/wasmtime/SHA512SUMS index 35e0e47f..fc1ba863 100644 --- a/pkg/contrib/src/wasmtime/SHA512SUMS +++ b/pkg/contrib/src/wasmtime/SHA512SUMS @@ -1 +1 @@ -4b67ba0742da0558efffe1dbde5512dc5f0201fad25f1027d277758e76778b2add11528dbe3f5b7759f2386859b52aea3a0526abaa481c2ed91eb56c5a531b49 wasmtime-v11.0.1-src.tar.gz +2ce3979f772176350a2c4694cfd24c241c426d453d99f8620424b25ef1373ea5be06370c8199f3bd5a46f0ba1a4cd4b702a359efc969d5eaf1e9e78543c5900a wasmtime-v24.0.0-src.tar.gz diff --git a/pkg/contrib/src/wasmtime/version b/pkg/contrib/src/wasmtime/version index 1debf1ff..d418b456 100644 --- a/pkg/contrib/src/wasmtime/version +++ b/pkg/contrib/src/wasmtime/version @@ -1 +1 @@ -WASMTIME_VERSION := 11.0.1 +WASMTIME_VERSION := 24.0.0 diff --git a/pkg/contrib/tarballs/.hgignore b/pkg/contrib/tarballs/.gitignore index 8d876d7b..441875c5 100644 --- a/pkg/contrib/tarballs/.hgignore +++ b/pkg/contrib/tarballs/.gitignore @@ -1,3 +1,2 @@ -syntax:glob *.tar.* *.githash diff --git a/pkg/deb/Makefile b/pkg/deb/Makefile index e48de155..0e6ca191 100644 --- a/pkg/deb/Makefile +++ b/pkg/deb/Makefile @@ -14,15 +14,14 @@ SRCDIR= unit-$(VERSION) CODENAME = $(shell lsb_release -cs) -BUILD_DEPENDS_unit = build-essential debhelper devscripts fakeroot libxml2-utils lintian lsb-release xsltproc libssl-dev +BUILD_DEPENDS_unit = build-essential debhelper devscripts fakeroot libxml2-utils lintian lsb-release xsltproc libssl-dev clang llvm BUILD_DEPENDS = $(BUILD_DEPENDS_unit) MODULES= -# Ubuntu 23.10 -ifeq ($(CODENAME),mantic) +# Ubuntu 24.04 +ifeq ($(CODENAME),noble) include Makefile.php -include Makefile.python311 include Makefile.python312 include Makefile.go include Makefile.perl @@ -30,41 +29,24 @@ include Makefile.ruby include Makefile.jsc-common include Makefile.jsc11 include Makefile.jsc17 -include Makefile.jsc19 -include Makefile.jsc20 include Makefile.jsc21 include Makefile.wasm endif -# Ubuntu 23.04 -ifeq ($(CODENAME),lunar) +# Ubuntu 23.10 +ifeq ($(CODENAME),mantic) include Makefile.php include Makefile.python311 +include Makefile.python312 include Makefile.go include Makefile.perl include Makefile.ruby include Makefile.jsc-common include Makefile.jsc11 include Makefile.jsc17 -include Makefile.jsc18 include Makefile.jsc19 include Makefile.jsc20 
-include Makefile.wasm -endif - -# Ubuntu 22.10 -ifeq ($(CODENAME),kinetic) -include Makefile.php -include Makefile.python27 -include Makefile.python310 -include Makefile.go -include Makefile.perl -include Makefile.ruby -include Makefile.jsc-common -include Makefile.jsc11 -include Makefile.jsc17 -include Makefile.jsc18 -include Makefile.jsc19 +include Makefile.jsc21 include Makefile.wasm endif @@ -83,23 +65,6 @@ include Makefile.jsc18 include Makefile.wasm endif -# Ubuntu 21.10 -ifeq ($(CODENAME),impish) -include Makefile.php -include Makefile.python27 -include Makefile.python39 -include Makefile.python310 -include Makefile.go -include Makefile.perl -include Makefile.ruby -include Makefile.jsc-common -include Makefile.jsc11 -include Makefile.jsc16 -include Makefile.jsc17 -include Makefile.jsc18 -include Makefile.wasm -endif - # Ubuntu 20.04 ifeq ($(CODENAME),focal) include Makefile.php @@ -113,22 +78,6 @@ include Makefile.jsc11 include Makefile.wasm endif -# Ubuntu 18.04 -ifeq ($(CODENAME),bionic) -include Makefile.php -include Makefile.python27 -include Makefile.python36 -include Makefile.python37 -include Makefile.python38 -include Makefile.go -include Makefile.perl -include Makefile.ruby -include Makefile.jsc-common -include Makefile.jsc8 -include Makefile.jsc11 -include Makefile.wasm -endif - # Debian 12 ifeq ($(CODENAME),bookworm) include Makefile.php @@ -165,19 +114,6 @@ include Makefile.jsc11 include Makefile.wasm endif -# Debian 10 -ifeq ($(CODENAME),buster) -include Makefile.php -include Makefile.python27 -include Makefile.python37 -include Makefile.go -include Makefile.perl -include Makefile.ruby -include Makefile.jsc-common -include Makefile.jsc11 -include Makefile.wasm -endif - CONFIGURE_ARGS_COMMON=\ --prefix=/usr \ --statedir=/var/lib/unit \ @@ -214,9 +150,12 @@ check-build-depends-%: esac ; \ not_installed= ; \ for pkg in $${pkgs}; do \ - dpkg-query -W $${pkg} >/dev/null 2>&1 ; \ - if [ $$? -ne 0 ]; then \ - not_installed="$${not_installed} $${pkg}" ; \ + i=$$(dpkg-query -f '$${db:Status-Status}' -W $${pkg}:$$(dpkg --print-architecture) 2>/dev/null) ; \ + if [ $$? -ne 0 -o "$${i}" != "installed" ]; then \ + i=$$(dpkg-query -f '$${db:Status-Status}' -W $${pkg}:all 2>/dev/null) ; \ + if [ $$? 
-ne 0 -o "$${i}" != "installed" ]; then \ + not_installed="$${not_installed} $${pkg}" ; \ + fi; \ fi ; \ done ; \ if test -n "$${not_installed}" ; then \ diff --git a/pkg/deb/Makefile.jsc10 b/pkg/deb/Makefile.jsc10 deleted file mode 100644 index 43ded86b..00000000 --- a/pkg/deb/Makefile.jsc10 +++ /dev/null @@ -1,71 +0,0 @@ -MODULES+= jsc10 -MODULE_SUFFIX_jsc10= jsc10 - -MODULE_SUMMARY_jsc10= Java 10 module for NGINX Unit - -MODULE_VERSION_jsc10= $(VERSION) -MODULE_RELEASE_jsc10= 1 - -MODULE_CONFARGS_jsc10= java --module=java10 --home=/usr/lib/jvm/java-11-openjdk-$$\(DEB_HOST_ARCH\) --jars=/usr/share/unit-jsc-common/ -MODULE_MAKEARGS_jsc10= java10 -MODULE_INSTARGS_jsc10= java10-install - -MODULE_SOURCES_jsc10= unit.example-jsc-app \ - unit.example-jsc10-config - -BUILD_DEPENDS_jsc10= openjdk-11-jdk-headless openjdk-11-jre-headless -BUILD_DEPENDS+= $(BUILD_DEPENDS_jsc10) - -MODULE_BUILD_DEPENDS_jsc10=,openjdk-11-jdk-headless -MODULE_DEPENDS_jsc10=,openjdk-11-jre-headless,unit-jsc-common (= $(MODULE_VERSION_jsc_common)-$(MODULE_RELEASE_jsc_common)~$(CODENAME)) - -define MODULE_PREINSTALL_jsc10 - mkdir -p debian/unit-jsc10/usr/share/doc/unit-jsc10/examples/jsc-app - install -m 644 -p debian/unit.example-jsc-app debian/unit-jsc10/usr/share/doc/unit-jsc10/examples/jsc-app/index.jsp - install -m 644 -p debian/unit.example-jsc10-config debian/unit-jsc10/usr/share/doc/unit-jsc10/examples/unit.config - install -m 644 -p src/java/README.JSR-340 debian/unit-jsc10/usr/share/doc/unit-jsc10/ -endef -export MODULE_PREINSTALL_jsc10 - -define MODULE_POSTINSTALL_jsc10 - cd $$\(BUILDDIR_unit\) \&\& \ - DESTDIR=$$\(INSTALLDIR\) make java-shared-uninstall -endef -export MODULE_POSTINSTALL_jsc10 - -define MODULE_POST_jsc10 -cat <<BANNER ----------------------------------------------------------------------- - -The $(MODULE_SUMMARY_jsc10) has been installed. - -To check out the sample app, run these commands: - - sudo service unit restart - cd /usr/share/doc/unit-$(MODULE_SUFFIX_jsc10)/examples - sudo curl -X PUT --data-binary @unit.config --unix-socket /var/run/control.unit.sock http://localhost/config - curl http://localhost:8800/ - -Online documentation is available at https://unit.nginx.org - -NOTICE: - -This version of Unit code is made available in support of the open source -development process. This is an intermediate build made available for -testing purposes only. This Unit code is untested and presumed incompatible -with the JSR 340 Java Servlet 3.1 specification. You should not deploy or -write to this code. You should instead deploy and write production -applications on pre-built binaries that have been tested and certified -to meet the JSR-340 compatibility requirements such as certified binaries -published for the JSR-340 reference implementation available at -https://javaee.github.io/glassfish/. - -Redistribution of any Intermediate Build must retain this notice. - -Oracle and Java are registered trademarks of Oracle and/or its affiliates. -Other names may be trademarks of their respective owners. 
- ----------------------------------------------------------------------- -BANNER -endef -export MODULE_POST_jsc10 diff --git a/pkg/deb/Makefile.jsc16 b/pkg/deb/Makefile.jsc16 deleted file mode 100644 index f45e1299..00000000 --- a/pkg/deb/Makefile.jsc16 +++ /dev/null @@ -1,71 +0,0 @@ -MODULES+= jsc16 -MODULE_SUFFIX_jsc16= jsc16 - -MODULE_SUMMARY_jsc16= Java 16 module for NGINX Unit - -MODULE_VERSION_jsc16= $(VERSION) -MODULE_RELEASE_jsc16= 1 - -MODULE_CONFARGS_jsc16= java --module=java16 --home=/usr/lib/jvm/java-16-openjdk-$$\(DEB_HOST_ARCH\) --jars=/usr/share/unit-jsc-common/ -MODULE_MAKEARGS_jsc16= java16 -MODULE_INSTARGS_jsc16= java16-install - -MODULE_SOURCES_jsc16= unit.example-jsc-app \ - unit.example-jsc16-config - -BUILD_DEPENDS_jsc16= openjdk-16-jdk-headless openjdk-16-jre-headless -BUILD_DEPENDS+= $(BUILD_DEPENDS_jsc16) - -MODULE_BUILD_DEPENDS_jsc16=,openjdk-16-jdk-headless -MODULE_DEPENDS_jsc16=,openjdk-16-jre-headless,unit-jsc-common (= $(MODULE_VERSION_jsc_common)-$(MODULE_RELEASE_jsc_common)~$(CODENAME)) - -define MODULE_PREINSTALL_jsc16 - mkdir -p debian/unit-jsc16/usr/share/doc/unit-jsc16/examples/jsc-app - install -m 644 -p debian/unit.example-jsc-app debian/unit-jsc16/usr/share/doc/unit-jsc16/examples/jsc-app/index.jsp - install -m 644 -p debian/unit.example-jsc16-config debian/unit-jsc16/usr/share/doc/unit-jsc16/examples/unit.config - install -m 644 -p src/java/README.JSR-340 debian/unit-jsc16/usr/share/doc/unit-jsc16/ -endef -export MODULE_PREINSTALL_jsc16 - -define MODULE_POSTINSTALL_jsc16 - cd $$\(BUILDDIR_unit\) \&\& \ - DESTDIR=$$\(INSTALLDIR\) make java-shared-uninstall -endef -export MODULE_POSTINSTALL_jsc16 - -define MODULE_POST_jsc16 -cat <<BANNER ----------------------------------------------------------------------- - -The $(MODULE_SUMMARY_jsc16) has been installed. - -To check out the sample app, run these commands: - - sudo service unit restart - cd /usr/share/doc/unit-$(MODULE_SUFFIX_jsc16)/examples - sudo curl -X PUT --data-binary @unit.config --unix-socket /var/run/control.unit.sock http://localhost/config - curl http://localhost:8800/ - -Online documentation is available at https://unit.nginx.org - -NOTICE: - -This version of Unit code is made available in support of the open source -development process. This is an intermediate build made available for -testing purposes only. This Unit code is untested and presumed incompatible -with the JSR 340 Java Servlet 3.1 specification. You should not deploy or -write to this code. You should instead deploy and write production -applications on pre-built binaries that have been tested and certified -to meet the JSR-340 compatibility requirements such as certified binaries -published for the JSR-340 reference implementation available at -https://javaee.github.io/glassfish/. - -Redistribution of any Intermediate Build must retain this notice. - -Oracle and Java are registered trademarks of Oracle and/or its affiliates. -Other names may be trademarks of their respective owners. 
- ----------------------------------------------------------------------- -BANNER -endef -export MODULE_POST_jsc16 diff --git a/pkg/deb/Makefile.jsc8 b/pkg/deb/Makefile.jsc8 deleted file mode 100644 index d7eed96b..00000000 --- a/pkg/deb/Makefile.jsc8 +++ /dev/null @@ -1,71 +0,0 @@ -MODULES+= jsc8 -MODULE_SUFFIX_jsc8= jsc8 - -MODULE_SUMMARY_jsc8= Java 8 module for NGINX Unit - -MODULE_VERSION_jsc8= $(VERSION) -MODULE_RELEASE_jsc8= 1 - -MODULE_CONFARGS_jsc8= java --module=java8 --home=/usr/lib/jvm/java-8-openjdk-$$\(DEB_HOST_ARCH\) --jars=/usr/share/unit-jsc-common/ -MODULE_MAKEARGS_jsc8= java8 -MODULE_INSTARGS_jsc8= java8-install - -MODULE_SOURCES_jsc8= unit.example-jsc-app \ - unit.example-jsc8-config - -BUILD_DEPENDS_jsc8= openjdk-8-jdk-headless openjdk-8-jre-headless -BUILD_DEPENDS+= $(BUILD_DEPENDS_jsc8) - -MODULE_BUILD_DEPENDS_jsc8=,openjdk-8-jdk-headless -MODULE_DEPENDS_jsc8=,openjdk-8-jre-headless,unit-jsc-common (= $(MODULE_VERSION_jsc_common)-$(MODULE_RELEASE_jsc_common)~$(CODENAME)) - -define MODULE_PREINSTALL_jsc8 - mkdir -p debian/unit-jsc8/usr/share/doc/unit-jsc8/examples/jsc-app - install -m 644 -p debian/unit.example-jsc-app debian/unit-jsc8/usr/share/doc/unit-jsc8/examples/jsc-app/index.jsp - install -m 644 -p debian/unit.example-jsc8-config debian/unit-jsc8/usr/share/doc/unit-jsc8/examples/unit.config - install -m 644 -p src/java/README.JSR-340 debian/unit-jsc8/usr/share/doc/unit-jsc8/ -endef -export MODULE_PREINSTALL_jsc8 - -define MODULE_POSTINSTALL_jsc8 - cd $$\(BUILDDIR_unit\) \&\& \ - DESTDIR=$$\(INSTALLDIR\) make java-shared-uninstall -endef -export MODULE_POSTINSTALL_jsc8 - -define MODULE_POST_jsc8 -cat <<BANNER ----------------------------------------------------------------------- - -The $(MODULE_SUMMARY_jsc8) has been installed. - -To check out the sample app, run these commands: - - sudo service unit restart - cd /usr/share/doc/unit-$(MODULE_SUFFIX_jsc8)/examples - sudo curl -X PUT --data-binary @unit.config --unix-socket /var/run/control.unit.sock http://localhost/config - curl http://localhost:8800/ - -Online documentation is available at https://unit.nginx.org - -NOTICE: - -This version of Unit code is made available in support of the open source -development process. This is an intermediate build made available for -testing purposes only. This Unit code is untested and presumed incompatible -with the JSR 340 Java Servlet 3.1 specification. You should not deploy or -write to this code. You should instead deploy and write production -applications on pre-built binaries that have been tested and certified -to meet the JSR-340 compatibility requirements such as certified binaries -published for the JSR-340 reference implementation available at -https://javaee.github.io/glassfish/. - -Redistribution of any Intermediate Build must retain this notice. - -Oracle and Java are registered trademarks of Oracle and/or its affiliates. -Other names may be trademarks of their respective owners. 
- ----------------------------------------------------------------------- -BANNER -endef -export MODULE_POST_jsc8 diff --git a/pkg/deb/Makefile.python36 b/pkg/deb/Makefile.python36 deleted file mode 100644 index 4fd898c6..00000000 --- a/pkg/deb/Makefile.python36 +++ /dev/null @@ -1,46 +0,0 @@ -MODULES+= python36 -MODULE_SUFFIX_python36= python3.6 - -MODULE_SUMMARY_python36= Python 3.6 module for NGINX Unit - -MODULE_VERSION_python36= $(VERSION) -MODULE_RELEASE_python36= 1 - -MODULE_CONFARGS_python36= python --config=python3.6-config -MODULE_MAKEARGS_python36= python3.6 -MODULE_INSTARGS_python36= python3.6-install - -MODULE_SOURCES_python36= unit.example-python-app \ - unit.example-python3.6-config - -BUILD_DEPENDS_python36= python3.6-dev -BUILD_DEPENDS+= $(BUILD_DEPENDS_python36) - -MODULE_BUILD_DEPENDS_python36=,python3.6-dev - -define MODULE_PREINSTALL_python36 - mkdir -p debian/unit-python3.6/usr/share/doc/unit-python3.6/examples/python-app - install -m 644 -p debian/unit.example-python-app debian/unit-python3.6/usr/share/doc/unit-python3.6/examples/python-app/wsgi.py - install -m 644 -p debian/unit.example-python3.6-config debian/unit-python3.6/usr/share/doc/unit-python3.6/examples/unit.config -endef -export MODULE_PREINSTALL_python36 - -define MODULE_POST_python36 -cat <<BANNER ----------------------------------------------------------------------- - -The $(MODULE_SUMMARY_python36) has been installed. - -To check out the sample app, run these commands: - - sudo service unit restart - cd /usr/share/doc/unit-$(MODULE_SUFFIX_python36)/examples - sudo curl -X PUT --data-binary @unit.config --unix-socket /var/run/control.unit.sock http://localhost/config - curl http://localhost:8400/ - -Online documentation is available at https://unit.nginx.org - ----------------------------------------------------------------------- -BANNER -endef -export MODULE_POST_python36 diff --git a/pkg/deb/Makefile.python37 b/pkg/deb/Makefile.python37 deleted file mode 100644 index 54dd8c4c..00000000 --- a/pkg/deb/Makefile.python37 +++ /dev/null @@ -1,46 +0,0 @@ -MODULES+= python37 -MODULE_SUFFIX_python37= python3.7 - -MODULE_SUMMARY_python37= Python 3.7 module for NGINX Unit - -MODULE_VERSION_python37= $(VERSION) -MODULE_RELEASE_python37= 1 - -MODULE_CONFARGS_python37= python --config=python3.7-config -MODULE_MAKEARGS_python37= python3.7 -MODULE_INSTARGS_python37= python3.7-install - -MODULE_SOURCES_python37= unit.example-python-app \ - unit.example-python3.7-config - -BUILD_DEPENDS_python37= python3.7-dev -BUILD_DEPENDS+= $(BUILD_DEPENDS_python37) - -MODULE_BUILD_DEPENDS_python37=,python3.7-dev - -define MODULE_PREINSTALL_python37 - mkdir -p debian/unit-python3.7/usr/share/doc/unit-python3.7/examples/python-app - install -m 644 -p debian/unit.example-python-app debian/unit-python3.7/usr/share/doc/unit-python3.7/examples/python-app/wsgi.py - install -m 644 -p debian/unit.example-python3.7-config debian/unit-python3.7/usr/share/doc/unit-python3.7/examples/unit.config -endef -export MODULE_PREINSTALL_python37 - -define MODULE_POST_python37 -cat <<BANNER ----------------------------------------------------------------------- - -The $(MODULE_SUMMARY_python37) has been installed. 
- -To check out the sample app, run these commands: - - sudo service unit restart - cd /usr/share/doc/unit-$(MODULE_SUFFIX_python37)/examples - sudo curl -X PUT --data-binary @unit.config --unix-socket /var/run/control.unit.sock http://localhost/config - curl http://localhost:8400/ - -Online documentation is available at https://unit.nginx.org - ----------------------------------------------------------------------- -BANNER -endef -export MODULE_POST_python37 diff --git a/pkg/deb/Makefile.wasm b/pkg/deb/Makefile.wasm index 8f3fdc67..de89841e 100644 --- a/pkg/deb/Makefile.wasm +++ b/pkg/deb/Makefile.wasm @@ -6,14 +6,19 @@ MODULE_SUMMARY_wasm= WASM module for NGINX Unit MODULE_VERSION_wasm= $(VERSION) MODULE_RELEASE_wasm= 1 -MODULE_CONFARGS_wasm= wasm --include-path=\$$(CURDIR)/pkg/contrib/wasmtime/crates/c-api/include --lib-path=\$$(CURDIR)/pkg/contrib/wasmtime/target/release \&\& ./configure wasm-wasi-component -MODULE_MAKEARGS_wasm= wasm wasm-wasi-component CFLAGS=\"\$$(shell grep ^CFLAGS \$$(BUILDDIR_\$$*)/build/Makefile | cut -d' ' -f 3-) -Wno-missing-prototypes\" -MODULE_INSTARGS_wasm= wasm-install wasm-wasi-component-install +MODULE_CONFARGS_wasm= wasm-wasi-component +MODULE_MAKEARGS_wasm= wasm-wasi-component CFLAGS=\"\$$(shell grep ^CFLAGS \$$(BUILDDIR_\$$*)/build/Makefile | cut -d' ' -f 3-) -Wno-missing-prototypes\" +MODULE_INSTARGS_wasm= wasm-wasi-component-install + +ifeq (,$(findstring $(CODENAME),bullseye focal)) +MODULE_CONFARGS_wasm+= \&\& ./configure wasm --include-path=\$$(CURDIR)/pkg/contrib/wasmtime/artifacts/include --lib-path=\$$(CURDIR)/pkg/contrib/wasmtime/artifacts/lib +MODULE_MAKEARGS_wasm+= wasm +MODULE_INSTARGS_wasm+= wasm-install MODULE_SOURCES_wasm= -BUILD_DEPENDS_wasm= -MODULE_BUILD_DEPENDS_wasm= +BUILD_DEPENDS_wasm= cmake +MODULE_BUILD_DEPENDS_wasm=,cmake MODULE_DEPENDS_wasm= BUILD_DEPENDS+= $(BUILD_DEPENDS_wasm) @@ -29,9 +34,10 @@ export MODULE_PREINSTALL_wasm define MODULE_POSTINSTALL_wasm mkdir -p debian/unit-wasm/usr/lib/\$$(dpkg-architecture -q DEB_HOST_MULTIARCH)/ - install -m 755 -p pkg/contrib/wasmtime/target/release/libwasmtime.so debian/unit-wasm/usr/lib/\$$(dpkg-architecture -q DEB_HOST_MULTIARCH)/ + install -m 755 -p pkg/contrib/wasmtime/artifacts/lib/libwasmtime.so debian/unit-wasm/usr/lib/\$$(dpkg-architecture -q DEB_HOST_MULTIARCH)/ endef export MODULE_POSTINSTALL_wasm +endif define MODULE_POST_wasm cat <<BANNER diff --git a/pkg/deb/debian.module/copyright.unit-jsc8 b/pkg/deb/debian.module/copyright.unit-jsc8 deleted file mode 100644 index 60da2dfa..00000000 --- a/pkg/deb/debian.module/copyright.unit-jsc8 +++ /dev/null @@ -1,42 +0,0 @@ - - NGINX Unit. - - Copyright 2017-2024 NGINX, Inc. - Copyright 2017-2024 Andrei Zeliankou - Copyright 2018-2024 Konstantin Pavlov - Copyright 2021-2024 Zhidao Hong - Copyright 2022-2024 Andrew Clayton - Copyright 2022-2024 Liam Crilly - Copyright 2023-2024 Dan Callahan - Copyright 2023-2024 Danielle De Leo - Copyright 2023-2024 Dylan Arbour - Copyright 2023-2024 Gabor Javorszky - Copyright 2023-2024 Igor Ippolitov - Copyright 2023-2024 Taryn Musgrave - Copyright 2021-2023 Alejandro Colomar - Copyright 2017-2022 Valentin V. Bartenev - Copyright 2017-2022 Max Romanov - Copyright 2021-2022 Oisín Canty - Copyright 2017-2021 Igor Sysoev - Copyright 2017-2021 Andrei Belov - Copyright 2019-2021 Tiago Natel de Moura - Copyright 2019-2020 Axel Duch - Copyright 2018-2019 Alexander Borisov - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - /usr/share/common-licenses/Apache-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - The unit-jsc8 package provides Java servlet container module - for NGINX Unit. - - Java is a registered trademark of Oracle and/or its affiliates. diff --git a/pkg/deb/debian.module/rules-noarch.in b/pkg/deb/debian.module/rules-noarch.in index e56e06bc..f311438d 100644 --- a/pkg/deb/debian.module/rules-noarch.in +++ b/pkg/deb/debian.module/rules-noarch.in @@ -3,8 +3,13 @@ # Uncomment this to turn on verbose mode. #export DH_VERBOSE=1 -export DEB_BUILD_MAINT_OPTIONS=hardening=+all,-pie +include /usr/share/dpkg/vendor.mk +ifeq ($(shell $(call dpkg_vendor_derives_from,ubuntu)),yes) +export DEB_CFLAGS_MAINT_APPEND=-fPIC +else export DEB_CFLAGS_MAINT_APPEND=-Wp,-D_FORTIFY_SOURCE=2 -fPIC +endif +export DEB_BUILD_MAINT_OPTIONS=hardening=+all,-pie DPKG_EXPORT_BUILDFLAGS = 1 include /usr/share/dpkg/buildflags.mk diff --git a/pkg/deb/debian.module/rules.in b/pkg/deb/debian.module/rules.in index 7814fbfd..8877ff23 100755 --- a/pkg/deb/debian.module/rules.in +++ b/pkg/deb/debian.module/rules.in @@ -3,8 +3,13 @@ # Uncomment this to turn on verbose mode. #export DH_VERBOSE=1 -export DEB_BUILD_MAINT_OPTIONS=hardening=+all,-pie +include /usr/share/dpkg/vendor.mk +ifeq ($(shell $(call dpkg_vendor_derives_from,ubuntu)),yes) +export DEB_CFLAGS_MAINT_APPEND=-fPIC +else export DEB_CFLAGS_MAINT_APPEND=-Wp,-D_FORTIFY_SOURCE=2 -fPIC +endif +export DEB_BUILD_MAINT_OPTIONS=hardening=+all,-pie DPKG_EXPORT_BUILDFLAGS = 1 include /usr/share/dpkg/buildflags.mk diff --git a/pkg/deb/debian.module/unit.example-jsc16-config b/pkg/deb/debian.module/unit.example-jsc16-config deleted file mode 100644 index 0b10a44d..00000000 --- a/pkg/deb/debian.module/unit.example-jsc16-config +++ /dev/null @@ -1,15 +0,0 @@ -{ - "applications": { - "example_java16": { - "processes": 1, - "type": "java 16", - "webapp": "/usr/share/doc/unit-jsc16/examples/jsc-app" - } - }, - - "listeners": { - "*:8800": { - "pass": "applications/example_java16" - } - } -} diff --git a/pkg/deb/debian.module/unit.example-jsc8-config b/pkg/deb/debian.module/unit.example-jsc8-config deleted file mode 100644 index 4d79112f..00000000 --- a/pkg/deb/debian.module/unit.example-jsc8-config +++ /dev/null @@ -1,15 +0,0 @@ -{ - "applications": { - "example_java8": { - "processes": 1, - "type": "java 1.8.0", - "webapp": "/usr/share/doc/unit-jsc8/examples/jsc-app" - } - }, - - "listeners": { - "*:8800": { - "pass": "applications/example_java8" - } - } -} diff --git a/pkg/deb/debian.module/unit.example-python3.6-config b/pkg/deb/debian.module/unit.example-python3.6-config deleted file mode 100644 index 543024ff..00000000 --- a/pkg/deb/debian.module/unit.example-python3.6-config +++ /dev/null @@ -1,16 +0,0 @@ -{ - "applications": { - "example_python": { - "type": "python 3.6", - "processes": 2, - "path": "/usr/share/doc/unit-python3.6/examples/python-app", - "module": "wsgi" - } - }, - - "listeners": { - "*:8400": { - "pass": "applications/example_python" - } - } -} diff --git a/pkg/deb/debian.module/unit.example-python3.7-config b/pkg/deb/debian.module/unit.example-python3.7-config deleted file mode 100644 index e7b8dbc3..00000000 
--- a/pkg/deb/debian.module/unit.example-python3.7-config +++ /dev/null @@ -1,16 +0,0 @@ -{ - "applications": { - "example_python": { - "type": "python 3.7", - "processes": 2, - "path": "/usr/share/doc/unit-python3.7/examples/python-app", - "module": "wsgi" - } - }, - - "listeners": { - "*:8400": { - "pass": "applications/example_python" - } - } -} diff --git a/pkg/deb/debian/rules.in b/pkg/deb/debian/rules.in index 55a4ebec..dd75b562 100644 --- a/pkg/deb/debian/rules.in +++ b/pkg/deb/debian/rules.in @@ -3,8 +3,13 @@ # Uncomment this to turn on verbose mode. #export DH_VERBOSE=1 -export DEB_BUILD_MAINT_OPTIONS=hardening=+all,-pie +include /usr/share/dpkg/vendor.mk +ifeq ($(shell $(call dpkg_vendor_derives_from,ubuntu)),yes) +export DEB_CFLAGS_MAINT_APPEND=-fPIC +else export DEB_CFLAGS_MAINT_APPEND=-Wp,-D_FORTIFY_SOURCE=2 -fPIC +endif +export DEB_BUILD_MAINT_OPTIONS=hardening=+all,-pie export DEB_LDFLAGS_MAINT_APPEND=-Wl,--as-needed -pie DPKG_EXPORT_BUILDFLAGS = 1 include /usr/share/dpkg/buildflags.mk diff --git a/pkg/docker/Dockerfile.go1.22 b/pkg/docker/Dockerfile.go1.22 index 7794353b..796a6715 100644 --- a/pkg/docker/Dockerfile.go1.22 +++ b/pkg/docker/Dockerfile.go1.22 @@ -1,4 +1,4 @@ -FROM golang:1.22-bullseye +FROM golang:1.22-bookworm LABEL org.opencontainers.image.title="Unit (go1.22)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.go1.21 b/pkg/docker/Dockerfile.go1.23 index 637e0c79..a62be794 100644 --- a/pkg/docker/Dockerfile.go1.21 +++ b/pkg/docker/Dockerfile.go1.23 @@ -1,12 +1,12 @@ -FROM golang:1.21-bullseye +FROM golang:1.23-bookworm -LABEL org.opencontainers.image.title="Unit (go1.21)" +LABEL org.opencontainers.image.title="Unit (go1.23)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.jsc11 b/pkg/docker/Dockerfile.jsc11 index fb3bdea0..d133d5b2 100644 --- a/pkg/docker/Dockerfile.jsc11 +++ b/pkg/docker/Dockerfile.jsc11 @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.minimal b/pkg/docker/Dockerfile.minimal index a50df863..3880de7a 100644 --- a/pkg/docker/Dockerfile.minimal +++ b/pkg/docker/Dockerfile.minimal @@ -1,4 +1,4 @@ -FROM debian:bullseye-slim +FROM debian:bookworm-slim LABEL org.opencontainers.image.title="Unit (minimal)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
@@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.node20 b/pkg/docker/Dockerfile.node20 index 6c1518a3..5ae09797 100644 --- a/pkg/docker/Dockerfile.node20 +++ b/pkg/docker/Dockerfile.node20 @@ -1,4 +1,4 @@ -FROM node:20-bullseye +FROM node:20-bookworm LABEL org.opencontainers.image.title="Unit (node20)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.node21 b/pkg/docker/Dockerfile.node22 index 9ff49215..85f8aaab 100644 --- a/pkg/docker/Dockerfile.node21 +++ b/pkg/docker/Dockerfile.node22 @@ -1,12 +1,12 @@ -FROM node:21-bullseye +FROM node:22-bookworm -LABEL org.opencontainers.image.title="Unit (node21)" +LABEL org.opencontainers.image.title="Unit (node22)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.perl5.38 b/pkg/docker/Dockerfile.perl5.38 index 852bfbb7..88ac4961 100644 --- a/pkg/docker/Dockerfile.perl5.38 +++ b/pkg/docker/Dockerfile.perl5.38 @@ -1,4 +1,4 @@ -FROM perl:5.38-bullseye +FROM perl:5.38-bookworm LABEL org.opencontainers.image.title="Unit (perl5.38)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.perl5.36 b/pkg/docker/Dockerfile.perl5.40 index f25ec048..aa67d7cc 100644 --- a/pkg/docker/Dockerfile.perl5.36 +++ b/pkg/docker/Dockerfile.perl5.40 @@ -1,12 +1,12 @@ -FROM perl:5.36-bullseye +FROM perl:5.40-bookworm -LABEL org.opencontainers.image.title="Unit (perl5.36)" +LABEL org.opencontainers.image.title="Unit (perl5.40)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.php8.2 b/pkg/docker/Dockerfile.php8.2 index 96999b3b..354acf94 100644 --- a/pkg/docker/Dockerfile.php8.2 +++ b/pkg/docker/Dockerfile.php8.2 @@ -1,4 +1,4 @@ -FROM php:8.2-cli-bullseye +FROM php:8.2-cli-bookworm LABEL org.opencontainers.image.title="Unit (php8.2)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.php8.3 b/pkg/docker/Dockerfile.php8.3 index 2b87fe74..98bd3843 100644 --- a/pkg/docker/Dockerfile.php8.3 +++ b/pkg/docker/Dockerfile.php8.3 @@ -1,4 +1,4 @@ -FROM php:8.3-cli-bullseye +FROM php:8.3-cli-bookworm LABEL org.opencontainers.image.title="Unit (php8.3)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
@@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.python3.11 b/pkg/docker/Dockerfile.python3.11 index 381509b3..a0a06a48 100644 --- a/pkg/docker/Dockerfile.python3.11 +++ b/pkg/docker/Dockerfile.python3.11 @@ -1,4 +1,4 @@ -FROM python:3.11-bullseye +FROM python:3.11-bookworm LABEL org.opencontainers.image.title="Unit (python3.11)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.python3.11-slim b/pkg/docker/Dockerfile.python3.11-slim new file mode 100644 index 00000000..bb9b32a8 --- /dev/null +++ b/pkg/docker/Dockerfile.python3.11-slim @@ -0,0 +1,89 @@ +FROM python:3.11-slim-bookworm + +LABEL org.opencontainers.image.title="Unit (python3.11-slim)" +LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
+LABEL org.opencontainers.image.url="https://unit.nginx.org" +LABEL org.opencontainers.image.source="https://github.com/nginx/unit" +LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" +LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" +LABEL org.opencontainers.image.version="1.33.0" + +RUN set -ex \ + && savedAptMark="$(apt-mark showmanual)" \ + && apt-get update \ + && apt-get install --no-install-recommends --no-install-suggests -y ca-certificates git build-essential libssl-dev libpcre2-dev curl pkg-config \ + && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ + && mkdir -p /usr/src/unit \ + && cd /usr/src/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ + && cd unit \ + && NCPU="$(getconf _NPROCESSORS_ONLN)" \ + && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ + && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \ + && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \ + && CONFIGURE_ARGS_MODULES="--prefix=/usr \ + --statedir=/var/lib/unit \ + --control=unix:/var/run/control.unit.sock \ + --runstatedir=/var/run \ + --pid=/var/run/unit.pid \ + --logdir=/var/log \ + --log=/var/log/unit.log \ + --tmpdir=/var/tmp \ + --user=unit \ + --group=unit \ + --openssl \ + --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \ + && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \ + --njs" \ + && make -j $NCPU -C pkg/contrib .njs \ + && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \ + && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ + && make -j $NCPU unitd \ + && install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \ + && make clean \ + && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \ + && make -j $NCPU unitd \ + && install -pm755 build/sbin/unitd /usr/sbin/unitd \ + && make clean \ + && /bin/true \ + && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ + && ./configure python --config=/usr/local/bin/python3-config \ + && make -j $NCPU python3-install \ + && make clean \ + && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \ + && ./configure python --config=/usr/local/bin/python3-config \ + && make -j $NCPU python3-install \ + && cd \ + && rm -rf /usr/src/unit \ + && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \ + ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \ + done \ + && apt-mark showmanual | xargs apt-mark auto > /dev/null \ + && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \ + && /bin/true \ + && mkdir -p /var/lib/unit/ \ + && mkdir -p /docker-entrypoint.d/ \ + && groupadd --gid 999 unit \ + && useradd \ + --uid 999 \ + --gid unit \ + --no-create-home \ + --home /nonexistent \ + --comment "unit user" \ + --shell /bin/false \ + unit \ + && apt-get update \ + && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \ + && apt-get purge -y --auto-remove build-essential \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /requirements.apt \ + && ln -sf /dev/stderr /var/log/unit.log + +COPY 
docker-entrypoint.sh /usr/local/bin/ +COPY welcome.* /usr/share/unit/welcome/ + +STOPSIGNAL SIGTERM + +ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"] +EXPOSE 80 +CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"] diff --git a/pkg/docker/Dockerfile.python3.12 b/pkg/docker/Dockerfile.python3.12 index a96a314b..f32fd45b 100644 --- a/pkg/docker/Dockerfile.python3.12 +++ b/pkg/docker/Dockerfile.python3.12 @@ -1,4 +1,4 @@ -FROM python:3.12-bullseye +FROM python:3.12-bookworm LABEL org.opencontainers.image.title="Unit (python3.12)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.python3.12-slim b/pkg/docker/Dockerfile.python3.12-slim new file mode 100644 index 00000000..65ada57c --- /dev/null +++ b/pkg/docker/Dockerfile.python3.12-slim @@ -0,0 +1,89 @@ +FROM python:3.12-slim-bookworm + +LABEL org.opencontainers.image.title="Unit (python3.12-slim)" +LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
+LABEL org.opencontainers.image.url="https://unit.nginx.org" +LABEL org.opencontainers.image.source="https://github.com/nginx/unit" +LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" +LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" +LABEL org.opencontainers.image.version="1.33.0" + +RUN set -ex \ + && savedAptMark="$(apt-mark showmanual)" \ + && apt-get update \ + && apt-get install --no-install-recommends --no-install-suggests -y ca-certificates git build-essential libssl-dev libpcre2-dev curl pkg-config \ + && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ + && mkdir -p /usr/src/unit \ + && cd /usr/src/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ + && cd unit \ + && NCPU="$(getconf _NPROCESSORS_ONLN)" \ + && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ + && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \ + && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \ + && CONFIGURE_ARGS_MODULES="--prefix=/usr \ + --statedir=/var/lib/unit \ + --control=unix:/var/run/control.unit.sock \ + --runstatedir=/var/run \ + --pid=/var/run/unit.pid \ + --logdir=/var/log \ + --log=/var/log/unit.log \ + --tmpdir=/var/tmp \ + --user=unit \ + --group=unit \ + --openssl \ + --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \ + && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \ + --njs" \ + && make -j $NCPU -C pkg/contrib .njs \ + && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \ + && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ + && make -j $NCPU unitd \ + && install -pm755 build/sbin/unitd /usr/sbin/unitd-debug \ + && make clean \ + && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/modules \ + && make -j $NCPU unitd \ + && install -pm755 build/sbin/unitd /usr/sbin/unitd \ + && make clean \ + && /bin/true \ + && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ + && ./configure python --config=/usr/local/bin/python3-config \ + && make -j $NCPU python3-install \ + && make clean \ + && ./configure $CONFIGURE_ARGS_MODULES --cc-opt="$CC_OPT" --modulesdir=/usr/lib/unit/modules \ + && ./configure python --config=/usr/local/bin/python3-config \ + && make -j $NCPU python3-install \ + && cd \ + && rm -rf /usr/src/unit \ + && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \ + ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \ + done \ + && apt-mark showmanual | xargs apt-mark auto > /dev/null \ + && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \ + && /bin/true \ + && mkdir -p /var/lib/unit/ \ + && mkdir -p /docker-entrypoint.d/ \ + && groupadd --gid 999 unit \ + && useradd \ + --uid 999 \ + --gid unit \ + --no-create-home \ + --home /nonexistent \ + --comment "unit user" \ + --shell /bin/false \ + unit \ + && apt-get update \ + && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \ + && apt-get purge -y --auto-remove build-essential \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /requirements.apt \ + && ln -sf /dev/stderr /var/log/unit.log + +COPY 
docker-entrypoint.sh /usr/local/bin/ +COPY welcome.* /usr/share/unit/welcome/ + +STOPSIGNAL SIGTERM + +ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"] +EXPOSE 80 +CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"] diff --git a/pkg/docker/Dockerfile.ruby3.2 b/pkg/docker/Dockerfile.ruby3.2 index 95bcdfa7..d3ab399c 100644 --- a/pkg/docker/Dockerfile.ruby3.2 +++ b/pkg/docker/Dockerfile.ruby3.2 @@ -1,4 +1,4 @@ -FROM ruby:3.2-bullseye +FROM ruby:3.2-bookworm LABEL org.opencontainers.image.title="Unit (ruby3.2)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.ruby3.3 b/pkg/docker/Dockerfile.ruby3.3 index 55f8a7d8..b83c754b 100644 --- a/pkg/docker/Dockerfile.ruby3.3 +++ b/pkg/docker/Dockerfile.ruby3.3 @@ -1,4 +1,4 @@ -FROM ruby:3.3-bullseye +FROM ruby:3.3-bookworm LABEL org.opencontainers.image.title="Unit (ruby3.3)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." @@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ diff --git a/pkg/docker/Dockerfile.wasm b/pkg/docker/Dockerfile.wasm index 0fe8ed14..dcdb3672 100644 --- a/pkg/docker/Dockerfile.wasm +++ b/pkg/docker/Dockerfile.wasm @@ -1,4 +1,4 @@ -FROM debian:bullseye-slim +FROM debian:bookworm-slim LABEL org.opencontainers.image.title="Unit (wasm)" LABEL org.opencontainers.image.description="Official build of Unit for Docker." 
@@ -6,7 +6,7 @@ LABEL org.opencontainers.image.url="https://unit.nginx.org" LABEL org.opencontainers.image.source="https://github.com/nginx/unit" LABEL org.opencontainers.image.documentation="https://unit.nginx.org/installation/#docker-images" LABEL org.opencontainers.image.vendor="NGINX Docker Maintainers <docker-maint@nginx.com>" -LABEL org.opencontainers.image.version="1.32.1" +LABEL org.opencontainers.image.version="1.33.0" RUN set -ex \ && savedAptMark="$(apt-mark showmanual)" \ @@ -15,7 +15,7 @@ RUN set -ex \ && mkdir -p /usr/lib/unit/modules /usr/lib/unit/debug-modules \ && mkdir -p /usr/src/unit \ && cd /usr/src/unit \ - && git clone --depth 1 -b 1.32.1-1 https://github.com/nginx/unit \ + && git clone --depth 1 -b 1.33.0-1 https://github.com/nginx/unit \ && cd unit \ && NCPU="$(getconf _NPROCESSORS_ONLN)" \ && DEB_HOST_MULTIARCH="$(dpkg-architecture -q DEB_HOST_MULTIARCH)" \ @@ -46,17 +46,17 @@ RUN set -ex \ && install -pm755 build/sbin/unitd /usr/sbin/unitd \ && make clean \ && apt-get install --no-install-recommends --no-install-suggests -y libclang-dev \ - && export RUST_VERSION=1.76.0 \ + && export RUST_VERSION=1.80.1 \ && export RUSTUP_HOME=/usr/src/unit/rustup \ && export CARGO_HOME=/usr/src/unit/cargo \ && export PATH=/usr/src/unit/cargo/bin:$PATH \ && dpkgArch="$(dpkg --print-architecture)" \ && case "${dpkgArch##*-}" in \ - amd64) rustArch="x86_64-unknown-linux-gnu"; rustupSha256="0b2f6c8f85a3d02fde2efc0ced4657869d73fccfce59defb4e8d29233116e6db" ;; \ - arm64) rustArch="aarch64-unknown-linux-gnu"; rustupSha256="673e336c81c65e6b16dcdede33f4cc9ed0f08bde1dbe7a935f113605292dc800" ;; \ + amd64) rustArch="x86_64-unknown-linux-gnu"; rustupSha256="6aeece6993e902708983b209d04c0d1dbb14ebb405ddb87def578d41f920f56d" ;; \ + arm64) rustArch="aarch64-unknown-linux-gnu"; rustupSha256="1cffbf51e63e634c746f741de50649bbbcbd9dbe1de363c9ecef64e278dba2b2" ;; \ *) echo >&2 "unsupported architecture: ${dpkgArch}"; exit 1 ;; \ esac \ - && url="https://static.rust-lang.org/rustup/archive/1.26.0/${rustArch}/rustup-init" \ + && url="https://static.rust-lang.org/rustup/archive/1.27.1/${rustArch}/rustup-init" \ && curl -L -O "$url" \ && echo "${rustupSha256} *rustup-init" | sha256sum -c - \ && chmod +x rustup-init \ diff --git a/pkg/docker/Makefile b/pkg/docker/Makefile index d8c53021..9dbca2d2 100644 --- a/pkg/docker/Makefile +++ b/pkg/docker/Makefile @@ -9,8 +9,9 @@ VERSION ?= $(DEFAULT_VERSION) PATCHLEVEL ?= 1 MODULES ?= go jsc node perl php python ruby wasm +MODULES_SLIM ?= python -VARIANT ?= bullseye +VARIANT ?= bookworm VERSIONS_minimal ?= CONTAINER_minimal ?= debian:$(VARIANT)-slim @@ -19,7 +20,7 @@ INSTALL_minimal ?= version RUN_minimal ?= /bin/true MODULE_PREBUILD_minimal ?= /bin/true -VERSIONS_go ?= 1.21 1.22 +VERSIONS_go ?= 1.22 1.23 VARIANT_go ?= $(VARIANT) $(foreach goversion, $(VERSIONS_go), $(eval CONTAINER_go$(goversion) = golang:$(goversion)-$(VARIANT_go))) CONFIGURE_go ?= go --go-path=$$GOPATH @@ -35,7 +36,7 @@ INSTALL_jsc ?= java-shared-install java-install RUN_jsc ?= rm -rf /root/.m2 MODULE_PREBUILD_jsc ?= /bin/true -VERSIONS_node ?= 20 21 +VERSIONS_node ?= 20 22 VARIANT_node ?= $(VARIANT) $(foreach nodeversion, $(VERSIONS_node), $(eval CONTAINER_node$(nodeversion) = node:$(nodeversion)-$(VARIANT_node))) CONFIGURE_node ?= nodejs --node-gyp=/usr/local/bin/node-gyp @@ -43,7 +44,7 @@ INSTALL_node ?= node node-install libunit-install RUN_node ?= rm -rf /root/.cache/ \&\& rm -rf /root/.npm MODULE_PREBUILD_node ?= npm -g install node-gyp -VERSIONS_perl ?= 5.36 5.38 +VERSIONS_perl ?= 5.38 
5.40 VARIANT_perl ?= $(VARIANT) $(foreach perlversion, $(VERSIONS_perl), $(eval CONTAINER_perl$(perlversion) = perl:$(perlversion)-$(VARIANT_perl))) CONFIGURE_perl ?= perl @@ -62,6 +63,7 @@ MODULE_PREBUILD_php ?= /bin/true VERSIONS_python ?= 3.11 3.12 VARIANT_python ?= $(VARIANT) $(foreach pythonversion, $(VERSIONS_python), $(eval CONTAINER_python$(pythonversion) = python:$(pythonversion)-$(VARIANT_python))) +$(foreach pythonversion, $(VERSIONS_python), $(eval CONTAINER_python$(pythonversion)-slim = python:$(pythonversion)-slim-$(VARIANT_python))) CONFIGURE_python ?= python --config=/usr/local/bin/python3-config INSTALL_python ?= python3-install RUN_python ?= /bin/true @@ -83,17 +85,17 @@ RUN_wasm ?= /bin/true define MODULE_PREBUILD_wasm apt-get install --no-install-recommends --no-install-suggests -y libclang-dev \\\n \ -\ \ \ \&\& export RUST_VERSION=1.76.0 \\\n \ +\ \ \ \&\& export RUST_VERSION=1.80.1 \\\n \ \ \ \ \&\& export RUSTUP_HOME=/usr/src/unit/rustup \\\n \ \ \ \ \&\& export CARGO_HOME=/usr/src/unit/cargo \\\n \ \ \ \ \&\& export PATH=/usr/src/unit/cargo/bin:\$$PATH \\\n \ \ \ \ \&\& dpkgArch="\$$\(dpkg --print-architecture\)" \\\n \ \ \ \ \&\& case "\$${dpkgArch##*-}" in \\\n \ -\ \ \ \ \ \ amd64\) rustArch="x86_64-unknown-linux-gnu"; rustupSha256="0b2f6c8f85a3d02fde2efc0ced4657869d73fccfce59defb4e8d29233116e6db" ;; \\\n \ -\ \ \ \ \ \ arm64\) rustArch="aarch64-unknown-linux-gnu"; rustupSha256="673e336c81c65e6b16dcdede33f4cc9ed0f08bde1dbe7a935f113605292dc800" ;; \\\n \ +\ \ \ \ \ \ amd64\) rustArch="x86_64-unknown-linux-gnu"; rustupSha256="6aeece6993e902708983b209d04c0d1dbb14ebb405ddb87def578d41f920f56d" ;; \\\n \ +\ \ \ \ \ \ arm64\) rustArch="aarch64-unknown-linux-gnu"; rustupSha256="1cffbf51e63e634c746f741de50649bbbcbd9dbe1de363c9ecef64e278dba2b2" ;; \\\n \ \ \ \ \ \ \ *\) echo \>\&2 "unsupported architecture: \$${dpkgArch}"; exit 1 ;; \\\n \ \ \ \ \esac \\\n \ -\ \ \ \&\& url="https://static.rust-lang.org/rustup/archive/1.26.0/\$${rustArch}/rustup-init" \\\n \ +\ \ \ \&\& url="https://static.rust-lang.org/rustup/archive/1.27.1/\$${rustArch}/rustup-init" \\\n \ \ \ \ \&\& curl -L -O "\$$url" \\\n \ \ \ \ \&\& echo "\$${rustupSha256} *rustup-init" | sha256sum -c - \\\n \ \ \ \ \&\& chmod +x rustup-init \\\n \ @@ -109,9 +111,11 @@ endef default: @echo "valid targets: all build dockerfiles library clean" -MODVERSIONS = $(foreach module, $(MODULES), $(foreach modversion, $(shell for v in $(VERSIONS_$(module)); do echo $$v; done | sort -r), $(module)$(modversion))) wasm minimal +MODVERSIONS = $(foreach module, $(MODULES), $(foreach modversion, $(shell for v in $(VERSIONS_$(module)); do echo $$v; done | sort -r), $(module)$(modversion))) +MODVERSIONS += $(foreach module, $(MODULES_SLIM), $(foreach modversion, $(shell for v in $(VERSIONS_$(module)); do echo $$v; done | sort -r), $(module)$(modversion)-slim)) +MODVERSIONS += wasm minimal -modname = $(shell echo $1 | /usr/bin/tr -d '.01234567890-') +modname = $(shell echo $1 | /usr/bin/tr -d '.01234567890-' | sed 's/slim//') dockerfiles: $(addprefix Dockerfile., $(MODVERSIONS)) build: $(addprefix build-, $(MODVERSIONS)) @@ -132,6 +136,7 @@ Dockerfile.%: ../../version template.Dockerfile build-%: Dockerfile.% docker pull $(CONTAINER_$*) docker build --no-cache -t unit:$(VERSION)-$* -f Dockerfile.$* . 
+ touch $@ library: @echo "# this file is generated via https://github.com/nginx/unit/blob/$(shell git describe --always --abbrev=0 HEAD)/pkg/docker/Makefile" @@ -141,10 +146,14 @@ library: @previous=""; \ for mod in $(MODVERSIONS); do \ echo ""; \ - modname="$$( echo $$mod | tr -d '.0123456789-' )"; \ - TAGS="$$mod $${mod%%.*} $$modname" ; \ + modname="$$( echo $$mod | tr -d '.0123456789')"; \ + modmajor="$${mod%%.*}"; \ + if test "$${mod#*slim}" != "$$mod"; then \ + modmajor="$${modmajor}-slim"; \ + fi; \ + TAGS="$$mod $$modmajor $$modname"; \ TAGS="$$(echo $$TAGS | tr " " "\n" | sort -u -r | tr "\n" "," | sed "s/,/, /g")"; \ - if [ "$$previous" = "$$modname" ]; then \ + if test "$${previous#*"$$modname"}" != "$$previous"; then \ echo "Tags: $(VERSION)-$$mod, $$mod"; \ else \ if [ "$$mod" = "minimal" ]; then \ @@ -158,7 +167,7 @@ library: echo "GitCommit: $(shell git describe --always --abbrev=0 HEAD)"; \ echo "Directory: pkg/docker"; \ echo "File: Dockerfile.$$mod"; \ - previous=$$(echo $$mod | tr -d '.0123456789-'); \ + previous="$$previous $$modname"; \ done diff: $(addprefix diff-, $(MODVERSIONS)) @@ -170,5 +179,6 @@ all: $(addprefix Dockerfile., $(MODVERSIONS)) clean: rm -f Dockerfile.* + rm -f build-* .PHONY: default build dockerfiles clean library diff --git a/pkg/rpm/Makefile b/pkg/rpm/Makefile index 1f3bbd58..f00b336a 100644 --- a/pkg/rpm/Makefile +++ b/pkg/rpm/Makefile @@ -10,9 +10,7 @@ RELEASE ?= $(DEFAULT_RELEASE) PACKAGE_VENDOR = NGINX Packaging <nginx-packaging@f5.com> -ifeq ($(shell test `rpm --eval '0%{?rhel} -eq 7 -a 0%{?amzn} -eq 0'`; echo $$?), 0) -OSVER = centos7 -else ifeq ($(shell rpm --eval "%{?rhel}"), 8) +ifeq ($(shell rpm --eval "%{?rhel}"), 8) OSVER = centos8 else ifeq ($(shell rpm --eval "%{?rhel}"), 9) OSVER = centos9 @@ -20,19 +18,11 @@ else ifeq ($(shell rpm --eval "%{?amzn}"), 2) OSVER = amazonlinux2 else ifeq ($(shell rpm --eval "%{?amzn}"), 2023) OSVER = amazonlinux2023 -else ifeq ($(shell test `rpm --eval '0%{?fedora} -ge 35 -a 0%{?fedora} -le 36'`; echo $$?),0) -OSVER = fedora -else ifeq ($(shell test `rpm --eval '0%{?fedora} -ge 37 -a 0%{?fedora} -le 38'`; echo $$?),0) -OSVER = fedora37 else ifeq ($(shell test `rpm --eval '0%{?fedora} -ge 39'`; echo $$?),0) OSVER = fedora39 endif -BUILD_DEPENDS_unit = gcc rpm-build rpmlint - -ifeq ($(OSVER), centos7) -BUILD_DEPENDS_unit += which -endif +BUILD_DEPENDS_unit = gcc rpm-build rpmlint clang llvm ifneq (,$(findstring $(OSVER),amazonlinux2)) BUILD_DEPENDS_unit += libxml2 libxslt openssl11-devel @@ -44,17 +34,6 @@ BUILD_DEPENDS = $(BUILD_DEPENDS_unit) MODULES= -ifeq ($(OSVER), centos7) -include Makefile.php -include Makefile.python27 -include Makefile.python36 -include Makefile.go -include Makefile.perl -include Makefile.jsc-common -include Makefile.jsc8 -include Makefile.jsc11 -endif - ifeq ($(OSVER), centos8) include Makefile.php include Makefile.python27 @@ -102,30 +81,6 @@ include Makefile.jsc17 include Makefile.wasm endif -ifeq ($(OSVER), fedora) -include Makefile.php -include Makefile.python310 -include Makefile.go -include Makefile.perl -include Makefile.ruby -include Makefile.jsc-common -include Makefile.jsc8 -include Makefile.jsc11 -include Makefile.wasm -endif - -ifeq ($(OSVER), fedora37) -include Makefile.php -include Makefile.python311 -include Makefile.go -include Makefile.perl -include Makefile.ruby -include Makefile.jsc-common -include Makefile.jsc8 -include Makefile.jsc11 -include Makefile.wasm -endif - ifeq ($(OSVER), fedora39) include Makefile.php include Makefile.python312 diff --git 
a/pkg/rpm/Makefile.go b/pkg/rpm/Makefile.go index aacbe2b9..335b207a 100644 --- a/pkg/rpm/Makefile.go +++ b/pkg/rpm/Makefile.go @@ -13,11 +13,7 @@ MODULE_INSTARGS_go= go-install-src MODULE_SOURCES_go= unit.example-go-app \ unit.example-go-config -ifeq ($(OSVER), centos6) -BUILD_DEPENDS_go= epel-release golang -else BUILD_DEPENDS_go= golang -endif BUILD_DEPENDS+= $(BUILD_DEPENDS_go) diff --git a/pkg/rpm/Makefile.python27 b/pkg/rpm/Makefile.python27 index 3de5f634..aeb1248f 100644 --- a/pkg/rpm/Makefile.python27 +++ b/pkg/rpm/Makefile.python27 @@ -13,9 +13,9 @@ MODULE_INSTARGS_python27= python2.7-install MODULE_SOURCES_python27= unit.example-python-app \ unit.example-python27-config -ifneq (,$(findstring $(OSVER),fedora centos8)) +ifneq (,$(findstring $(OSVER),centos8)) BUILD_DEPENDS_python27= python2-devel -else ifneq (,$(findstring $(OSVER),centos7 amazonlinux2)) +else ifneq (,$(findstring $(OSVER),amazonlinux2)) BUILD_DEPENDS_python27= python-devel else BUILD_DEPENDS_python27= python27-devel @@ -24,7 +24,7 @@ endif BUILD_DEPENDS+= $(BUILD_DEPENDS_python27) define MODULE_DEFINITIONS_python27 -%if (0%{?rhel} == 7) || (0%{?amzn} == 2) +%if 0%{?amzn} == 2 Obsoletes: unit-python %endif endef diff --git a/pkg/rpm/Makefile.python310 b/pkg/rpm/Makefile.python310 deleted file mode 100644 index 50731475..00000000 --- a/pkg/rpm/Makefile.python310 +++ /dev/null @@ -1,57 +0,0 @@ -MODULES+= python310 -MODULE_SUFFIX_python310= python3.10 - -MODULE_SUMMARY_python310= Python 3.10 module for NGINX Unit - -MODULE_VERSION_python310= $(VERSION) -MODULE_RELEASE_python310= 1 - -MODULE_CONFARGS_python310= python --config=python3.10-config -MODULE_MAKEARGS_python310= python3.10 -MODULE_INSTARGS_python310= python3.10-install - -MODULE_SOURCES_python310= unit.example-python-app \ - unit.example-python310-config - -ifneq (,$(findstring $(OSVER),fedora amazonlinux2)) -BUILD_DEPENDS_python310= python3-devel -else -BUILD_DEPENDS_python310= python310-devel -endif - -BUILD_DEPENDS+= $(BUILD_DEPENDS_python310) - -define MODULE_PREINSTALL_python310 -%{__mkdir} -p %{buildroot}%{_datadir}/doc/unit-python310/examples/python-app -%{__install} -m 644 -p %{SOURCE100} \ - %{buildroot}%{_datadir}/doc/unit-python310/examples/python-app/wsgi.py -%{__install} -m 644 -p %{SOURCE101} \ - %{buildroot}%{_datadir}/doc/unit-python310/examples/unit.config -endef -export MODULE_PREINSTALL_python310 - -define MODULE_FILES_python310 -%{_libdir}/unit/modules/* -%{_libdir}/unit/debug-modules/* -endef -export MODULE_FILES_python310 - -define MODULE_POST_python310 -cat <<BANNER ----------------------------------------------------------------------- - -The $(MODULE_SUMMARY_python310) has been installed. 
- -To check the sample app, run these commands: - - sudo service unit start - cd /usr/share/doc/%{name}/examples - sudo curl -X PUT --data-binary @unit.config --unix-socket /var/run/unit/control.sock http://localhost/config - curl http://localhost:8400/ - -Online documentation is available at https://unit.nginx.org - ----------------------------------------------------------------------- -BANNER -endef -export MODULE_POST_python310 diff --git a/pkg/rpm/Makefile.python311 b/pkg/rpm/Makefile.python311 index ae58d722..10c9c69b 100644 --- a/pkg/rpm/Makefile.python311 +++ b/pkg/rpm/Makefile.python311 @@ -13,11 +13,7 @@ MODULE_INSTARGS_python311= python3.11-install MODULE_SOURCES_python311= unit.example-python-app \ unit.example-python311-config -ifneq (,$(findstring $(OSVER),fedora37)) -BUILD_DEPENDS_python311= python3-devel -else BUILD_DEPENDS_python311= python3.11-devel -endif BUILD_DEPENDS+= $(BUILD_DEPENDS_python311) diff --git a/pkg/rpm/Makefile.python36 b/pkg/rpm/Makefile.python36 index c1fc8b6c..7019b819 100644 --- a/pkg/rpm/Makefile.python36 +++ b/pkg/rpm/Makefile.python36 @@ -13,11 +13,7 @@ MODULE_INSTARGS_python36= python3.6-install MODULE_SOURCES_python36= unit.example-python-app \ unit.example-python36-config -ifneq (,$(findstring $(OSVER),fedora centos7)) -BUILD_DEPENDS_python36= python3-devel -else BUILD_DEPENDS_python36= python36-devel -endif BUILD_DEPENDS+= $(BUILD_DEPENDS_python36) diff --git a/pkg/rpm/Makefile.python37 b/pkg/rpm/Makefile.python37 index c0604fd9..1feae6a4 100644 --- a/pkg/rpm/Makefile.python37 +++ b/pkg/rpm/Makefile.python37 @@ -13,7 +13,7 @@ MODULE_INSTARGS_python37= python3.7-install MODULE_SOURCES_python37= unit.example-python-app \ unit.example-python37-config -ifneq (,$(findstring $(OSVER),fedora amazonlinux2)) +ifneq (,$(findstring $(OSVER),amazonlinux2)) BUILD_DEPENDS_python37= python3-devel else BUILD_DEPENDS_python37= python37-devel diff --git a/pkg/rpm/Makefile.python39 b/pkg/rpm/Makefile.python39 index 7a3ae0b0..25f2a2a8 100644 --- a/pkg/rpm/Makefile.python39 +++ b/pkg/rpm/Makefile.python39 @@ -13,7 +13,7 @@ MODULE_INSTARGS_python39= python3.9-install MODULE_SOURCES_python39= unit.example-python-app \ unit.example-python39-config -ifneq (,$(findstring $(OSVER),fedora amazonlinux2 amazonlinux2023 centos9)) +ifneq (,$(findstring $(OSVER),amazonlinux2 amazonlinux2023 centos9)) BUILD_DEPENDS_python39= python3-devel else BUILD_DEPENDS_python39= python39-devel diff --git a/pkg/rpm/Makefile.wasm b/pkg/rpm/Makefile.wasm index cb2ad35a..fe5c6808 100644 --- a/pkg/rpm/Makefile.wasm +++ b/pkg/rpm/Makefile.wasm @@ -6,13 +6,18 @@ MODULE_SUMMARY_wasm= WASM module for NGINX Unit MODULE_VERSION_wasm= $(VERSION) MODULE_RELEASE_wasm= 1 -MODULE_CONFARGS_wasm= wasm --include-path=\`pwd\`/pkg/contrib/wasmtime/crates/c-api/include --lib-path=\`pwd\`/pkg/contrib/wasmtime/target/release \&\& ./configure wasm-wasi-component -MODULE_MAKEARGS_wasm= wasm wasm-wasi-component CFLAGS=\"\$$(grep ^CFLAGS build/Makefile | cut -d' ' -f 3-) -Wno-missing-prototypes\" -MODULE_INSTARGS_wasm= wasm-install wasm-wasi-component-install +MODULE_CONFARGS_wasm= wasm-wasi-component +MODULE_MAKEARGS_wasm= wasm-wasi-component CFLAGS=\"\$$(grep ^CFLAGS build/Makefile | cut -d' ' -f 3-) -Wno-missing-prototypes\" +MODULE_INSTARGS_wasm= wasm-wasi-component-install + +ifeq (,$(findstring $(OSVER),amazonlinux2)) +MODULE_CONFARGS_wasm+= \&\& ./configure wasm --include-path=\`pwd\`/pkg/contrib/wasmtime/artifacts/include --lib-path=\`pwd\`/pkg/contrib/wasmtime/artifacts/lib +MODULE_MAKEARGS_wasm+= wasm 
+MODULE_INSTARGS_wasm+= wasm-install MODULE_SOURCES_wasm= -BUILD_DEPENDS_wasm= +BUILD_DEPENDS_wasm= cmake BUILD_DEPENDS+= $(BUILD_DEPENDS_wasm) @@ -22,16 +27,23 @@ endef export MODULE_PREBUILD_wasm define MODULE_PREINSTALL_wasm +\# brp-mangle-shebangs parses all executable files for a shebang +\# this fails on a vendored code that somehow ships with exec bit enabled +find pkg/contrib/wasmtime/ -type f -executable -name "*.rs" | xargs chmod -x endef export MODULE_PREINSTALL_wasm define MODULE_POSTINSTALL_wasm -%{__install} -m 755 -p pkg/contrib/wasmtime/target/release/libwasmtime.so %{buildroot}%{_libdir}/ +%{__install} -m 755 -p pkg/contrib/wasmtime/artifacts/lib/libwasmtime.so %{buildroot}%{_libdir}/ endef export MODULE_POSTINSTALL_wasm +endif define MODULE_FILES_wasm +%if 0%{?amzn2} +%else %{_libdir}/libwasmtime.so +%endif %{_libdir}/unit/modules/* %{_libdir}/unit/debug-modules/* endef diff --git a/pkg/rpm/rpmbuild/SOURCES/unit.example-python310-config b/pkg/rpm/rpmbuild/SOURCES/unit.example-python310-config deleted file mode 100644 index 8a73ca53..00000000 --- a/pkg/rpm/rpmbuild/SOURCES/unit.example-python310-config +++ /dev/null @@ -1,16 +0,0 @@ -{ - "applications": { - "example_python": { - "type": "python 3.10", - "processes": 2, - "path": "/usr/share/doc/unit-python310/examples/python-app", - "module": "wsgi" - } - }, - - "listeners": { - "*:8400": { - "pass": "applications/example_python" - } - } -} diff --git a/pkg/rpm/rpmbuild/SOURCES/unit.service b/pkg/rpm/rpmbuild/SOURCES/unit.service index 6df00fbb..f52d96da 100644 --- a/pkg/rpm/rpmbuild/SOURCES/unit.service +++ b/pkg/rpm/rpmbuild/SOURCES/unit.service @@ -15,12 +15,13 @@ Wants=network-online.target After=network-online.target [Service] -Type=simple +Type=forking Environment="UNITD_OPTIONS=--log /var/log/unit/unit.log --pid /var/run/unit/unit.pid" -ExecStart=/usr/sbin/unitd $UNITD_OPTIONS --no-daemon +ExecStart=/usr/sbin/unitd $UNITD_OPTIONS ExecReload= RuntimeDirectory=unit RuntimeDirectoryMode=0755 +PIDFile=/run/unit/unit.pid [Install] WantedBy=multi-user.target diff --git a/pkg/rpm/unit.module.spec.in b/pkg/rpm/unit.module.spec.in index b3d5d94b..7d1d1b0e 100644 --- a/pkg/rpm/unit.module.spec.in +++ b/pkg/rpm/unit.module.spec.in @@ -1,10 +1,6 @@ # distribution specific definitions %define bdir %{_builddir}/%{name}-%{version} -%if (0%{?rhel} == 7 && 0%{?amzn} == 0) -%define dist .el7 -%endif - %%MODULE_DEFINITIONS%% %if 0%{?rhel}%{?fedora} @@ -30,13 +26,10 @@ Release: %%RELEASE%%%{?dist}.ngx License: ASL 2.0 Vendor: %%PACKAGE_VENDOR%% URL: https://unit.nginx.org/ -Group: System Environment/Daemons Source0: unit-%{version}.tar.gz %%MODULE_SOURCES%% -BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) - BuildRequires: pcre2-devel Requires: unit-r%%UNIT_VERSION%% @@ -77,7 +70,6 @@ make %%MODULE_MAKEARGS%% %{__mv} build build-nodebug %install -%{__rm} -rf %{buildroot} %{__mkdir} -p %{buildroot}%{_datadir}/doc/%%NAME%% if [ `basename %{SOURCE100}` == COPYRIGHT.%{name} ]; then %{__install} -m 644 -p %{SOURCE100} \ @@ -100,16 +92,12 @@ cd %{bdir} grep -v 'usr/src' debugfiles.list > debugfiles.list.new && mv debugfiles.list.new debugfiles.list cat /dev/null > debugsources.list -%clean -%{__rm} -rf %{buildroot} - %post if [ $1 -eq 1 ]; then %%MODULE_POST%% fi %files -%defattr(-,root,root,-) %dir %{_datadir}/doc/%%NAME%% %{_datadir}/doc/%%NAME%%/* %%MODULE_FILES%% diff --git a/pkg/rpm/unit.spec.in b/pkg/rpm/unit.spec.in index 01323650..9360ff7e 100644 --- a/pkg/rpm/unit.spec.in +++ b/pkg/rpm/unit.spec.in @@ -31,7 
+31,6 @@ Release: %%RELEASE%%%{?dist}.ngx License: ASL 2.0 Vendor: %%PACKAGE_VENDOR%% URL: https://unit.nginx.org/ -Group: System Environment/Daemons Source0: unit-%{version}.tar.gz Source1: unit.service @@ -39,7 +38,6 @@ Source2: unit-debug.service Source3: unit.example.config Source4: unit.logrotate -BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) BuildRequires: systemd Requires(post): systemd Requires(preun): systemd @@ -68,7 +66,6 @@ dynamically via an API. Summary: NGINX Unit (development files) Version: %%VERSION%% Release: %%RELEASE%%%{?dist}.ngx -Group: Development/Libraries Requires: unit == %%VERSION%%-%%RELEASE%%%{?dist}.ngx %description devel Library and include files required for NGINX Unit modules development. @@ -106,7 +103,6 @@ PKG_CONFIG_PATH=%{bdir}/pkg/contrib/njs/build \ %endif %install -%{__rm} -rf %{buildroot} %{__ln_s} build-nodebug build DESTDIR=%{buildroot} make unitd-install libunit-install manpage-install %{__mkdir} -p %{buildroot}%{_bindir} @@ -160,9 +156,6 @@ cd %{bdir} grep -v 'usr/src' debugfiles.list > debugfiles.list.new && mv debugfiles.list.new debugfiles.list cat /dev/null > debugsources.list -%clean -%{__rm} -rf %{buildroot} - %post getent group unit >/dev/null || groupadd -r unit getent passwd unit >/dev/null || \ @@ -213,7 +206,6 @@ More info: https://unit.nginx.org/installation/#official-packages BANNER %files -%defattr(-,root,root,-) %attr(0755,root,root) %{_bindir}/unitc %attr(0755,root,root) %{_bindir}/setup-unit %attr(0755,root,root) %{_sbindir}/unitd diff --git a/src/nxt_application.c b/src/nxt_application.c index e0247bf0..629aa11c 100644 --- a/src/nxt_application.c +++ b/src/nxt_application.c @@ -32,6 +32,7 @@ typedef struct { nxt_app_type_t type; + nxt_str_t name; nxt_str_t version; nxt_str_t file; nxt_array_t *mounts; @@ -257,12 +258,14 @@ nxt_discovery_modules(nxt_task_t *task, const char *path) module[i].type, &module[i].version, &module[i].file); size += nxt_length("{\"type\": ,"); + size += nxt_length(" \"name\": \"\","); size += nxt_length(" \"version\": \"\","); size += nxt_length(" \"file\": \"\","); size += nxt_length(" \"mounts\": []},"); size += NXT_INT_T_LEN + module[i].version.length + + module[i].name.length + module[i].file.length; mounts = module[i].mounts; @@ -294,9 +297,10 @@ nxt_discovery_modules(nxt_task_t *task, const char *path) for (i = 0; i < n; i++) { mounts = module[i].mounts; - p = nxt_sprintf(p, end, "{\"type\": %d, \"version\": \"%V\", " - "\"file\": \"%V\", \"mounts\": [", - module[i].type, &module[i].version, &module[i].file); + p = nxt_sprintf(p, end, "{\"type\": %d, \"name\": \"%V\", " + "\"version\": \"%V\", \"file\": \"%V\", \"mounts\": [", + module[i].type, &module[i].name, &module[i].version, + &module[i].file); mnt = mounts->elts; for (j = 0; j < mounts->nelts; j++) { @@ -412,6 +416,11 @@ nxt_discovery_module(nxt_task_t *task, nxt_mp_t *mp, nxt_array_t *modules, goto fail; } + nxt_str_dup(mp, &module->name, &app->type); + if (module->name.start == NULL) { + goto fail; + } + module->file.length = nxt_strlen(name); module->file.start = nxt_mp_alloc(mp, module->file.length); diff --git a/src/nxt_application.h b/src/nxt_application.h index f5d7a9df..a3b4230a 100644 --- a/src/nxt_application.h +++ b/src/nxt_application.h @@ -35,6 +35,7 @@ typedef nxt_int_t (*nxt_application_setup_t)(nxt_task_t *task, typedef struct { nxt_app_type_t type; + char *name; u_char *version; char *file; nxt_app_module_t *module; diff --git a/src/nxt_atomic.h b/src/nxt_atomic.h index dae999a9..376375c5 100644 
--- a/src/nxt_atomic.h +++ b/src/nxt_atomic.h @@ -67,81 +67,6 @@ typedef volatile nxt_atomic_uint_t nxt_atomic_t; #endif -#elif (NXT_HAVE_SOLARIS_ATOMIC) /* Solaris 10 */ - -#include <atomic.h> - -typedef long nxt_atomic_int_t; -typedef ulong_t nxt_atomic_uint_t; -typedef volatile nxt_atomic_uint_t nxt_atomic_t; - - -#define nxt_atomic_cmp_set(lock, cmp, set) \ - (atomic_cas_ulong(lock, cmp, set) == (ulong_t) cmp) - - -#define nxt_atomic_xchg(lock, set) \ - atomic_add_swap(lock, set) - - -#define nxt_atomic_fetch_add(value, add) \ - (atomic_add_long_nv(value, add) - add) - - -#define nxt_atomic_or_fetch(ptr, val) \ - atomic_or_ulong_nv(ptr, val) - - -#define nxt_atomic_and_fetch(ptr, val) \ - atomic_and_ulong_nv(ptr, val) - - -/* - * Solaris uses SPARC Total Store Order model. In this model: - * 1) Each atomic load-store instruction behaves as if it were followed by - * #LoadLoad, #LoadStore, and #StoreStore barriers. - * 2) Each load instruction behaves as if it were followed by - * #LoadLoad and #LoadStore barriers. - * 3) Each store instruction behaves as if it were followed by - * #StoreStore barrier. - * - * In X86_64 atomic instructions set a full barrier and usual instructions - * set implicit #LoadLoad, #LoadStore, and #StoreStore barriers. - * - * An acquire barrier requires at least #LoadLoad and #LoadStore barriers - * and they are provided by atomic load-store instruction. - * - * A release barrier requires at least #LoadStore and #StoreStore barriers, - * so a lock release does not require an explicit barrier: all load - * instructions in critical section is followed by implicit #LoadStore - * barrier and all store instructions are followed by implicit #StoreStore - * barrier. - */ - -#define nxt_atomic_try_lock(lock) \ - nxt_atomic_cmp_set(lock, 0, 1) - - -#define nxt_atomic_release(lock) \ - *lock = 0; - - -/* - * The "rep; nop" is used instead of "pause" to omit the "[ PAUSE ]" hardware - * capability added by linker since Solaris ld.so.1 does not know about it: - * - * ld.so.1: ...: fatal: hardware capability unsupported: 0x2000 [ PAUSE ] - */ - -#if (__i386__ || __i386 || __amd64__ || __amd64) -#define nxt_cpu_pause() \ - __asm__ ("rep; nop") - -#else -#define nxt_cpu_pause() -#endif - - /* elif (NXT_HAVE_MACOSX_ATOMIC) */ /* @@ -161,109 +86,7 @@ typedef volatile nxt_atomic_uint_t nxt_atomic_t; */ -#elif (NXT_HAVE_XLC_ATOMIC) /* XL C/C++ V8.0 for AIX */ - -#if (NXT_64BIT) - -typedef long nxt_atomic_int_t; -typedef unsigned long nxt_atomic_uint_t; -typedef volatile nxt_atomic_int_t nxt_atomic_t; - - -nxt_inline nxt_bool_t -nxt_atomic_cmp_set(nxt_atomic_t *lock, nxt_atomic_int_t cmp, - nxt_atomic_int_t set) -{ - nxt_atomic_int_t old; - - old = cmp; - - return __compare_and_swaplp(lock, &old, set); -} - - -#define nxt_atomic_xchg(lock, set) \ - __fetch_and_swaplp(lock, set) - - -#define nxt_atomic_fetch_add(value, add) \ - __fetch_and_addlp(value, add) - - -#else /* NXT_32BIT */ - -typedef int nxt_atomic_int_t; -typedef unsigned int nxt_atomic_uint_t; -typedef volatile nxt_atomic_int_t nxt_atomic_t; - - -nxt_inline nxt_bool_t -nxt_atomic_cmp_set(nxt_atomic_t *lock, nxt_atomic_int_t cmp, - nxt_atomic_int_t set) -{ - nxt_atomic_int_t old; - - old = cmp; - - return __compare_and_swap(lock, &old, set); -} - - -#define nxt_atomic_xchg(lock, set) \ - __fetch_and_swap(lock, set) - - -#define nxt_atomic_fetch_add(value, add) \ - __fetch_and_add(value, add) - - -#endif /* NXT_32BIT*/ - - -/* - * __lwsync() is a "lwsync" instruction that sets #LoadLoad, #LoadStore, - * and #StoreStore 
barrier. - * - * __compare_and_swap() is a pair of "ldarx" and "stdcx" instructions. - * A "lwsync" does not set #StoreLoad barrier so it can not be used after - * this pair since a next load inside critical section can be performed - * after the "ldarx" instruction but before the "stdcx" instruction. - * However, this next load instruction will load correct data because - * otherwise the "ldarx/stdcx" pair will fail and this data will be - * discarded. Nevertheless, the "isync" instruction is used for sure. - * - * A full barrier can be set with __sync(), a "sync" instruction, but there - * is also a faster __isync(), an "isync" instruction. This instruction is - * not a memory barrier but an instruction barrier. An "isync" instruction - * causes the processor to complete execution of all previous instructions - * and then to discard instructions (which may have begun execution) following - * the "isync". After the "isync" is executed, the following instructions - * then begin execution. The "isync" is used to ensure that the loads - * following entry into a critical section are not performed (because of - * aggressive out-of-order or speculative execution in the processor) until - * the lock is granted. - */ - -nxt_inline nxt_bool_t -nxt_atomic_try_lock(nxt_atomic_t *lock) -{ - if (nxt_atomic_cmp_set(lock, 0, 1)) { - __isync(); - return 1; - } - - return 0; -} - - -#define nxt_atomic_release(lock) \ - do { __lwsync(); *lock = 0; } while (0) - - -#define nxt_cpu_pause() - - -#endif /* NXT_HAVE_XLC_ATOMIC */ +#endif /* NXT_HAVE_GCC_ATOMIC */ #endif /* _NXT_ATOMIC_H_INCLUDED_ */ diff --git a/src/nxt_buf.h b/src/nxt_buf.h index f1e2879f..a561ef4e 100644 --- a/src/nxt_buf.h +++ b/src/nxt_buf.h @@ -13,7 +13,7 @@ * should be allocated by appropriate nxt_buf_XXX_alloc() function. * * 1) Memory-only buffers, their size is less than nxt_buf_t size, it - * is equal to offsetof(nxt_buf_t, file_pos), that is it is nxt_buf_t + * is equal to offsetof(nxt_buf_t, file), that is it is nxt_buf_t * without file and mmap part. The buffers are frequently used, so * the reduction allows to save 20-32 bytes depending on platform. 
* diff --git a/src/nxt_cgroup.c b/src/nxt_cgroup.c index 2c404acc..79e240f1 100644 --- a/src/nxt_cgroup.c +++ b/src/nxt_cgroup.c @@ -34,7 +34,7 @@ nxt_cgroup_proc_add(nxt_task_t *task, nxt_process_t *process) return NXT_ERROR; } - ret = nxt_fs_mkdir_all((const u_char *) cgprocs, 0777); + ret = nxt_fs_mkdir_p((const u_char *) cgprocs, 0777); if (nxt_slow_path(ret == NXT_ERROR)) { return NXT_ERROR; } diff --git a/src/nxt_conf.c b/src/nxt_conf.c index 008cb968..bb229c33 100644 --- a/src/nxt_conf.c +++ b/src/nxt_conf.c @@ -123,23 +123,24 @@ static u_char *nxt_conf_json_parse_number(nxt_mp_t *mp, nxt_conf_value_t *value, static void nxt_conf_json_parse_error(nxt_conf_json_error_t *error, u_char *pos, const char *detail); -static nxt_int_t nxt_conf_copy_value(nxt_mp_t *mp, nxt_conf_op_t *op, - nxt_conf_value_t *dst, nxt_conf_value_t *src); -static nxt_int_t nxt_conf_copy_array(nxt_mp_t *mp, nxt_conf_op_t *op, - nxt_conf_value_t *dst, nxt_conf_value_t *src); -static nxt_int_t nxt_conf_copy_object(nxt_mp_t *mp, nxt_conf_op_t *op, - nxt_conf_value_t *dst, nxt_conf_value_t *src); - -static size_t nxt_conf_json_string_length(nxt_conf_value_t *value); -static u_char *nxt_conf_json_print_string(u_char *p, nxt_conf_value_t *value); -static size_t nxt_conf_json_array_length(nxt_conf_value_t *value, +static nxt_int_t nxt_conf_copy_value(nxt_mp_t *mp, const nxt_conf_op_t *op, + nxt_conf_value_t *dst, const nxt_conf_value_t *src); +static nxt_int_t nxt_conf_copy_array(nxt_mp_t *mp, const nxt_conf_op_t *op, + nxt_conf_value_t *dst, const nxt_conf_value_t *src); +static nxt_int_t nxt_conf_copy_object(nxt_mp_t *mp, const nxt_conf_op_t *op, + nxt_conf_value_t *dst, const nxt_conf_value_t *src); + +static size_t nxt_conf_json_string_length(const nxt_conf_value_t *value); +static u_char *nxt_conf_json_print_string(u_char *p, + const nxt_conf_value_t *value); +static size_t nxt_conf_json_array_length(const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty); -static u_char *nxt_conf_json_print_array(u_char *p, nxt_conf_value_t *value, - nxt_conf_json_pretty_t *pretty); -static size_t nxt_conf_json_object_length(nxt_conf_value_t *value, - nxt_conf_json_pretty_t *pretty); -static u_char *nxt_conf_json_print_object(u_char *p, nxt_conf_value_t *value, +static u_char *nxt_conf_json_print_array(u_char *p, + const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty); +static size_t nxt_conf_json_object_length(const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty); +static u_char *nxt_conf_json_print_object(u_char *p, + const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty); static size_t nxt_conf_json_escape_length(u_char *p, size_t size); static u_char *nxt_conf_json_escape(u_char *dst, u_char *src, size_t size); @@ -162,21 +163,22 @@ nxt_conf_json_indentation(u_char *p, uint32_t level) void -nxt_conf_get_string(nxt_conf_value_t *value, nxt_str_t *str) +nxt_conf_get_string(const nxt_conf_value_t *value, nxt_str_t *str) { if (value->type == NXT_CONF_VALUE_SHORT_STRING) { str->length = value->u.str.length; - str->start = value->u.str.start; + str->start = (u_char *) value->u.str.start; } else { str->length = value->u.string.length; - str->start = value->u.string.start; + str->start = (u_char *) value->u.string.start; } } nxt_str_t * -nxt_conf_get_string_dup(nxt_conf_value_t *value, nxt_mp_t *mp, nxt_str_t *str) +nxt_conf_get_string_dup(const nxt_conf_value_t *value, nxt_mp_t *mp, + nxt_str_t *str) { nxt_str_t s; @@ -186,7 +188,7 @@ nxt_conf_get_string_dup(nxt_conf_value_t *value, nxt_mp_t *mp, 
nxt_str_t *str) void -nxt_conf_set_string(nxt_conf_value_t *value, nxt_str_t *str) +nxt_conf_set_string(nxt_conf_value_t *value, const nxt_str_t *str) { if (str->length > NXT_CONF_MAX_SHORT_STRING) { value->type = NXT_CONF_VALUE_STRING; @@ -245,7 +247,7 @@ nxt_conf_get_boolean(nxt_conf_value_t *value) nxt_uint_t -nxt_conf_object_members_count(nxt_conf_value_t *value) +nxt_conf_object_members_count(const nxt_conf_value_t *value) { return value->u.object->count; } @@ -276,7 +278,7 @@ nxt_conf_create_object(nxt_mp_t *mp, nxt_uint_t count) void -nxt_conf_set_member(nxt_conf_value_t *object, nxt_str_t *name, +nxt_conf_set_member(nxt_conf_value_t *object, const nxt_str_t *name, const nxt_conf_value_t *value, uint32_t index) { nxt_conf_object_member_t *member; @@ -290,8 +292,8 @@ nxt_conf_set_member(nxt_conf_value_t *object, nxt_str_t *name, nxt_int_t -nxt_conf_set_member_dup(nxt_conf_value_t *object, nxt_mp_t *mp, nxt_str_t *name, - nxt_conf_value_t *value, uint32_t index) +nxt_conf_set_member_dup(nxt_conf_value_t *object, nxt_mp_t *mp, + const nxt_str_t *name, const nxt_conf_value_t *value, uint32_t index) { nxt_conf_object_member_t *member; @@ -304,8 +306,8 @@ nxt_conf_set_member_dup(nxt_conf_value_t *object, nxt_mp_t *mp, nxt_str_t *name, void -nxt_conf_set_member_string(nxt_conf_value_t *object, nxt_str_t *name, - nxt_str_t *value, uint32_t index) +nxt_conf_set_member_string(nxt_conf_value_t *object, const nxt_str_t *name, + const nxt_str_t *value, uint32_t index) { nxt_conf_object_member_t *member; @@ -319,7 +321,7 @@ nxt_conf_set_member_string(nxt_conf_value_t *object, nxt_str_t *name, nxt_int_t nxt_conf_set_member_string_dup(nxt_conf_value_t *object, nxt_mp_t *mp, - nxt_str_t *name, nxt_str_t *value, uint32_t index) + const nxt_str_t *name, const nxt_str_t *value, uint32_t index) { nxt_conf_object_member_t *member; @@ -332,7 +334,7 @@ nxt_conf_set_member_string_dup(nxt_conf_value_t *object, nxt_mp_t *mp, void -nxt_conf_set_member_integer(nxt_conf_value_t *object, nxt_str_t *name, +nxt_conf_set_member_integer(nxt_conf_value_t *object, const nxt_str_t *name, int64_t value, uint32_t index) { u_char *p, *end; @@ -353,7 +355,7 @@ nxt_conf_set_member_integer(nxt_conf_value_t *object, nxt_str_t *name, void -nxt_conf_set_member_null(nxt_conf_value_t *object, nxt_str_t *name, +nxt_conf_set_member_null(nxt_conf_value_t *object, const nxt_str_t *name, uint32_t index) { nxt_conf_object_member_t *member; @@ -400,7 +402,7 @@ nxt_conf_set_element(nxt_conf_value_t *array, nxt_uint_t index, nxt_int_t nxt_conf_set_element_string_dup(nxt_conf_value_t *array, nxt_mp_t *mp, - nxt_uint_t index, nxt_str_t *value) + nxt_uint_t index, const nxt_str_t *value) { nxt_conf_value_t *element; @@ -411,21 +413,21 @@ nxt_conf_set_element_string_dup(nxt_conf_value_t *array, nxt_mp_t *mp, nxt_uint_t -nxt_conf_array_elements_count(nxt_conf_value_t *value) +nxt_conf_array_elements_count(const nxt_conf_value_t *value) { return value->u.array->count; } nxt_uint_t -nxt_conf_array_elements_count_or_1(nxt_conf_value_t *value) +nxt_conf_array_elements_count_or_1(const nxt_conf_value_t *value) { return (value->type == NXT_CONF_VALUE_ARRAY) ? 
value->u.array->count : 1; } nxt_uint_t -nxt_conf_type(nxt_conf_value_t *value) +nxt_conf_type(const nxt_conf_value_t *value) { switch (value->type) { @@ -459,7 +461,7 @@ nxt_conf_type(nxt_conf_value_t *value) nxt_conf_value_t * -nxt_conf_get_path(nxt_conf_value_t *value, nxt_str_t *path) +nxt_conf_get_path(nxt_conf_value_t *value, const nxt_str_t *path) { nxt_str_t token; nxt_int_t ret, index; @@ -550,7 +552,7 @@ nxt_conf_path_next_token(nxt_conf_path_parse_t *parse, nxt_str_t *token) nxt_conf_value_t * -nxt_conf_get_object_member(nxt_conf_value_t *value, nxt_str_t *name, +nxt_conf_get_object_member(const nxt_conf_value_t *value, const nxt_str_t *name, uint32_t *index) { nxt_str_t str; @@ -584,8 +586,8 @@ nxt_conf_get_object_member(nxt_conf_value_t *value, nxt_str_t *name, nxt_int_t -nxt_conf_map_object(nxt_mp_t *mp, nxt_conf_value_t *value, nxt_conf_map_t *map, - nxt_uint_t n, void *data) +nxt_conf_map_object(nxt_mp_t *mp, const nxt_conf_value_t *value, + const nxt_conf_map_t *map, nxt_uint_t n, void *data) { double num; nxt_str_t str, *s; @@ -736,7 +738,7 @@ nxt_conf_map_object(nxt_mp_t *mp, nxt_conf_value_t *value, nxt_conf_map_t *map, nxt_conf_value_t * -nxt_conf_next_object_member(nxt_conf_value_t *value, nxt_str_t *name, +nxt_conf_next_object_member(const nxt_conf_value_t *value, nxt_str_t *name, uint32_t *next) { uint32_t n; @@ -764,7 +766,7 @@ nxt_conf_next_object_member(nxt_conf_value_t *value, nxt_str_t *name, nxt_conf_value_t * -nxt_conf_get_array_element(nxt_conf_value_t *value, uint32_t index) +nxt_conf_get_array_element(const nxt_conf_value_t *value, uint32_t index) { nxt_conf_array_t *array; @@ -802,7 +804,7 @@ nxt_conf_get_array_element_or_itself(nxt_conf_value_t *value, uint32_t index) void -nxt_conf_array_qsort(nxt_conf_value_t *value, +nxt_conf_array_qsort(const nxt_conf_value_t *value, int (*compare)(const void *, const void *)) { nxt_conf_array_t *array; @@ -818,8 +820,9 @@ nxt_conf_array_qsort(nxt_conf_value_t *value, nxt_conf_op_ret_t -nxt_conf_op_compile(nxt_mp_t *mp, nxt_conf_op_t **ops, nxt_conf_value_t *root, - nxt_str_t *path, nxt_conf_value_t *value, nxt_bool_t add) +nxt_conf_op_compile(nxt_mp_t *mp, nxt_conf_op_t **ops, + const nxt_conf_value_t *root, const nxt_str_t *path, + nxt_conf_value_t *value, nxt_bool_t add) { nxt_str_t token; nxt_int_t ret, index; @@ -956,7 +959,7 @@ nxt_conf_op_compile(nxt_mp_t *mp, nxt_conf_op_t **ops, nxt_conf_value_t *root, nxt_conf_value_t * -nxt_conf_clone(nxt_mp_t *mp, nxt_conf_op_t *op, nxt_conf_value_t *value) +nxt_conf_clone(nxt_mp_t *mp, nxt_conf_op_t *op, const nxt_conf_value_t *value) { nxt_int_t rc; nxt_conf_value_t *copy; @@ -977,8 +980,8 @@ nxt_conf_clone(nxt_mp_t *mp, nxt_conf_op_t *op, nxt_conf_value_t *value) static nxt_int_t -nxt_conf_copy_value(nxt_mp_t *mp, nxt_conf_op_t *op, nxt_conf_value_t *dst, - nxt_conf_value_t *src) +nxt_conf_copy_value(nxt_mp_t *mp, const nxt_conf_op_t *op, + nxt_conf_value_t *dst, const nxt_conf_value_t *src) { if (op != NULL && src->type != NXT_CONF_VALUE_ARRAY @@ -1020,8 +1023,8 @@ nxt_conf_copy_value(nxt_mp_t *mp, nxt_conf_op_t *op, nxt_conf_value_t *dst, static nxt_int_t -nxt_conf_copy_array(nxt_mp_t *mp, nxt_conf_op_t *op, nxt_conf_value_t *dst, - nxt_conf_value_t *src) +nxt_conf_copy_array(nxt_mp_t *mp, const nxt_conf_op_t *op, + nxt_conf_value_t *dst, const nxt_conf_value_t *src) { size_t size; nxt_int_t rc; @@ -1120,8 +1123,8 @@ nxt_conf_copy_array(nxt_mp_t *mp, nxt_conf_op_t *op, nxt_conf_value_t *dst, static nxt_int_t -nxt_conf_copy_object(nxt_mp_t *mp, nxt_conf_op_t *op, 
nxt_conf_value_t *dst, - nxt_conf_value_t *src) +nxt_conf_copy_object(nxt_mp_t *mp, const nxt_conf_op_t *op, + nxt_conf_value_t *dst, const nxt_conf_value_t *src) { size_t size; nxt_int_t rc; @@ -1441,7 +1444,7 @@ nxt_conf_json_parse_value(nxt_mp_t *mp, nxt_conf_value_t *value, u_char *start, goto error; } - if (nxt_fast_path((ch - '0') <= 9)) { + if (nxt_fast_path((u_char)(ch - '0') <= 9)) { p = nxt_conf_json_parse_number(mp, value, start, end, error); if (nxt_slow_path(p == NULL)) { @@ -2232,7 +2235,8 @@ nxt_conf_json_parse_error(nxt_conf_json_error_t *error, u_char *pos, size_t -nxt_conf_json_length(nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty) +nxt_conf_json_length(const nxt_conf_value_t *value, + nxt_conf_json_pretty_t *pretty) { switch (value->type) { @@ -2264,7 +2268,7 @@ nxt_conf_json_length(nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty) u_char * -nxt_conf_json_print(u_char *p, nxt_conf_value_t *value, +nxt_conf_json_print(u_char *p, const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty) { switch (value->type) { @@ -2298,7 +2302,7 @@ nxt_conf_json_print(u_char *p, nxt_conf_value_t *value, static size_t -nxt_conf_json_string_length(nxt_conf_value_t *value) +nxt_conf_json_string_length(const nxt_conf_value_t *value) { nxt_str_t str; @@ -2309,7 +2313,7 @@ nxt_conf_json_string_length(nxt_conf_value_t *value) static u_char * -nxt_conf_json_print_string(u_char *p, nxt_conf_value_t *value) +nxt_conf_json_print_string(u_char *p, const nxt_conf_value_t *value) { nxt_str_t str; @@ -2326,7 +2330,7 @@ nxt_conf_json_print_string(u_char *p, nxt_conf_value_t *value) static size_t -nxt_conf_json_array_length(nxt_conf_value_t *value, +nxt_conf_json_array_length(const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty) { size_t len; @@ -2368,7 +2372,7 @@ nxt_conf_json_array_length(nxt_conf_value_t *value, static u_char * -nxt_conf_json_print_array(u_char *p, nxt_conf_value_t *value, +nxt_conf_json_print_array(u_char *p, const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty) { nxt_uint_t n; @@ -2420,7 +2424,7 @@ nxt_conf_json_print_array(u_char *p, nxt_conf_value_t *value, static size_t -nxt_conf_json_object_length(nxt_conf_value_t *value, +nxt_conf_json_object_length(const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty) { size_t len; @@ -2464,7 +2468,7 @@ nxt_conf_json_object_length(nxt_conf_value_t *value, static u_char * -nxt_conf_json_print_object(u_char *p, nxt_conf_value_t *value, +nxt_conf_json_print_object(u_char *p, const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty) { nxt_uint_t n; diff --git a/src/nxt_conf.h b/src/nxt_conf.h index 626b6d4d..493d6480 100644 --- a/src/nxt_conf.h +++ b/src/nxt_conf.h @@ -79,27 +79,28 @@ typedef struct { } nxt_conf_validation_t; -NXT_EXPORT nxt_uint_t nxt_conf_type(nxt_conf_value_t *value); +NXT_EXPORT nxt_uint_t nxt_conf_type(const nxt_conf_value_t *value); NXT_EXPORT nxt_conf_value_t *nxt_conf_get_path(nxt_conf_value_t *value, - nxt_str_t *path); -NXT_EXPORT nxt_conf_value_t *nxt_conf_get_object_member(nxt_conf_value_t *value, - nxt_str_t *name, uint32_t *index); + const nxt_str_t *path); +NXT_EXPORT nxt_conf_value_t *nxt_conf_get_object_member( + const nxt_conf_value_t *value, const nxt_str_t *name, uint32_t *index); NXT_EXPORT nxt_conf_value_t *nxt_conf_next_object_member( - nxt_conf_value_t *value, nxt_str_t *name, uint32_t *next); -NXT_EXPORT nxt_conf_value_t *nxt_conf_get_array_element(nxt_conf_value_t *value, - uint32_t index); + const nxt_conf_value_t *value, nxt_str_t *name, uint32_t 
*next); +NXT_EXPORT nxt_conf_value_t *nxt_conf_get_array_element( + const nxt_conf_value_t *value, uint32_t index); NXT_EXPORT nxt_conf_value_t *nxt_conf_get_array_element_or_itself( nxt_conf_value_t *value, uint32_t index); -NXT_EXPORT nxt_int_t nxt_conf_map_object(nxt_mp_t *mp, nxt_conf_value_t *value, - nxt_conf_map_t *map, nxt_uint_t n, void *data); +NXT_EXPORT nxt_int_t nxt_conf_map_object(nxt_mp_t *mp, + const nxt_conf_value_t *value, const nxt_conf_map_t *map, nxt_uint_t n, + void *data); nxt_conf_op_ret_t nxt_conf_op_compile(nxt_mp_t *mp, nxt_conf_op_t **ops, - nxt_conf_value_t *root, nxt_str_t *path, nxt_conf_value_t *value, + const nxt_conf_value_t *root, const nxt_str_t *path, nxt_conf_value_t *value, nxt_bool_t add); nxt_conf_value_t *nxt_conf_clone(nxt_mp_t *mp, nxt_conf_op_t *op, - nxt_conf_value_t *value); + const nxt_conf_value_t *value); nxt_conf_value_t *nxt_conf_json_parse(nxt_mp_t *mp, u_char *start, u_char *end, nxt_conf_json_error_t *error); @@ -107,49 +108,54 @@ nxt_conf_value_t *nxt_conf_json_parse(nxt_mp_t *mp, u_char *start, u_char *end, #define nxt_conf_json_parse_str(mp, str) \ nxt_conf_json_parse(mp, (str)->start, (str)->start + (str)->length, NULL) -size_t nxt_conf_json_length(nxt_conf_value_t *value, +size_t nxt_conf_json_length(const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty); -u_char *nxt_conf_json_print(u_char *p, nxt_conf_value_t *value, +u_char *nxt_conf_json_print(u_char *p, const nxt_conf_value_t *value, nxt_conf_json_pretty_t *pretty); void nxt_conf_json_position(u_char *start, const u_char *pos, nxt_uint_t *line, nxt_uint_t *column); nxt_int_t nxt_conf_validate(nxt_conf_validation_t *vldt); -NXT_EXPORT void nxt_conf_get_string(nxt_conf_value_t *value, nxt_str_t *str); -NXT_EXPORT nxt_str_t *nxt_conf_get_string_dup(nxt_conf_value_t *value, +NXT_EXPORT void nxt_conf_get_string(const nxt_conf_value_t *value, + nxt_str_t *str); +NXT_EXPORT nxt_str_t *nxt_conf_get_string_dup(const nxt_conf_value_t *value, nxt_mp_t *mp, nxt_str_t *str); -NXT_EXPORT void nxt_conf_set_string(nxt_conf_value_t *value, nxt_str_t *str); +NXT_EXPORT void nxt_conf_set_string(nxt_conf_value_t *value, + const nxt_str_t *str); NXT_EXPORT nxt_int_t nxt_conf_set_string_dup(nxt_conf_value_t *value, nxt_mp_t *mp, const nxt_str_t *str); NXT_EXPORT double nxt_conf_get_number(nxt_conf_value_t *value); NXT_EXPORT uint8_t nxt_conf_get_boolean(nxt_conf_value_t *value); // FIXME reimplement and reorder functions below -NXT_EXPORT nxt_uint_t nxt_conf_object_members_count(nxt_conf_value_t *value); +NXT_EXPORT nxt_uint_t nxt_conf_object_members_count( + const nxt_conf_value_t *value); nxt_conf_value_t *nxt_conf_create_object(nxt_mp_t *mp, nxt_uint_t count); -void nxt_conf_set_member(nxt_conf_value_t *object, nxt_str_t *name, +void nxt_conf_set_member(nxt_conf_value_t *object, const nxt_str_t *name, const nxt_conf_value_t *value, uint32_t index); nxt_int_t nxt_conf_set_member_dup(nxt_conf_value_t *object, nxt_mp_t *mp, - nxt_str_t *name, nxt_conf_value_t *value, uint32_t index); -void nxt_conf_set_member_string(nxt_conf_value_t *object, nxt_str_t *name, - nxt_str_t *value, uint32_t index); -nxt_int_t nxt_conf_set_member_string_dup(nxt_conf_value_t *object, nxt_mp_t *mp, - nxt_str_t *name, nxt_str_t *value, uint32_t index); -void nxt_conf_set_member_integer(nxt_conf_value_t *object, nxt_str_t *name, - int64_t value, uint32_t index); -void nxt_conf_set_member_null(nxt_conf_value_t *object, nxt_str_t *name, + const nxt_str_t *name, const nxt_conf_value_t *value, uint32_t index); +void 
nxt_conf_set_member_string(nxt_conf_value_t *object, + const nxt_str_t *name, const nxt_str_t *value, uint32_t index); +nxt_int_t nxt_conf_set_member_string_dup(nxt_conf_value_t *object, + nxt_mp_t *mp, const nxt_str_t *name, const nxt_str_t *value, + uint32_t index); +void nxt_conf_set_member_integer(nxt_conf_value_t *object, + const nxt_str_t *name, int64_t value, uint32_t index); +void nxt_conf_set_member_null(nxt_conf_value_t *object, const nxt_str_t *name, uint32_t index); nxt_conf_value_t *nxt_conf_create_array(nxt_mp_t *mp, nxt_uint_t count); void nxt_conf_set_element(nxt_conf_value_t *array, nxt_uint_t index, const nxt_conf_value_t *value); -nxt_int_t nxt_conf_set_element_string_dup(nxt_conf_value_t *array, nxt_mp_t *mp, - nxt_uint_t index, nxt_str_t *value); -NXT_EXPORT nxt_uint_t nxt_conf_array_elements_count(nxt_conf_value_t *value); +nxt_int_t nxt_conf_set_element_string_dup(nxt_conf_value_t *array, + nxt_mp_t *mp, nxt_uint_t index, const nxt_str_t *value); +NXT_EXPORT nxt_uint_t nxt_conf_array_elements_count( + const nxt_conf_value_t *value); NXT_EXPORT nxt_uint_t nxt_conf_array_elements_count_or_1( - nxt_conf_value_t *value); -void nxt_conf_array_qsort(nxt_conf_value_t *value, + const nxt_conf_value_t *value); +void nxt_conf_array_qsort(const nxt_conf_value_t *value, int (*compare)(const void *, const void *)); diff --git a/src/nxt_conf_validation.c b/src/nxt_conf_validation.c index 2099f887..5d7f7c52 100644 --- a/src/nxt_conf_validation.c +++ b/src/nxt_conf_validation.c @@ -73,11 +73,11 @@ struct nxt_conf_vldt_object_s { static nxt_int_t nxt_conf_vldt_type(nxt_conf_validation_t *vldt, - nxt_str_t *name, nxt_conf_value_t *value, nxt_conf_vldt_type_t type); + const nxt_str_t *name, nxt_conf_value_t *value, nxt_conf_vldt_type_t type); static nxt_int_t nxt_conf_vldt_error(nxt_conf_validation_t *vldt, const char *fmt, ...); -static nxt_int_t nxt_conf_vldt_var(nxt_conf_validation_t *vldt, nxt_str_t *name, - nxt_str_t *value); +static nxt_int_t nxt_conf_vldt_var(nxt_conf_validation_t *vldt, + const nxt_str_t *name, nxt_str_t *value); static nxt_int_t nxt_conf_vldt_if(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, void *data); nxt_inline nxt_int_t nxt_conf_vldt_unsupported(nxt_conf_validation_t *vldt, @@ -134,6 +134,8 @@ static nxt_int_t nxt_conf_vldt_python_protocol(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, void *data); static nxt_int_t nxt_conf_vldt_python_prefix(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, void *data); +static nxt_int_t nxt_conf_vldt_listen_threads(nxt_conf_validation_t *vldt, + nxt_conf_value_t *value, void *data); static nxt_int_t nxt_conf_vldt_threads(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, void *data); static nxt_int_t nxt_conf_vldt_thread_stack_size(nxt_conf_validation_t *vldt, @@ -176,6 +178,8 @@ static nxt_int_t nxt_conf_vldt_app_name(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, void *data); static nxt_int_t nxt_conf_vldt_forwarded(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, void *data); +static nxt_int_t nxt_conf_vldt_listen_backlog(nxt_conf_validation_t *vldt, + nxt_conf_value_t *value, void *data); static nxt_int_t nxt_conf_vldt_app(nxt_conf_validation_t *vldt, nxt_str_t *name, nxt_conf_value_t *value); static nxt_int_t nxt_conf_vldt_object(nxt_conf_validation_t *vldt, @@ -305,6 +309,10 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_root_members[] = { static nxt_conf_vldt_object_t nxt_conf_vldt_setting_members[] = { { + .name = nxt_string("listen_threads"), + .type = NXT_CONF_VLDT_INTEGER, + 
.validator = nxt_conf_vldt_listen_threads, + }, { .name = nxt_string("http"), .type = NXT_CONF_VLDT_OBJECT, .validator = nxt_conf_vldt_object, @@ -368,6 +376,9 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_http_members[] = { }, { .name = nxt_string("server_version"), .type = NXT_CONF_VLDT_BOOLEAN, + }, { + .name = nxt_string("chunked_transform"), + .type = NXT_CONF_VLDT_BOOLEAN, }, NXT_CONF_VLDT_END @@ -421,6 +432,10 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_listener_members[] = { .type = NXT_CONF_VLDT_OBJECT, .validator = nxt_conf_vldt_object, .u.members = nxt_conf_vldt_client_ip_members + }, { + .name = nxt_string("backlog"), + .type = NXT_CONF_VLDT_NUMBER, + .validator = nxt_conf_vldt_listen_backlog, }, #if (NXT_TLS) @@ -681,6 +696,10 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_match_members[] = { .type = NXT_CONF_VLDT_OBJECT | NXT_CONF_VLDT_ARRAY, .validator = nxt_conf_vldt_match_patterns_sets, .u.string = "cookies" + }, { + .name = nxt_string("if"), + .type = NXT_CONF_VLDT_STRING, + .validator = nxt_conf_vldt_if, }, NXT_CONF_VLDT_END @@ -839,6 +858,11 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_python_members[] = { .validator = nxt_conf_vldt_targets_exclusive, .u.string = "callable", }, { + .name = nxt_string("factory"), + .type = NXT_CONF_VLDT_BOOLEAN, + .validator = nxt_conf_vldt_targets_exclusive, + .u.string = "factory", + }, { .name = nxt_string("prefix"), .type = NXT_CONF_VLDT_STRING, .validator = nxt_conf_vldt_targets_exclusive, @@ -863,6 +887,9 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_python_target_members[] = { .name = nxt_string("callable"), .type = NXT_CONF_VLDT_STRING, }, { + .name = nxt_string("factory"), + .type = NXT_CONF_VLDT_BOOLEAN, + }, { .name = nxt_string("prefix"), .type = NXT_CONF_VLDT_STRING, .validator = nxt_conf_vldt_python_prefix, @@ -881,6 +908,9 @@ static nxt_conf_vldt_object_t nxt_conf_vldt_python_notargets_members[] = { .name = nxt_string("callable"), .type = NXT_CONF_VLDT_STRING, }, { + .name = nxt_string("factory"), + .type = NXT_CONF_VLDT_BOOLEAN, + }, { .name = nxt_string("prefix"), .type = NXT_CONF_VLDT_STRING, .validator = nxt_conf_vldt_python_prefix, @@ -1436,7 +1466,7 @@ nxt_conf_validate(nxt_conf_validation_t *vldt) static nxt_int_t -nxt_conf_vldt_type(nxt_conf_validation_t *vldt, nxt_str_t *name, +nxt_conf_vldt_type(nxt_conf_validation_t *vldt, const nxt_str_t *name, nxt_conf_value_t *value, nxt_conf_vldt_type_t type) { u_char *p; @@ -1445,7 +1475,7 @@ nxt_conf_vldt_type(nxt_conf_validation_t *vldt, nxt_str_t *name, nxt_uint_t value_type, n, t; u_char buf[nxt_length(NXT_CONF_VLDT_ANY_TYPE_STR)]; - static nxt_str_t type_name[] = { + static const nxt_str_t type_name[] = { nxt_string("a null"), nxt_string("a boolean"), nxt_string("an integer number"), @@ -1548,7 +1578,7 @@ nxt_conf_vldt_unsupported(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, static nxt_int_t -nxt_conf_vldt_var(nxt_conf_validation_t *vldt, nxt_str_t *name, +nxt_conf_vldt_var(nxt_conf_validation_t *vldt, const nxt_str_t *name, nxt_str_t *value) { u_char error[NXT_MAX_ERROR_STR]; @@ -1568,7 +1598,7 @@ nxt_conf_vldt_if(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, { nxt_str_t str; - static nxt_str_t if_str = nxt_string("if"); + static const nxt_str_t if_str = nxt_string("if"); if (nxt_conf_type(value) != NXT_CONF_STRING) { return nxt_conf_vldt_error(vldt, "The \"if\" must be a string"); @@ -1731,7 +1761,7 @@ nxt_conf_vldt_action(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, nxt_conf_value_t *action; nxt_conf_vldt_object_t *members; - static 
struct { + static const struct { nxt_str_t name; nxt_conf_vldt_object_t *members; @@ -1778,7 +1808,7 @@ nxt_conf_vldt_pass(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, nxt_int_t ret; nxt_str_t segments[3]; - static nxt_str_t targets_str = nxt_string("targets"); + static const nxt_str_t targets_str = nxt_string("targets"); nxt_conf_get_string(value, &pass); @@ -1932,7 +1962,7 @@ nxt_conf_vldt_share_element(nxt_conf_validation_t *vldt, { nxt_str_t str; - static nxt_str_t share = nxt_string("share"); + static const nxt_str_t share = nxt_string("share"); if (nxt_conf_type(value) != NXT_CONF_STRING) { return nxt_conf_vldt_error(vldt, "The \"share\" array must " @@ -1982,7 +2012,7 @@ nxt_conf_vldt_python(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, { nxt_conf_value_t *targets; - static nxt_str_t targets_str = nxt_string("targets"); + static const nxt_str_t targets_str = nxt_string("targets"); targets = nxt_conf_get_object_member(value, &targets_str, NULL); @@ -2064,6 +2094,27 @@ nxt_conf_vldt_python_prefix(nxt_conf_validation_t *vldt, return NXT_OK; } +static nxt_int_t +nxt_conf_vldt_listen_threads(nxt_conf_validation_t *vldt, + nxt_conf_value_t *value, void *data) +{ + int64_t threads; + + threads = nxt_conf_get_number(value); + + if (threads < 1) { + return nxt_conf_vldt_error(vldt, "The \"listen_threads\" number must " + "be equal to or greater than 1."); + } + + if (threads > NXT_INT32_T_MAX) { + return nxt_conf_vldt_error(vldt, "The \"listen_threads\" number must " + "not exceed %d.", NXT_INT32_T_MAX); + } + + return NXT_OK; +} + static nxt_int_t nxt_conf_vldt_threads(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, @@ -2572,9 +2623,10 @@ static nxt_int_t nxt_conf_vldt_response_header(nxt_conf_validation_t *vldt, nxt_str_t *name, nxt_conf_value_t *value) { + nxt_str_t str; nxt_uint_t type; - static nxt_str_t content_length = nxt_string("Content-Length"); + static const nxt_str_t content_length = nxt_string("Content-Length"); if (name->length == 0) { return nxt_conf_vldt_error(vldt, "The response header name " @@ -2588,7 +2640,17 @@ nxt_conf_vldt_response_header(nxt_conf_validation_t *vldt, nxt_str_t *name, type = nxt_conf_type(value); - if (type == NXT_CONF_STRING || type == NXT_CONF_NULL) { + if (type == NXT_CONF_NULL) { + return NXT_OK; + } + + if (type == NXT_CONF_STRING) { + nxt_conf_get_string(value, &str); + + if (nxt_is_tstr(&str)) { + return nxt_conf_vldt_var(vldt, name, &str); + } + return NXT_OK; } @@ -2604,7 +2666,7 @@ nxt_conf_vldt_app_name(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, nxt_str_t name; nxt_conf_value_t *apps, *app; - static nxt_str_t apps_str = nxt_string("applications"); + static const nxt_str_t apps_str = nxt_string("applications"); nxt_conf_get_string(value, &name); @@ -2636,8 +2698,8 @@ nxt_conf_vldt_forwarded(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, { nxt_conf_value_t *client_ip, *protocol; - static nxt_str_t client_ip_str = nxt_string("client_ip"); - static nxt_str_t protocol_str = nxt_string("protocol"); + static const nxt_str_t client_ip_str = nxt_string("client_ip"); + static const nxt_str_t protocol_str = nxt_string("protocol"); client_ip = nxt_conf_get_object_member(value, &client_ip_str, NULL); protocol = nxt_conf_get_object_member(value, &protocol_str, NULL); @@ -2653,6 +2715,32 @@ nxt_conf_vldt_forwarded(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, static nxt_int_t +nxt_conf_vldt_listen_backlog(nxt_conf_validation_t *vldt, + nxt_conf_value_t *value, void *data) +{ + int64_t backlog; + + backlog = 
nxt_conf_get_number(value); + + /* + * POSIX allows this to be 0 and some systems use -1 to + * indicate to use the OS's default value. + */ + if (backlog < -1) { + return nxt_conf_vldt_error(vldt, "The \"backlog\" number must be " + "equal to or greater than -1."); + } + + if (backlog > NXT_INT32_T_MAX) { + return nxt_conf_vldt_error(vldt, "The \"backlog\" number must " + "not exceed %d.", NXT_INT32_T_MAX); + } + + return NXT_OK; +} + + +static nxt_int_t nxt_conf_vldt_app(nxt_conf_validation_t *vldt, nxt_str_t *name, nxt_conf_value_t *value) { @@ -2662,9 +2750,9 @@ nxt_conf_vldt_app(nxt_conf_validation_t *vldt, nxt_str_t *name, nxt_conf_value_t *type_value; nxt_app_lang_module_t *lang; - static nxt_str_t type_str = nxt_string("type"); + static const nxt_str_t type_str = nxt_string("type"); - static struct { + static const struct { nxt_conf_vldt_handler_t validator; nxt_conf_vldt_object_t *members; @@ -3177,7 +3265,7 @@ nxt_conf_vldt_php(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, { nxt_conf_value_t *targets; - static nxt_str_t targets_str = nxt_string("targets"); + static const nxt_str_t targets_str = nxt_string("targets"); targets = nxt_conf_get_object_member(value, &targets_str, NULL); @@ -3258,7 +3346,7 @@ nxt_conf_vldt_upstream(nxt_conf_validation_t *vldt, nxt_str_t *name, nxt_int_t ret; nxt_conf_value_t *conf; - static nxt_str_t servers = nxt_string("servers"); + static const nxt_str_t servers = nxt_string("servers"); ret = nxt_conf_vldt_type(vldt, name, value, NXT_CONF_VLDT_OBJECT); @@ -3403,7 +3491,7 @@ nxt_conf_vldt_access_log(nxt_conf_validation_t *vldt, nxt_conf_value_t *value, nxt_int_t ret; nxt_conf_vldt_access_log_conf_t conf; - static nxt_str_t format_str = nxt_string("format"); + static const nxt_str_t format_str = nxt_string("format"); if (nxt_conf_type(value) == NXT_CONF_STRING) { return NXT_OK; diff --git a/src/nxt_controller.c b/src/nxt_controller.c index eb814321..1ffcf815 100644 --- a/src/nxt_controller.c +++ b/src/nxt_controller.c @@ -695,7 +695,7 @@ nxt_runtime_controller_socket(nxt_task_t *task, nxt_runtime_t *rt) if (ls->sockaddr->u.sockaddr.sa_family == AF_UNIX) { const char *path = ls->sockaddr->u.sockaddr_un.sun_path; - nxt_fs_mkdir_parent((const u_char *) path, 0755); + nxt_fs_mkdir_p_dirname((const u_char *) path, 0755); } #endif @@ -1908,12 +1908,12 @@ nxt_controller_process_cert_save(nxt_task_t *task, nxt_port_recv_msg_t *msg, static nxt_bool_t nxt_controller_cert_in_use(nxt_str_t *name) { - uint32_t next; + uint32_t i, n, next; nxt_str_t str; - nxt_conf_value_t *listeners, *listener, *value; + nxt_conf_value_t *listeners, *listener, *value, *element; - static nxt_str_t listeners_path = nxt_string("/listeners"); - static nxt_str_t certificate_path = nxt_string("/tls/certificate"); + static const nxt_str_t listeners_path = nxt_string("/listeners"); + static const nxt_str_t certificate_path = nxt_string("/tls/certificate"); listeners = nxt_conf_get_path(nxt_controller_conf.root, &listeners_path); @@ -1931,10 +1931,27 @@ nxt_controller_cert_in_use(nxt_str_t *name) continue; } - nxt_conf_get_string(value, &str); + if (nxt_conf_type(value) == NXT_CONF_ARRAY) { + n = nxt_conf_array_elements_count(value); - if (nxt_strstr_eq(&str, name)) { - return 1; + for (i = 0; i < n; i++) { + element = nxt_conf_get_array_element(value, i); + + nxt_conf_get_string(element, &str); + + if (nxt_strstr_eq(&str, name)) { + return 1; + } + } + + } else { + /* NXT_CONF_STRING */ + + nxt_conf_get_string(value, &str); + + if (nxt_strstr_eq(&str, name)) { + return 1; + } } } } 
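For reference, "/tls/certificate" may now hold either a single string or an array of certificate bundle names, and nxt_controller_cert_in_use() above checks both forms. A minimal sketch of that string-or-array lookup, assuming only the nxt_conf API declared in src/nxt_conf.h (the helper name is hypothetical, everything else mirrors the hunk above):

static nxt_bool_t
cert_name_in_value(const nxt_conf_value_t *value, nxt_str_t *name)
{
    uint32_t   i, n;
    nxt_str_t  str;

    if (nxt_conf_type(value) == NXT_CONF_ARRAY) {
        n = nxt_conf_array_elements_count(value);

        for (i = 0; i < n; i++) {
            /* Each element is expected to be a string bundle name. */
            nxt_conf_get_string(nxt_conf_get_array_element(value, i), &str);

            if (nxt_strstr_eq(&str, name)) {
                return 1;
            }
        }

        return 0;
    }

    /* NXT_CONF_STRING */

    nxt_conf_get_string(value, &str);

    return nxt_strstr_eq(&str, name);
}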
@@ -2178,7 +2195,7 @@ nxt_controller_script_in_use(nxt_str_t *name) nxt_str_t str; nxt_conf_value_t *js_module, *element; - static nxt_str_t js_module_path = nxt_string("/settings/js_module"); + static const nxt_str_t js_module_path = nxt_string("/settings/js_module"); js_module = nxt_conf_get_path(nxt_controller_conf.root, &js_module_path); @@ -2486,13 +2503,13 @@ nxt_controller_response(nxt_task_t *task, nxt_controller_request_t *req, nxt_conf_value_t *value, *location; nxt_conf_json_pretty_t pretty; - static nxt_str_t success_str = nxt_string("success"); - static nxt_str_t error_str = nxt_string("error"); - static nxt_str_t detail_str = nxt_string("detail"); - static nxt_str_t location_str = nxt_string("location"); - static nxt_str_t offset_str = nxt_string("offset"); - static nxt_str_t line_str = nxt_string("line"); - static nxt_str_t column_str = nxt_string("column"); + static const nxt_str_t success_str = nxt_string("success"); + static const nxt_str_t error_str = nxt_string("error"); + static const nxt_str_t detail_str = nxt_string("detail"); + static const nxt_str_t location_str = nxt_string("location"); + static const nxt_str_t offset_str = nxt_string("offset"); + static const nxt_str_t line_str = nxt_string("line"); + static const nxt_str_t column_str = nxt_string("column"); static nxt_time_string_t date_cache = { (nxt_atomic_uint_t) -1, @@ -2652,11 +2669,12 @@ static u_char * nxt_controller_date(u_char *buf, nxt_realtime_t *now, struct tm *tm, size_t size, const char *format) { - static const char *week[] = { "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", - "Sat" }; + static const char * const week[] = { "Sun", "Mon", "Tue", "Wed", "Thu", + "Fri", "Sat" }; - static const char *month[] = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" }; + static const char * const month[] = { "Jan", "Feb", "Mar", "Apr", "May", + "Jun", "Jul", "Aug", "Sep", "Oct", + "Nov", "Dec" }; return nxt_sprintf(buf, buf + size, format, week[tm->tm_wday], tm->tm_mday, diff --git a/src/nxt_fs.c b/src/nxt_fs.c index e10c5bcb..8ea8e186 100644 --- a/src/nxt_fs.c +++ b/src/nxt_fs.c @@ -1,5 +1,6 @@ /* * Copyright (C) NGINX, Inc. 
+ * Copyright 2024, Alejandro Colomar <alx@kernel.org> */ #include <nxt_main.h> @@ -9,25 +10,22 @@ static nxt_int_t nxt_fs_mkdir(const u_char *dir, mode_t mode); nxt_int_t -nxt_fs_mkdir_all(const u_char *dir, mode_t mode) +nxt_fs_mkdir_p(const u_char *dir, mode_t mode) { - char *start, *end, *dst; - size_t dirlen; - char path[PATH_MAX]; + char *start, *end, *dst; + size_t dirlen; + nxt_int_t ret; + char path[PATH_MAX]; dirlen = nxt_strlen(dir); - nxt_assert(dirlen < PATH_MAX && dirlen > 1 && dir[0] == '/'); + nxt_assert(dirlen < PATH_MAX && dirlen > 0); dst = path; start = (char *) dir; while (*start != '\0') { - if (*start == '/') { - *dst++ = *start++; - } - - end = strchr(start, '/'); + end = strchr(start + 1, '/'); if (end == NULL) { end = ((char *)dir + dirlen); } @@ -35,9 +33,8 @@ nxt_fs_mkdir_all(const u_char *dir, mode_t mode) dst = nxt_cpymem(dst, start, end - start); *dst = '\0'; - if (nxt_slow_path(nxt_fs_mkdir((u_char *) path, mode) != NXT_OK - && nxt_errno != EEXIST)) - { + ret = nxt_fs_mkdir((u_char *) path, mode); + if (nxt_slow_path(ret != NXT_OK && nxt_errno != EEXIST)) { return NXT_ERROR; } @@ -49,7 +46,7 @@ nxt_fs_mkdir_all(const u_char *dir, mode_t mode) nxt_int_t -nxt_fs_mkdir_parent(const u_char *path, mode_t mode) +nxt_fs_mkdir_p_dirname(const u_char *path, mode_t mode) { char *ptr, *dir; nxt_int_t ret; @@ -62,11 +59,14 @@ nxt_fs_mkdir_parent(const u_char *path, mode_t mode) ret = NXT_OK; ptr = strrchr(dir, '/'); - if (nxt_fast_path(ptr != NULL)) { - *ptr = '\0'; - ret = nxt_fs_mkdir((const u_char *) dir, mode); + if (ptr == dir || nxt_slow_path(ptr == NULL)) { + goto out_free; } + *ptr = '\0'; + ret = nxt_fs_mkdir_p((const u_char *) dir, mode); + +out_free: nxt_free(dir); return ret; diff --git a/src/nxt_fs.h b/src/nxt_fs.h index c8868d80..a06e4d3d 100644 --- a/src/nxt_fs.h +++ b/src/nxt_fs.h @@ -6,8 +6,8 @@ #define _NXT_FS_H_INCLUDED_ -nxt_int_t nxt_fs_mkdir_parent(const u_char *path, mode_t mode); -nxt_int_t nxt_fs_mkdir_all(const u_char *dir, mode_t mode); +nxt_int_t nxt_fs_mkdir_p_dirname(const u_char *path, mode_t mode); +nxt_int_t nxt_fs_mkdir_p(const u_char *dir, mode_t mode); #endif /* _NXT_FS_H_INCLUDED_ */ diff --git a/src/nxt_h1proto.c b/src/nxt_h1proto.c index 1dfe4b6e..48c2697b 100644 --- a/src/nxt_h1proto.c +++ b/src/nxt_h1proto.c @@ -90,6 +90,8 @@ static void nxt_h1p_peer_connect(nxt_task_t *task, nxt_http_peer_t *peer); static void nxt_h1p_peer_connected(nxt_task_t *task, void *obj, void *data); static void nxt_h1p_peer_refused(nxt_task_t *task, void *obj, void *data); static void nxt_h1p_peer_header_send(nxt_task_t *task, nxt_http_peer_t *peer); +static nxt_int_t nxt_h1p_peer_request_target(nxt_http_request_t *r, + nxt_str_t *target); static void nxt_h1p_peer_header_sent(nxt_task_t *task, void *obj, void *data); static void nxt_h1p_peer_header_read(nxt_task_t *task, nxt_http_peer_t *peer); static ssize_t nxt_h1p_peer_io_read_handler(nxt_task_t *task, nxt_conn_t *c); @@ -654,6 +656,8 @@ nxt_h1p_header_process(nxt_task_t *task, nxt_h1proto_t *h1p, r->target.start = h1p->parser.target_start; r->target.length = h1p->parser.target_end - h1p->parser.target_start; + r->quoted_target = h1p->parser.quoted_target; + if (h1p->parser.version.ui64 != 0) { r->version.start = h1p->parser.version.str; r->version.length = sizeof(h1p->parser.version.str); @@ -835,7 +839,12 @@ nxt_h1p_transfer_encoding(void *ctx, nxt_http_field_t *field, uintptr_t data) if (field->value_length == 7 && memcmp(field->value, "chunked", 7) == 0) { + if (r->chunked_field != NULL) { + 
return NXT_HTTP_BAD_REQUEST; + } + te = NXT_HTTP_TE_CHUNKED; + r->chunked_field = field; } else { te = NXT_HTTP_TE_UNSUPPORTED; @@ -852,15 +861,16 @@ nxt_h1p_request_body_read(nxt_task_t *task, nxt_http_request_t *r) { size_t size, body_length, body_buffer_size, body_rest; ssize_t res; - nxt_str_t *tmp_path, tmp_name; - nxt_buf_t *in, *b; + nxt_buf_t *in, *b, *out, *chunk; nxt_conn_t *c; nxt_h1proto_t *h1p; + nxt_socket_conf_t *skcf; nxt_http_status_t status; static const nxt_str_t tmp_name_pattern = nxt_string("/req-XXXXXXXX"); h1p = r->proto.h1; + skcf = r->conf->socket_conf; nxt_debug(task, "h1p request body read %O te:%d", r->content_length_n, h1p->transfer_encoding); @@ -868,8 +878,19 @@ nxt_h1p_request_body_read(nxt_task_t *task, nxt_http_request_t *r) switch (h1p->transfer_encoding) { case NXT_HTTP_TE_CHUNKED: - status = NXT_HTTP_LENGTH_REQUIRED; - goto error; + if (!skcf->chunked_transform) { + status = NXT_HTTP_LENGTH_REQUIRED; + goto error; + } + + if (r->content_length != NULL || !nxt_h1p_is_http11(h1p)) { + status = NXT_HTTP_BAD_REQUEST; + goto error; + } + + r->chunked = 1; + h1p->chunked_parse.mem_pool = r->mem_pool; + break; case NXT_HTTP_TE_UNSUPPORTED: status = NXT_HTTP_NOT_IMPLEMENTED; @@ -880,40 +901,31 @@ nxt_h1p_request_body_read(nxt_task_t *task, nxt_http_request_t *r) break; } - if (r->content_length_n == -1 || r->content_length_n == 0) { + if (!r->chunked && + (r->content_length_n == -1 || r->content_length_n == 0)) + { goto ready; } body_length = (size_t) r->content_length_n; - body_buffer_size = nxt_min(r->conf->socket_conf->body_buffer_size, - body_length); + body_buffer_size = nxt_min(skcf->body_buffer_size, body_length); if (body_length > body_buffer_size) { - tmp_path = &r->conf->socket_conf->body_temp_path; + nxt_str_t *tmp_path, tmp_name; + + tmp_path = &skcf->body_temp_path; tmp_name.length = tmp_path->length + tmp_name_pattern.length; b = nxt_buf_file_alloc(r->mem_pool, body_buffer_size + sizeof(nxt_file_t) + tmp_name.length + 1, 0); + if (nxt_slow_path(b == NULL)) { + status = NXT_HTTP_INTERNAL_SERVER_ERROR; + goto error; + } - } else { - /* This initialization required for CentOS 6, gcc 4.4.7. */ - tmp_path = NULL; - tmp_name.length = 0; - - b = nxt_buf_mem_alloc(r->mem_pool, body_buffer_size, 0); - } - - if (nxt_slow_path(b == NULL)) { - status = NXT_HTTP_INTERNAL_SERVER_ERROR; - goto error; - } - - r->body = b; - - if (body_length > body_buffer_size) { tmp_name.start = nxt_pointer_to(b->mem.start, sizeof(nxt_file_t)); memcpy(tmp_name.start, tmp_path->start, tmp_path->length); @@ -942,33 +954,80 @@ nxt_h1p_request_body_read(nxt_task_t *task, nxt_http_request_t *r) &tmp_name, b->file->fd); unlink((char *) tmp_name.start); + + } else { + b = nxt_buf_mem_alloc(r->mem_pool, body_buffer_size, 0); + if (nxt_slow_path(b == NULL)) { + status = NXT_HTTP_INTERNAL_SERVER_ERROR; + goto error; + } } - body_rest = body_length; + r->body = b; + + body_rest = r->chunked ? 
1 : body_length; in = h1p->conn->read; size = nxt_buf_mem_used_size(&in->mem); if (size != 0) { - size = nxt_min(size, body_length); - if (nxt_buf_is_file(b)) { - res = nxt_fd_write(b->file->fd, in->mem.pos, size); - if (nxt_slow_path(res < (ssize_t) size)) { - status = NXT_HTTP_INTERNAL_SERVER_ERROR; - goto error; - } + if (r->chunked) { + out = nxt_http_chunk_parse(task, &h1p->chunked_parse, in); - b->file_end += size; + if (h1p->chunked_parse.error) { + status = NXT_HTTP_INTERNAL_SERVER_ERROR; + goto error; + } + + if (h1p->chunked_parse.chunk_error) { + status = NXT_HTTP_BAD_REQUEST; + goto error; + } + + for (chunk = out; chunk != NULL; chunk = chunk->next) { + size = nxt_buf_mem_used_size(&chunk->mem); + + res = nxt_fd_write(b->file->fd, chunk->mem.pos, size); + if (nxt_slow_path(res < (ssize_t) size)) { + status = NXT_HTTP_INTERNAL_SERVER_ERROR; + goto error; + } + + b->file_end += size; + + if ((size_t) b->file_end > skcf->max_body_size) { + status = NXT_HTTP_PAYLOAD_TOO_LARGE; + goto error; + } + } + + if (h1p->chunked_parse.last) { + body_rest = 0; + } + + } else { + size = nxt_min(size, body_length); + res = nxt_fd_write(b->file->fd, in->mem.pos, size); + if (nxt_slow_path(res < (ssize_t) size)) { + status = NXT_HTTP_INTERNAL_SERVER_ERROR; + goto error; + } + + b->file_end += size; + + in->mem.pos += size; + body_rest -= size; + } } else { size = nxt_min(body_buffer_size, size); b->mem.free = nxt_cpymem(b->mem.free, in->mem.pos, size); - } - in->mem.pos += size; - body_rest -= size; + in->mem.pos += size; + body_rest -= size; + } } nxt_debug(task, "h1p body rest: %uz", body_rest); @@ -1026,9 +1085,10 @@ nxt_h1p_conn_request_body_read(nxt_task_t *task, void *obj, void *data) { size_t size, body_rest; ssize_t res; - nxt_buf_t *b; + nxt_buf_t *b, *out, *chunk; nxt_conn_t *c; nxt_h1proto_t *h1p; + nxt_socket_conf_t *skcf; nxt_http_request_t *r; nxt_event_engine_t *engine; @@ -1038,38 +1098,77 @@ nxt_h1p_conn_request_body_read(nxt_task_t *task, void *obj, void *data) nxt_debug(task, "h1p conn request body read"); r = h1p->request; + skcf = r->conf->socket_conf; engine = task->thread->engine; b = c->read; if (nxt_buf_is_file(b)) { - body_rest = b->file->size - b->file_end; - size = nxt_buf_mem_used_size(&b->mem); - size = nxt_min(size, body_rest); + if (r->chunked) { + body_rest = 1; - res = nxt_fd_write(b->file->fd, b->mem.pos, size); - if (nxt_slow_path(res < (ssize_t) size)) { - nxt_h1p_request_error(task, h1p, r); - return; - } + out = nxt_http_chunk_parse(task, &h1p->chunked_parse, b); - b->file_end += size; - body_rest -= res; + if (h1p->chunked_parse.error) { + nxt_h1p_request_error(task, h1p, r); + return; + } - b->mem.pos += size; + if (h1p->chunked_parse.chunk_error) { + nxt_http_request_error(task, r, NXT_HTTP_BAD_REQUEST); + return; + } - if (b->mem.pos == b->mem.free) { - if (body_rest >= (size_t) nxt_buf_mem_size(&b->mem)) { - b->mem.free = b->mem.start; + for (chunk = out; chunk != NULL; chunk = chunk->next) { + size = nxt_buf_mem_used_size(&chunk->mem); + res = nxt_fd_write(b->file->fd, chunk->mem.pos, size); + if (nxt_slow_path(res < (ssize_t) size)) { + nxt_h1p_request_error(task, h1p, r); + return; + } - } else { - /* This required to avoid reading next request. 
*/ - b->mem.free = b->mem.end - body_rest; + b->file_end += size; + + if ((size_t) b->file_end > skcf->max_body_size) { + nxt_h1p_request_error(task, h1p, r); + return; + } + } + + if (h1p->chunked_parse.last) { + body_rest = 0; } - b->mem.pos = b->mem.free; + } else { + body_rest = b->file->size - b->file_end; + + size = nxt_buf_mem_used_size(&b->mem); + size = nxt_min(size, body_rest); + + res = nxt_fd_write(b->file->fd, b->mem.pos, size); + if (nxt_slow_path(res < (ssize_t) size)) { + nxt_h1p_request_error(task, h1p, r); + return; + } + + b->file_end += size; + body_rest -= res; + + b->mem.pos += size; + + if (b->mem.pos == b->mem.free) { + if (body_rest >= (size_t) nxt_buf_mem_size(&b->mem)) { + b->mem.free = b->mem.start; + + } else { + /* This required to avoid reading next request. */ + b->mem.free = b->mem.end - body_rest; + } + + b->mem.pos = b->mem.free; + } } } else { @@ -2263,6 +2362,8 @@ nxt_h1p_peer_header_send(nxt_task_t *task, nxt_http_peer_t *peer) { u_char *p; size_t size; + nxt_int_t ret; + nxt_str_t target; nxt_buf_t *header, *body; nxt_conn_t *c; nxt_http_field_t *field; @@ -2272,7 +2373,12 @@ nxt_h1p_peer_header_send(nxt_task_t *task, nxt_http_peer_t *peer) r = peer->request; - size = r->method->length + sizeof(" ") + r->target.length + ret = nxt_h1p_peer_request_target(r, &target); + if (nxt_slow_path(ret != NXT_OK)) { + goto fail; + } + + size = r->method->length + sizeof(" ") + target.length + sizeof(" HTTP/1.1\r\n") + sizeof("Connection: close\r\n") + sizeof("\r\n"); @@ -2288,15 +2394,14 @@ nxt_h1p_peer_header_send(nxt_task_t *task, nxt_http_peer_t *peer) header = nxt_http_buf_mem(task, r, size); if (nxt_slow_path(header == NULL)) { - r->state->error_handler(task, r, peer); - return; + goto fail; } p = header->mem.free; p = nxt_cpymem(p, r->method->start, r->method->length); *p++ = ' '; - p = nxt_cpymem(p, r->target.start, r->target.length); + p = nxt_cpymem(p, target.start, target.length); p = nxt_cpymem(p, " HTTP/1.1\r\n", 11); p = nxt_cpymem(p, "Connection: close\r\n", 19); @@ -2328,8 +2433,7 @@ nxt_h1p_peer_header_send(nxt_task_t *task, nxt_http_peer_t *peer) } if (nxt_slow_path(body == NULL)) { - r->state->error_handler(task, r, peer); - return; + goto fail; } header->next = body; @@ -2353,6 +2457,61 @@ nxt_h1p_peer_header_send(nxt_task_t *task, nxt_http_peer_t *peer) } nxt_conn_write(task->thread->engine, c); + + return; + +fail: + + r->state->error_handler(task, r, peer); +} + + +static nxt_int_t +nxt_h1p_peer_request_target(nxt_http_request_t *r, nxt_str_t *target) +{ + u_char *p; + size_t size, encode; + + if (!r->uri_changed) { + *target = r->target; + return NXT_OK; + } + + if (!r->quoted_target && r->args->length == 0) { + *target = *r->path; + return NXT_OK; + } + + if (r->quoted_target) { + encode = nxt_encode_complex_uri(NULL, r->path->start, + r->path->length); + } else { + encode = 0; + } + + size = r->path->length + encode * 2 + 1 + r->args->length; + + target->start = nxt_mp_nget(r->mem_pool, size); + if (target->start == NULL) { + return NXT_ERROR; + } + + if (r->quoted_target) { + p = (u_char *) nxt_encode_complex_uri(target->start, r->path->start, + r->path->length); + + } else { + p = nxt_cpymem(target->start, r->path->start, r->path->length); + } + + if (r->args->length > 0) { + *p++ = '?'; + p = nxt_cpymem(p, r->args->start, r->args->length); + } + + target->length = p - target->start; + + return NXT_OK; } @@ -2710,6 +2869,11 @@ nxt_h1p_peer_body_process(nxt_task_t *task, nxt_http_peer_t *peer, } else if (h1p->remainder > 0) { length = 
nxt_buf_chain_length(out); h1p->remainder -= length; + + if (h1p->remainder == 0) { + nxt_buf_chain_add(&out, nxt_http_buf_last(peer->request)); + peer->closed = 1; + } } peer->body = out; diff --git a/src/nxt_http.h b/src/nxt_http.h index e812bd0d..5369c8e1 100644 --- a/src/nxt_http.h +++ b/src/nxt_http.h @@ -157,6 +157,7 @@ struct nxt_http_request_s { nxt_list_t *fields; nxt_http_field_t *content_type; nxt_http_field_t *content_length; + nxt_http_field_t *chunked_field; nxt_http_field_t *cookie; nxt_http_field_t *referer; nxt_http_field_t *user_agent; @@ -192,6 +193,8 @@ struct nxt_http_request_s { nxt_http_status_t status:16; uint8_t log_route; /* 1 bit */ + uint8_t quoted_target; /* 1 bit */ + uint8_t uri_changed; /* 1 bit */ uint8_t pass_count; /* 8 bits */ uint8_t app_target; @@ -202,6 +205,7 @@ struct nxt_http_request_s { uint8_t inconsistent; /* 1 bit */ uint8_t error; /* 1 bit */ uint8_t websocket_handshake; /* 1 bit */ + uint8_t chunked; /* 1 bit */ }; @@ -303,11 +307,12 @@ struct nxt_http_forward_s { nxt_inline u_char * nxt_http_date(u_char *buf, struct tm *tm) { - static const char *week[] = { "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", - "Sat" }; + static const char * const week[] = { "Sun", "Mon", "Tue", "Wed", "Thu", + "Fri", "Sat" }; - static const char *month[] = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" }; + static const char * const month[] = { "Jan", "Feb", "Mar", "Apr", "May", + "Jun", "Jul", "Aug", "Sep", "Oct", + "Nov", "Dec" }; return nxt_sprintf(buf, buf + NXT_HTTP_DATE_LEN, "%s, %02d %s %4d %02d:%02d:%02d GMT", @@ -436,6 +441,9 @@ void nxt_h1p_complete_buffers(nxt_task_t *task, nxt_h1proto_t *h1p, nxt_bool_t all); nxt_msec_t nxt_h1p_conn_request_timer_value(nxt_conn_t *c, uintptr_t data); +int nxt_http_cond_value(nxt_task_t *task, nxt_http_request_t *r, + nxt_tstr_cond_t *cond); + extern const nxt_conn_state_t nxt_h1p_idle_close_state; #endif /* _NXT_HTTP_H_INCLUDED_ */ diff --git a/src/nxt_http_chunk_parse.c b/src/nxt_http_chunk_parse.c index b60bc801..a43ce75b 100644 --- a/src/nxt_http_chunk_parse.c +++ b/src/nxt_http_chunk_parse.c @@ -48,9 +48,7 @@ nxt_http_chunk_parse(nxt_task_t *task, nxt_http_chunk_parse_t *hcp, for (b = in; b != NULL; b = next) { - hcp->pos = b->mem.pos; - - while (hcp->pos < b->mem.free) { + while (b->mem.pos < b->mem.free) { /* * The sw_chunk state is tested outside the switch * to preserve hcp->pos and to not touch memory. 
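With "chunked_transform" enabled, a chunked request body is now de-chunked into the file-backed body buffer instead of being rejected with 411. Reduced to its essentials, the loop added in nxt_h1proto.c has the following shape (illustrative sketch only; the helper name is hypothetical, the calls and fields follow the hunks above):

static nxt_int_t
dechunk_to_file(nxt_task_t *task, nxt_h1proto_t *h1p, nxt_buf_t *in,
    nxt_buf_t *b)
{
    size_t     size;
    ssize_t    res;
    nxt_buf_t  *out, *chunk;

    out = nxt_http_chunk_parse(task, &h1p->chunked_parse, in);

    if (h1p->chunked_parse.error || h1p->chunked_parse.chunk_error) {
        return NXT_ERROR;  /* the caller maps these to 500 or 400 */
    }

    for (chunk = out; chunk != NULL; chunk = chunk->next) {
        size = nxt_buf_mem_used_size(&chunk->mem);

        res = nxt_fd_write(b->file->fd, chunk->mem.pos, size);
        if (nxt_slow_path(res < (ssize_t) size)) {
            return NXT_ERROR;
        }

        b->file_end += size;  /* running size of the de-chunked body */
    }

    /* h1p->chunked_parse.last becomes true once the final chunk is seen. */

    return NXT_OK;
}

Once the final chunk arrives, nxt_http_request_chunked_transform() (further below) marks the Transfer-Encoding field as skipped and synthesizes a Content-Length header from b->file_end, so applications behind the router always receive a body with a known length.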
@@ -76,7 +74,7 @@ nxt_http_chunk_parse(nxt_task_t *task, nxt_http_chunk_parse_t *hcp, /* ret == NXT_HTTP_CHUNK_END */ } - ch = *hcp->pos++; + ch = *b->mem.pos++; switch (state) { @@ -203,7 +201,7 @@ nxt_http_chunk_buffer(nxt_http_chunk_parse_t *hcp, nxt_buf_t ***tail, size_t size; nxt_buf_t *b; - p = hcp->pos; + p = in->mem.pos; size = in->mem.free - p; b = nxt_buf_mem_alloc(hcp->mem_pool, 0, 0); @@ -224,7 +222,7 @@ nxt_http_chunk_buffer(nxt_http_chunk_parse_t *hcp, nxt_buf_t ***tail, if (hcp->chunk_size < size) { p += hcp->chunk_size; - hcp->pos = p; + in->mem.pos = p; b->mem.free = p; b->mem.end = p; diff --git a/src/nxt_http_js.c b/src/nxt_http_js.c index e3beb8b4..34689fba 100644 --- a/src/nxt_http_js.c +++ b/src/nxt_http_js.c @@ -120,7 +120,8 @@ nxt_http_js_ext_uri(njs_vm_t *vm, njs_object_prop_t *prop, return NJS_DECLINED; } - return njs_vm_value_string_set(vm, retval, r->path->start, r->path->length); + return njs_vm_value_string_create(vm, retval, r->path->start, + r->path->length); } @@ -136,7 +137,8 @@ nxt_http_js_ext_host(njs_vm_t *vm, njs_object_prop_t *prop, return NJS_DECLINED; } - return njs_vm_value_string_set(vm, retval, r->host.start, r->host.length); + return njs_vm_value_string_create(vm, retval, r->host.start, + r->host.length); } @@ -152,9 +154,9 @@ nxt_http_js_ext_remote_addr(njs_vm_t *vm, njs_object_prop_t *prop, return NJS_DECLINED; } - return njs_vm_value_string_set(vm, retval, - nxt_sockaddr_address(r->remote), - r->remote->address_length); + return njs_vm_value_string_create(vm, retval, + nxt_sockaddr_address(r->remote), + r->remote->address_length); } @@ -162,6 +164,7 @@ static njs_int_t nxt_http_js_ext_get_args(njs_vm_t *vm, njs_object_prop_t *prop, njs_value_t *value, njs_value_t *setval, njs_value_t *retval) { + u_char *start; njs_int_t ret; njs_value_t *args; njs_opaque_value_t val; @@ -175,8 +178,8 @@ nxt_http_js_ext_get_args(njs_vm_t *vm, njs_object_prop_t *prop, args = njs_value_arg(&val); - ret = njs_vm_query_string_parse(vm, r->args->start, - r->args->start + r->args->length, args); + start = (r->args->start != NULL) ? 
r->args->start : (u_char *) ""; + ret = njs_vm_query_string_parse(vm, start, start + r->args->length, args); if (ret == NJS_ERROR) { return NJS_ERROR; @@ -214,8 +217,8 @@ nxt_http_js_ext_get_header(njs_vm_t *vm, njs_object_prop_t *prop, if (key.length == f->name_length && memcmp(key.start, f->name, f->name_length) == 0) { - return njs_vm_value_string_set(vm, retval, f->value, - f->value_length); + return njs_vm_value_string_create(vm, retval, f->value, + f->value_length); } } nxt_list_loop; @@ -250,7 +253,7 @@ nxt_http_js_ext_keys_header(njs_vm_t *vm, njs_value_t *value, njs_value_t *keys) return NJS_ERROR; } - rc = njs_vm_value_string_set(vm, value, f->name, f->name_length); + rc = njs_vm_value_string_create(vm, value, f->name, f->name_length); if (rc != NJS_OK) { return NJS_ERROR; } @@ -296,8 +299,8 @@ nxt_http_js_ext_get_cookie(njs_vm_t *vm, njs_object_prop_t *prop, if (key.length == nv->name_length && memcmp(key.start, nv->name, nv->name_length) == 0) { - return njs_vm_value_string_set(vm, retval, nv->value, - nv->value_length); + return njs_vm_value_string_create(vm, retval, nv->value, + nv->value_length); } } @@ -340,7 +343,7 @@ nxt_http_js_ext_keys_cookie(njs_vm_t *vm, njs_value_t *value, njs_value_t *keys) return NJS_ERROR; } - rc = njs_vm_value_string_set(vm, value, nv->name, nv->name_length); + rc = njs_vm_value_string_create(vm, value, nv->name, nv->name_length); if (rc != NJS_OK) { return NJS_ERROR; } @@ -380,7 +383,7 @@ nxt_http_js_ext_get_var(njs_vm_t *vm, njs_object_prop_t *prop, vv = nxt_var_get(&r->task, rtcf->tstr_state, &r->tstr_cache.var, &name, r); if (vv != NULL) { - return njs_vm_value_string_set(vm, retval, vv->start, vv->length); + return njs_vm_value_string_create(vm, retval, vv->start, vv->length); } njs_value_undefined_set(retval); diff --git a/src/nxt_http_parse.c b/src/nxt_http_parse.c index 50cbda2b..dd490e72 100644 --- a/src/nxt_http_parse.c +++ b/src/nxt_http_parse.c @@ -286,13 +286,11 @@ continue_target: case NXT_HTTP_TARGET_SPACE: rp->target_end = p; goto space_after_target; -#if 0 + case NXT_HTTP_TARGET_QUOTE_MARK: rp->quoted_target = 1; goto rest_of_target; -#else - case NXT_HTTP_TARGET_QUOTE_MARK: -#endif + case NXT_HTTP_TARGET_HASH: rp->complex_target = 1; goto rest_of_target; @@ -434,12 +432,7 @@ space_after_target: rp->request_line_end = p; - if (rp->complex_target != 0 -#if 0 - || rp->quoted_target != 0 -#endif - ) - { + if (rp->complex_target || rp->quoted_target) { rc = nxt_http_parse_complex_target(rp); if (nxt_slow_path(rc != NXT_OK)) { @@ -1041,7 +1034,7 @@ nxt_http_parse_complex_target(nxt_http_request_parse_t *rp) break; case sw_quoted: - //rp->quoted_target = 1; + rp->quoted_target = 1; if (ch >= '0' && ch <= '9') { high = (u_char) (ch - '0'); @@ -1189,6 +1182,7 @@ nxt_http_fields_hash(nxt_lvlhsh_t *hash, lhq.replace = 0; lhq.proto = &nxt_http_fields_hash_proto; + lhq.pool = NULL; for (i = 0; i < count; i++) { key = NXT_HTTP_FIELD_HASH_INIT; diff --git a/src/nxt_http_parse.h b/src/nxt_http_parse.h index fa95e842..9e2f6fab 100644 --- a/src/nxt_http_parse.h +++ b/src/nxt_http_parse.h @@ -61,9 +61,9 @@ struct nxt_http_request_parse_s { /* target with "/." 
*/ uint8_t complex_target; /* 1 bit */ -#if 0 /* target with "%" */ uint8_t quoted_target; /* 1 bit */ +#if 0 /* target with " " */ uint8_t space_in_target; /* 1 bit */ #endif @@ -96,11 +96,8 @@ struct nxt_http_field_s { typedef struct { - u_char *pos; nxt_mp_t *mem_pool; - uint64_t chunk_size; - uint8_t state; uint8_t last; /* 1 bit */ uint8_t chunk_error; /* 1 bit */ diff --git a/src/nxt_http_proxy.c b/src/nxt_http_proxy.c index 6aa3aabb..7f6ad686 100644 --- a/src/nxt_http_proxy.c +++ b/src/nxt_http_proxy.c @@ -381,9 +381,10 @@ nxt_http_proxy_error(nxt_task_t *task, void *obj, void *data) r = obj; peer = r->peer; - nxt_http_proto[peer->protocol].peer_close(task, peer); - - nxt_mp_release(r->mem_pool); + if (!peer->closed) { + nxt_http_proto[peer->protocol].peer_close(task, peer); + nxt_mp_release(r->mem_pool); + } nxt_http_request_error(&r->task, r, peer->status); } diff --git a/src/nxt_http_request.c b/src/nxt_http_request.c index f8d8d887..a7e9ff69 100644 --- a/src/nxt_http_request.c +++ b/src/nxt_http_request.c @@ -24,8 +24,6 @@ static void nxt_http_request_proto_info(nxt_task_t *task, static void nxt_http_request_mem_buf_completion(nxt_task_t *task, void *obj, void *data); static void nxt_http_request_done(nxt_task_t *task, void *obj, void *data); -static nxt_int_t nxt_http_request_access_log(nxt_task_t *task, - nxt_http_request_t *r, nxt_router_conf_t *rtcf); static u_char *nxt_http_date_cache_handler(u_char *buf, nxt_realtime_t *now, struct tm *tm, size_t size, const char *format); @@ -540,15 +538,58 @@ static const nxt_http_request_state_t nxt_http_request_body_state }; +static nxt_int_t +nxt_http_request_chunked_transform(nxt_http_request_t *r) +{ + size_t size; + u_char *p, *end; + nxt_http_field_t *f; + + r->chunked_field->skip = 1; + + size = r->body->file_end; + + f = nxt_list_zero_add(r->fields); + if (nxt_slow_path(f == NULL)) { + return NXT_ERROR; + } + + nxt_http_field_name_set(f, "Content-Length"); + + p = nxt_mp_nget(r->mem_pool, NXT_OFF_T_LEN); + if (nxt_slow_path(p == NULL)) { + return NXT_ERROR; + } + + f->value = p; + end = nxt_sprintf(p, p + NXT_OFF_T_LEN, "%uz", size); + f->value_length = end - p; + + r->content_length = f; + r->content_length_n = size; + + return NXT_OK; +} + + static void nxt_http_request_ready(nxt_task_t *task, void *obj, void *data) { + nxt_int_t ret; nxt_http_action_t *action; nxt_http_request_t *r; r = obj; action = r->conf->socket_conf->action; + if (r->chunked) { + ret = nxt_http_request_chunked_transform(r); + if (nxt_slow_path(ret != NXT_OK)) { + nxt_http_request_error(task, r, NXT_HTTP_INTERNAL_SERVER_ERROR); + return; + } + } + nxt_http_request_action(task, r, action); } @@ -818,12 +859,12 @@ nxt_http_request_error_handler(nxt_task_t *task, void *obj, void *data) void nxt_http_request_close_handler(nxt_task_t *task, void *obj, void *data) { - nxt_int_t ret; nxt_http_proto_t proto; nxt_router_conf_t *rtcf; nxt_http_request_t *r; nxt_http_protocol_t protocol; nxt_socket_conf_joint_t *conf; + nxt_router_access_log_t *access_log; r = obj; proto.any = data; @@ -835,8 +876,10 @@ nxt_http_request_close_handler(nxt_task_t *task, void *obj, void *data) r->logged = 1; if (rtcf->access_log != NULL) { - ret = nxt_http_request_access_log(task, r, rtcf); - if (ret == NXT_OK) { + access_log = rtcf->access_log; + + if (nxt_http_cond_value(task, r, &rtcf->log_cond)) { + access_log->handler(task, r, access_log, rtcf->log_format); return; } } @@ -868,57 +911,6 @@ nxt_http_request_close_handler(nxt_task_t *task, void *obj, void *data) } -static nxt_int_t 
-nxt_http_request_access_log(nxt_task_t *task, nxt_http_request_t *r, - nxt_router_conf_t *rtcf) -{ - nxt_int_t ret; - nxt_str_t str; - nxt_bool_t expr; - nxt_router_access_log_t *access_log; - - access_log = rtcf->access_log; - - expr = 1; - - if (rtcf->log_expr != NULL) { - - if (nxt_tstr_is_const(rtcf->log_expr)) { - nxt_tstr_str(rtcf->log_expr, &str); - - } else { - ret = nxt_tstr_query_init(&r->tstr_query, rtcf->tstr_state, - &r->tstr_cache, r, r->mem_pool); - if (nxt_slow_path(ret != NXT_OK)) { - return NXT_DECLINED; - } - - nxt_tstr_query(task, r->tstr_query, rtcf->log_expr, &str); - - if (nxt_slow_path(nxt_tstr_query_failed(r->tstr_query))) { - return NXT_DECLINED; - } - } - - if (str.length == 0 - || nxt_str_eq(&str, "0", 1) - || nxt_str_eq(&str, "false", 5) - || nxt_str_eq(&str, "null", 4) - || nxt_str_eq(&str, "undefined", 9)) - { - expr = 0; - } - } - - if (rtcf->log_negate ^ expr) { - access_log->handler(task, r, access_log, rtcf->log_format); - return NXT_OK; - } - - return NXT_DECLINED; -} - - static u_char * nxt_http_date_cache_handler(u_char *buf, nxt_realtime_t *now, struct tm *tm, size_t size, const char *format) @@ -946,6 +938,10 @@ nxt_http_arguments_parse(nxt_http_request_t *r) return NULL; } + if (nxt_slow_path(r->args->start == NULL)) { + goto end; + } + hash = NXT_HTTP_FIELD_HASH_INIT; name = NULL; name_length = 0; @@ -1026,6 +1022,8 @@ nxt_http_arguments_parse(nxt_http_request_t *r) } } +end: + r->arguments = args; return args; @@ -1316,3 +1314,48 @@ nxt_http_cookie_hash(nxt_mp_t *mp, nxt_str_t *name) { return nxt_http_field_hash(mp, name, 1, NXT_HTTP_URI_ENCODING_NONE); } + + +int +nxt_http_cond_value(nxt_task_t *task, nxt_http_request_t *r, + nxt_tstr_cond_t *cond) +{ + nxt_int_t ret; + nxt_str_t str; + nxt_bool_t expr; + nxt_router_conf_t *rtcf; + + rtcf = r->conf->socket_conf->router_conf; + + expr = 1; + + if (cond->expr != NULL) { + + if (nxt_tstr_is_const(cond->expr)) { + nxt_tstr_str(cond->expr, &str); + + } else { + ret = nxt_tstr_query_init(&r->tstr_query, rtcf->tstr_state, + &r->tstr_cache, r, r->mem_pool); + if (nxt_slow_path(ret != NXT_OK)) { + return -1; + } + + ret = nxt_tstr_query(task, r->tstr_query, cond->expr, &str); + if (nxt_slow_path(ret != NXT_OK)) { + return -1; + } + } + + if (str.length == 0 + || nxt_str_eq(&str, "0", 1) + || nxt_str_eq(&str, "false", 5) + || nxt_str_eq(&str, "null", 4) + || nxt_str_eq(&str, "undefined", 9)) + { + expr = 0; + } + } + + return cond->negate ^ expr; +} diff --git a/src/nxt_http_return.c b/src/nxt_http_return.c index b50e4ad0..a3551683 100644 --- a/src/nxt_http_return.c +++ b/src/nxt_http_return.c @@ -24,8 +24,8 @@ static nxt_http_action_t *nxt_http_return(nxt_task_t *task, nxt_http_request_t *r, nxt_http_action_t *action); static nxt_int_t nxt_http_return_encode(nxt_mp_t *mp, nxt_str_t *encoded, const nxt_str_t *location); -static void nxt_http_return_send_ready(nxt_task_t *task, void *obj, void *data); -static void nxt_http_return_send_error(nxt_task_t *task, void *obj, void *data); +static void nxt_http_return_send(nxt_task_t *task, nxt_http_request_t *r, + nxt_http_return_ctx_t *ctx); static const nxt_http_request_state_t nxt_http_return_send_state; @@ -120,8 +120,6 @@ nxt_http_return(nxt_task_t *task, nxt_http_request_t *r, ctx->encoded = conf->encoded; } - nxt_http_return_send_ready(task, r, ctx); - } else { rtcf = r->conf->socket_conf->router_conf; @@ -131,13 +129,15 @@ nxt_http_return(nxt_task_t *task, nxt_http_request_t *r, goto fail; } - nxt_tstr_query(task, r->tstr_query, conf->location, 
&ctx->location); - - nxt_tstr_query_resolve(task, r->tstr_query, ctx, - nxt_http_return_send_ready, - nxt_http_return_send_error); + ret = nxt_tstr_query(task, r->tstr_query, conf->location, + &ctx->location); + if (nxt_slow_path(ret != NXT_OK)) { + goto fail; + } } + nxt_http_return_send(task, r, ctx); + return NULL; fail: @@ -174,15 +174,11 @@ nxt_http_return_encode(nxt_mp_t *mp, nxt_str_t *encoded, static void -nxt_http_return_send_ready(nxt_task_t *task, void *obj, void *data) +nxt_http_return_send(nxt_task_t *task, nxt_http_request_t *r, + nxt_http_return_ctx_t *ctx) { - nxt_int_t ret; - nxt_http_field_t *field; - nxt_http_request_t *r; - nxt_http_return_ctx_t *ctx; - - r = obj; - ctx = data; + nxt_int_t ret; + nxt_http_field_t *field; if (ctx != NULL) { if (ctx->location.length > 0) { @@ -216,17 +212,6 @@ fail: } -static void -nxt_http_return_send_error(nxt_task_t *task, void *obj, void *data) -{ - nxt_http_request_t *r; - - r = obj; - - nxt_http_request_error(task, r, NXT_HTTP_INTERNAL_SERVER_ERROR); -} - - static const nxt_http_request_state_t nxt_http_return_send_state nxt_aligned(64) = { diff --git a/src/nxt_http_rewrite.c b/src/nxt_http_rewrite.c index fb216eeb..5de15ed7 100644 --- a/src/nxt_http_rewrite.c +++ b/src/nxt_http_rewrite.c @@ -28,9 +28,8 @@ nxt_http_rewrite_init(nxt_router_conf_t *rtcf, nxt_http_action_t *action, nxt_int_t nxt_http_rewrite(nxt_task_t *task, nxt_http_request_t *r) { - u_char *p; nxt_int_t ret; - nxt_str_t str, encoded_path, target; + nxt_str_t str; nxt_router_conf_t *rtcf; nxt_http_action_t *action; nxt_http_request_parse_t rp; @@ -53,9 +52,8 @@ nxt_http_rewrite(nxt_task_t *task, nxt_http_request_t *r) return NXT_ERROR; } - nxt_tstr_query(task, r->tstr_query, action->rewrite, &str); - - if (nxt_slow_path(nxt_tstr_query_failed(r->tstr_query))) { + ret = nxt_tstr_query(task, r->tstr_query, action->rewrite, &str); + if (nxt_slow_path(ret != NXT_OK)) { return NXT_ERROR; } } @@ -72,30 +70,6 @@ nxt_http_rewrite(nxt_task_t *task, nxt_http_request_t *r) return NXT_ERROR; } - p = (rp.args.length > 0) ? 
rp.args.start - 1 : rp.target_end; - - encoded_path.start = rp.target_start; - encoded_path.length = p - encoded_path.start; - - if (r->args->length == 0) { - r->target = encoded_path; - - } else { - target.length = encoded_path.length + 1 + r->args->length; - - target.start = nxt_mp_alloc(r->mem_pool, target.length); - if (target.start == NULL) { - return NXT_ERROR; - } - - p = nxt_cpymem(target.start, encoded_path.start, encoded_path.length); - *p++ = '?'; - nxt_memcpy(p, r->args->start, r->args->length); - - r->target = target; - r->args->start = p; - } - r->path = nxt_mp_alloc(r->mem_pool, sizeof(nxt_str_t)); if (nxt_slow_path(r->path == NULL)) { return NXT_ERROR; @@ -103,8 +77,11 @@ nxt_http_rewrite(nxt_task_t *task, nxt_http_request_t *r) *r->path = rp.path; + r->uri_changed = 1; + r->quoted_target = rp.quoted_target; + if (nxt_slow_path(r->log_route)) { - nxt_log(task, NXT_LOG_NOTICE, "URI rewritten to \"%V\"", &r->target); + nxt_log(task, NXT_LOG_NOTICE, "URI rewritten to \"%V\"", r->path); } return NXT_OK; diff --git a/src/nxt_http_route.c b/src/nxt_http_route.c index 4a64d5c1..bd0646f3 100644 --- a/src/nxt_http_route.c +++ b/src/nxt_http_route.c @@ -51,6 +51,7 @@ typedef struct { nxt_conf_value_t *query; nxt_conf_value_t *source; nxt_conf_value_t *destination; + nxt_conf_value_t *condition; } nxt_http_route_match_conf_t; @@ -103,13 +104,13 @@ struct nxt_http_route_rule_s { } name; } u; - nxt_http_route_pattern_t pattern[0]; + nxt_http_route_pattern_t pattern[]; }; typedef struct { uint32_t items; - nxt_http_route_rule_t *rule[0]; + nxt_http_route_rule_t *rule[]; } nxt_http_route_ruleset_t; @@ -117,7 +118,7 @@ typedef struct { /* The object must be the first field. */ nxt_http_route_object_t object:8; uint32_t items; - nxt_http_route_ruleset_t *ruleset[0]; + nxt_http_route_ruleset_t *ruleset[]; } nxt_http_route_table_t; @@ -125,7 +126,7 @@ struct nxt_http_route_addr_rule_s { /* The object must be the first field. 
*/ nxt_http_route_object_t object:8; uint32_t items; - nxt_http_route_addr_pattern_t addr_pattern[0]; + nxt_http_route_addr_pattern_t addr_pattern[]; }; @@ -138,21 +139,22 @@ typedef union { typedef struct { uint32_t items; + nxt_tstr_cond_t condition; nxt_http_action_t action; - nxt_http_route_test_t test[0]; + nxt_http_route_test_t test[]; } nxt_http_route_match_t; struct nxt_http_route_s { nxt_str_t name; uint32_t items; - nxt_http_route_match_t *match[0]; + nxt_http_route_match_t *match[]; }; struct nxt_http_routes_s { uint32_t items; - nxt_http_route_t *route[0]; + nxt_http_route_t *route[]; }; @@ -193,8 +195,8 @@ static nxt_int_t nxt_http_action_resolve(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, nxt_http_action_t *action); static nxt_http_action_t *nxt_http_pass_var(nxt_task_t *task, nxt_http_request_t *r, nxt_http_action_t *action); -static void nxt_http_pass_query_ready(nxt_task_t *task, void *obj, void *data); -static void nxt_http_pass_query_error(nxt_task_t *task, void *obj, void *data); +static void nxt_http_pass_query(nxt_task_t *task, nxt_http_request_t *r, + nxt_http_action_t *action); static nxt_int_t nxt_http_pass_find(nxt_mp_t *mp, nxt_router_conf_t *rtcf, nxt_str_t *pass, nxt_http_action_t *action); static nxt_int_t nxt_http_route_find(nxt_http_routes_t *routes, nxt_str_t *name, @@ -350,6 +352,12 @@ static nxt_conf_map_t nxt_http_route_match_conf[] = { NXT_CONF_MAP_PTR, offsetof(nxt_http_route_match_conf_t, destination), }, + + { + nxt_string("if"), + NXT_CONF_MAP_PTR, + offsetof(nxt_http_route_match_conf_t, condition), + }, }; @@ -397,7 +405,9 @@ nxt_http_route_match_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, uint32_t n; nxt_mp_t *mp; nxt_int_t ret; - nxt_conf_value_t *match_conf, *action_conf; + nxt_str_t str; + nxt_conf_value_t *match_conf, *action_conf, *condition; + nxt_router_conf_t *rtcf; nxt_http_route_test_t *test; nxt_http_route_rule_t *rule; nxt_http_route_table_t *table; @@ -405,17 +415,30 @@ nxt_http_route_match_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, nxt_http_route_addr_rule_t *addr_rule; nxt_http_route_match_conf_t mtcf; - static nxt_str_t match_path = nxt_string("/match"); - static nxt_str_t action_path = nxt_string("/action"); + static const nxt_str_t if_path = nxt_string("/if"); + static const nxt_str_t match_path = nxt_string("/match"); + static const nxt_str_t action_path = nxt_string("/action"); match_conf = nxt_conf_get_path(cv, &match_path); n = (match_conf != NULL) ? 
nxt_conf_object_members_count(match_conf) : 0; size = sizeof(nxt_http_route_match_t) + n * sizeof(nxt_http_route_test_t *); - mp = tmcf->router_conf->mem_pool; + rtcf = tmcf->router_conf; + mp = rtcf->mem_pool; + + condition = NULL; + + if (match_conf != NULL) { + condition = nxt_conf_get_path(match_conf, &if_path); + + if (condition != NULL) { + n--; + size -= sizeof(nxt_http_route_test_t *); + } + } - match = nxt_mp_alloc(mp, size); + match = nxt_mp_zalloc(mp, size); if (nxt_slow_path(match == NULL)) { return NULL; } @@ -432,7 +455,7 @@ nxt_http_route_match_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, return NULL; } - if (n == 0) { + if (n == 0 && condition == NULL) { return match; } @@ -445,6 +468,15 @@ nxt_http_route_match_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, return NULL; } + if (condition != NULL) { + nxt_conf_get_string(condition, &str); + + ret = nxt_tstr_cond_compile(rtcf->tstr_state, &str, &match->condition); + if (nxt_slow_path(ret != NXT_OK)) { + return NULL; + } + } + test = &match->test[0]; if (mtcf.scheme != NULL) { @@ -1344,10 +1376,13 @@ nxt_http_pass_var(nxt_task_t *task, nxt_http_request_t *r, action->u.pass = nxt_pointer_to(action, sizeof(nxt_http_action_t)); - nxt_tstr_query(task, r->tstr_query, tstr, action->u.pass); - nxt_tstr_query_resolve(task, r->tstr_query, action, - nxt_http_pass_query_ready, - nxt_http_pass_query_error); + ret = nxt_tstr_query(task, r->tstr_query, tstr, action->u.pass); + if (nxt_slow_path(ret != NXT_OK)) { + goto fail; + } + + nxt_http_pass_query(task, r, action); + return NULL; fail: @@ -1358,16 +1393,13 @@ fail: static void -nxt_http_pass_query_ready(nxt_task_t *task, void *obj, void *data) +nxt_http_pass_query(nxt_task_t *task, nxt_http_request_t *r, + nxt_http_action_t *action) { - nxt_int_t ret; - nxt_router_conf_t *rtcf; - nxt_http_action_t *action; - nxt_http_status_t status; - nxt_http_request_t *r; - - r = obj; - action = data; + nxt_int_t ret; + nxt_router_conf_t *rtcf; + nxt_http_status_t status; + rtcf = r->conf->socket_conf->router_conf; nxt_debug(task, "http pass lookup: %V", action->u.pass); @@ -1386,17 +1418,6 @@ nxt_http_pass_query_ready(nxt_task_t *task, void *obj, void *data) } -static void -nxt_http_pass_query_error(nxt_task_t *task, void *obj, void *data) -{ - nxt_http_request_t *r; - - r = obj; - - nxt_http_request_error(task, r, NXT_HTTP_INTERNAL_SERVER_ERROR); -} - - static nxt_int_t nxt_http_pass_find(nxt_mp_t *mp, nxt_router_conf_t *rtcf, nxt_str_t *pass, nxt_http_action_t *action) @@ -1607,6 +1628,12 @@ nxt_http_route_match(nxt_task_t *task, nxt_http_request_t *r, nxt_int_t ret; nxt_http_route_test_t *test, *end; + ret = nxt_http_cond_value(task, r, &match->condition); + if (ret <= 0) { + /* 0 => NULL, -1 => NXT_HTTP_ACTION_ERROR. 
*/ + return (nxt_http_action_t *) (intptr_t) ret; + } + test = &match->test[0]; end = test + match->items; @@ -2134,6 +2161,10 @@ nxt_http_route_pattern(nxt_http_request_t *r, nxt_http_route_pattern_t *pattern, return 0; } + if (nxt_slow_path(start == NULL)) { + return 1; + } + nxt_assert(pattern->u.pattern_slices != NULL); pattern_slices = pattern->u.pattern_slices; diff --git a/src/nxt_http_set_headers.c b/src/nxt_http_set_headers.c index 25dd7478..7fd6aba5 100644 --- a/src/nxt_http_set_headers.c +++ b/src/nxt_http_set_headers.c @@ -139,9 +139,8 @@ nxt_http_set_headers(nxt_http_request_t *r) return NXT_ERROR; } - nxt_tstr_query(&r->task, r->tstr_query, hv->value, &value[i]); - - if (nxt_slow_path(nxt_tstr_query_failed(r->tstr_query))) { + ret = nxt_tstr_query(&r->task, r->tstr_query, hv->value, &value[i]); + if (nxt_slow_path(ret != NXT_OK)) { return NXT_ERROR; } } diff --git a/src/nxt_http_static.c b/src/nxt_http_static.c index ee25015e..67591595 100644 --- a/src/nxt_http_static.c +++ b/src/nxt_http_static.c @@ -47,8 +47,8 @@ static nxt_http_action_t *nxt_http_static(nxt_task_t *task, nxt_http_request_t *r, nxt_http_action_t *action); static void nxt_http_static_iterate(nxt_task_t *task, nxt_http_request_t *r, nxt_http_static_ctx_t *ctx); -static void nxt_http_static_send_ready(nxt_task_t *task, void *obj, void *data); -static void nxt_http_static_send_error(nxt_task_t *task, void *obj, void *data); +static void nxt_http_static_send(nxt_task_t *task, nxt_http_request_t *r, + nxt_http_static_ctx_t *ctx); static void nxt_http_static_next(nxt_task_t *task, nxt_http_request_t *r, nxt_http_static_ctx_t *ctx, nxt_http_status_t status); #if (NXT_HAVE_OPENAT2) @@ -271,35 +271,44 @@ nxt_http_static_iterate(nxt_task_t *task, nxt_http_request_t *r, } #endif - nxt_http_static_send_ready(task, r, ctx); - } else { rtcf = r->conf->socket_conf->router_conf; ret = nxt_tstr_query_init(&r->tstr_query, rtcf->tstr_state, &r->tstr_cache, r, r->mem_pool); if (nxt_slow_path(ret != NXT_OK)) { - nxt_http_request_error(task, r, NXT_HTTP_INTERNAL_SERVER_ERROR); - return; + goto fail; } - nxt_tstr_query(task, r->tstr_query, share->tstr, &ctx->share); + ret = nxt_tstr_query(task, r->tstr_query, share->tstr, &ctx->share); + if (nxt_slow_path(ret != NXT_OK)) { + goto fail; + } #if (NXT_HAVE_OPENAT2) if (conf->chroot != NULL && ctx->share_idx == 0) { - nxt_tstr_query(task, r->tstr_query, conf->chroot, &ctx->chroot); + ret = nxt_tstr_query(task, r->tstr_query, conf->chroot, + &ctx->chroot); + if (nxt_slow_path(ret != NXT_OK)) { + goto fail; + } } #endif + } + + nxt_http_static_send(task, r, ctx); + + return; + +fail: - nxt_tstr_query_resolve(task, r->tstr_query, ctx, - nxt_http_static_send_ready, - nxt_http_static_send_error); - } + nxt_http_request_error(task, r, NXT_HTTP_INTERNAL_SERVER_ERROR); } static void -nxt_http_static_send_ready(nxt_task_t *task, void *obj, void *data) +nxt_http_static_send(nxt_task_t *task, nxt_http_request_t *r, + nxt_http_static_ctx_t *ctx) { size_t length, encode; u_char *p, *fname; @@ -314,13 +323,9 @@ nxt_http_static_send_ready(nxt_task_t *task, void *obj, void *data) nxt_http_status_t status; nxt_router_conf_t *rtcf; nxt_http_action_t *action; - nxt_http_request_t *r; nxt_work_handler_t body_handler; - nxt_http_static_ctx_t *ctx; nxt_http_static_conf_t *conf; - r = obj; - ctx = data; action = ctx->action; conf = action->u.conf; rtcf = r->conf->socket_conf->router_conf; @@ -663,17 +668,6 @@ fail: static void -nxt_http_static_send_error(nxt_task_t *task, void *obj, void *data) -{ - 
nxt_http_request_t *r; - - r = obj; - - nxt_http_request_error(task, r, NXT_HTTP_INTERNAL_SERVER_ERROR); -} - - -static void nxt_http_static_next(nxt_task_t *task, nxt_http_request_t *r, nxt_http_static_ctx_t *ctx, nxt_http_status_t status) { diff --git a/src/nxt_http_variables.c b/src/nxt_http_variables.c index 85ae6004..3a1746b3 100644 --- a/src/nxt_http_variables.c +++ b/src/nxt_http_variables.c @@ -366,8 +366,9 @@ nxt_http_log_date(u_char *buf, nxt_realtime_t *now, struct tm *tm, u_char sign; time_t gmtoff; - static const char *month[] = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" }; + static const char * const month[] = { "Jan", "Feb", "Mar", "Apr", "May", + "Jun", "Jul", "Aug", "Sep", "Oct", + "Nov", "Dec" }; gmtoff = nxt_timezone(tm) / 60; diff --git a/src/nxt_isolation.c b/src/nxt_isolation.c index ed5e0d76..909a43f4 100644 --- a/src/nxt_isolation.c +++ b/src/nxt_isolation.c @@ -224,8 +224,8 @@ nxt_isolation_set_cgroup(nxt_task_t *task, nxt_conf_value_t *isolation, nxt_str_t str; nxt_conf_value_t *obj; - static nxt_str_t cgname = nxt_string("cgroup"); - static nxt_str_t path = nxt_string("path"); + static const nxt_str_t cgname = nxt_string("cgroup"); + static const nxt_str_t path = nxt_string("path"); obj = nxt_conf_get_object_member(isolation, &cgname, NULL); if (obj == NULL) { @@ -260,7 +260,7 @@ nxt_isolation_set_namespaces(nxt_task_t *task, nxt_conf_value_t *isolation, nxt_int_t ret; nxt_conf_value_t *obj; - static nxt_str_t nsname = nxt_string("namespaces"); + static const nxt_str_t nsname = nxt_string("namespaces"); obj = nxt_conf_get_object_member(isolation, &nsname, NULL); if (obj != NULL) { @@ -286,8 +286,8 @@ nxt_isolation_set_creds(nxt_task_t *task, nxt_conf_value_t *isolation, nxt_clone_t *clone; nxt_conf_value_t *array; - static nxt_str_t uidname = nxt_string("uidmap"); - static nxt_str_t gidname = nxt_string("gidmap"); + static const nxt_str_t uidname = nxt_string("uidmap"); + static const nxt_str_t gidname = nxt_string("gidmap"); clone = &process->isolation.clone; @@ -323,7 +323,7 @@ nxt_isolation_credential_map(nxt_task_t *task, nxt_mp_t *mp, nxt_uint_t i; nxt_conf_value_t *obj; - static nxt_conf_map_t nxt_clone_map_entry_conf[] = { + static const nxt_conf_map_t nxt_clone_map_entry_conf[] = { { nxt_string("container"), NXT_CONF_MAP_INT64, @@ -496,7 +496,7 @@ nxt_isolation_set_rootfs(nxt_task_t *task, nxt_conf_value_t *isolation, nxt_str_t str; nxt_conf_value_t *obj; - static nxt_str_t rootfs_name = nxt_string("rootfs"); + static const nxt_str_t rootfs_name = nxt_string("rootfs"); obj = nxt_conf_get_object_member(isolation, &rootfs_name, NULL); if (obj != NULL) { @@ -536,10 +536,10 @@ nxt_isolation_set_automount(nxt_task_t *task, nxt_conf_value_t *isolation, nxt_conf_value_t *conf, *value; nxt_process_automount_t *automount; - static nxt_str_t automount_name = nxt_string("automount"); - static nxt_str_t langdeps_name = nxt_string("language_deps"); - static nxt_str_t tmp_name = nxt_string("tmpfs"); - static nxt_str_t proc_name = nxt_string("procfs"); + static const nxt_str_t automount_name = nxt_string("automount"); + static const nxt_str_t langdeps_name = nxt_string("language_deps"); + static const nxt_str_t tmp_name = nxt_string("tmpfs"); + static const nxt_str_t proc_name = nxt_string("procfs"); automount = &process->isolation.automount; @@ -780,7 +780,7 @@ nxt_isolation_prepare_rootfs(nxt_task_t *task, nxt_process_t *process) continue; } - ret = nxt_fs_mkdir_all(dst, S_IRWXU | S_IRWXG | S_IRWXO); + ret = 
nxt_fs_mkdir_p(dst, 0777); if (nxt_slow_path(ret != NXT_OK)) { nxt_alert(task, "mkdir(%s) %E", dst, nxt_errno); goto undo; @@ -1110,7 +1110,7 @@ nxt_isolation_set_new_privs(nxt_task_t *task, nxt_conf_value_t *isolation, { nxt_conf_value_t *obj; - static nxt_str_t new_privs_name = nxt_string("new_privs"); + static const nxt_str_t new_privs_name = nxt_string("new_privs"); obj = nxt_conf_get_object_member(isolation, &new_privs_name, NULL); if (obj != NULL) { diff --git a/src/nxt_js.c b/src/nxt_js.c index 6885afb7..d46231bd 100644 --- a/src/nxt_js.c +++ b/src/nxt_js.c @@ -69,14 +69,6 @@ nxt_js_module_loader(njs_vm_t *vm, njs_external_ptr_t external, njs_str_t *name) } -static njs_vm_ops_t nxt_js_ops = { - NULL, - NULL, - nxt_js_module_loader, - NULL, -}; - - njs_int_t nxt_js_proto_id; @@ -127,13 +119,14 @@ nxt_js_vm_create(nxt_js_conf_t *jcf) { u_char *p; size_t size; + njs_vm_t *vm; nxt_uint_t i; njs_vm_opt_t opts; nxt_js_module_t *module, *mod; - static nxt_str_t import_str = nxt_string("import"); - static nxt_str_t from_str = nxt_string("from"); - static nxt_str_t global_str = nxt_string("globalThis"); + static const nxt_str_t import_str = nxt_string("import"); + static const nxt_str_t from_str = nxt_string("from"); + static const nxt_str_t global_str = nxt_string("globalThis"); njs_vm_opt_init(&opts); @@ -146,7 +139,6 @@ nxt_js_vm_create(nxt_js_conf_t *jcf) goto done; } - opts.ops = &nxt_js_ops; opts.external = jcf; size = 0; @@ -203,7 +195,13 @@ nxt_js_vm_create(nxt_js_conf_t *jcf) done: - return njs_vm_create(&opts); + vm = njs_vm_create(&opts); + + if (nxt_fast_path(vm != NULL)) { + njs_vm_set_module_loader(vm, nxt_js_module_loader, jcf); + } + + return vm; } @@ -239,14 +237,15 @@ nxt_js_add_tpl(nxt_js_conf_t *jcf, nxt_str_t *str, nxt_bool_t strz) nxt_js_t *js; nxt_str_t *func; - static nxt_str_t func_str = nxt_string("function(uri, host, remoteAddr, " - "args, headers, cookies, vars) {" - " return "); + static const nxt_str_t func_str = + nxt_string("function(uri, host, remoteAddr, " + "args, headers, cookies, vars) {" + " return "); /* * Appending a terminating null character if strz is true. */ - static nxt_str_t strz_str = nxt_string(" + '\\x00'"); + static const nxt_str_t strz_str = nxt_string(" + '\\x00'"); size = func_str.length + str->length + 1; diff --git a/src/nxt_lib.c b/src/nxt_lib.c index aba07dda..de23ce0a 100644 --- a/src/nxt_lib.c +++ b/src/nxt_lib.c @@ -32,7 +32,7 @@ const char *malloc_conf = "junk:true"; nxt_int_t nxt_lib_start(const char *app, char **argv, char ***envp) { - int n; + int n = 0; nxt_int_t flags; nxt_bool_t update; nxt_thread_t *thread; @@ -87,13 +87,32 @@ nxt_lib_start(const char *app, char **argv, char ***envp) #ifdef _SC_NPROCESSORS_ONLN /* Linux, FreeBSD, Solaris, MacOSX. 
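(Editor note, inside this comment: sysconf(_SC_NPROCESSORS_ONLN) counts the CPUs that are online system-wide; the NXT_HAVE_LINUX_SCHED_GETAFFINITY block added below narrows that to the CPUs this process is actually allowed to run on, e.g. under taskset or a cpuset cgroup, by counting the bits of the affinity mask with CPU_COUNT_S().)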
*/ n = sysconf(_SC_NPROCESSORS_ONLN); +#endif + +#if (NXT_HAVE_LINUX_SCHED_GETAFFINITY) + if (n > 0) { + int err; + size_t size; + cpu_set_t *set; + + set = CPU_ALLOC(n); + if (set == NULL) { + return NXT_ERROR; + } + + size = CPU_ALLOC_SIZE(n); + + err = sched_getaffinity(0, size, set); + if (err == 0) { + n = CPU_COUNT_S(size, set); + } + + CPU_FREE(set); + } #elif (NXT_HPUX) n = mpctl(MPC_GETNUMSPUS, NULL, NULL); -#else - n = 0; - #endif nxt_debug(&nxt_main_task, "ncpu: %d", n); diff --git a/src/nxt_listen_socket.c b/src/nxt_listen_socket.c index 047c1ef9..4fe3e20b 100644 --- a/src/nxt_listen_socket.c +++ b/src/nxt_listen_socket.c @@ -132,7 +132,7 @@ nxt_listen_socket_create(nxt_task_t *task, nxt_mp_t *mp, nxt_runtime_t *rt = thr->runtime; name = (nxt_file_name_t *) sa->u.sockaddr_un.sun_path; - access = rt->control_mode > 0 ? rt->control_mode : S_IRUSR | S_IWUSR; + access = rt->control_mode > 0 ? rt->control_mode : 0600; if (nxt_file_set_access(name, access) != NXT_OK) { goto listen_fail; diff --git a/src/nxt_listen_socket.h b/src/nxt_listen_socket.h index e2435b76..8bf320bc 100644 --- a/src/nxt_listen_socket.h +++ b/src/nxt_listen_socket.h @@ -35,16 +35,16 @@ typedef struct { } nxt_listen_socket_t; -#if (NXT_FREEBSD || NXT_MACOSX || NXT_OPENBSD) +#if (NXT_LINUX || NXT_FREEBSD || NXT_MACOSX || NXT_OPENBSD) /* - * A backlog is limited by system-wide sysctl kern.ipc.somaxconn. - * This is supported by FreeBSD 2.2, OpenBSD 2.0, and MacOSX. + * A backlog is limited by system-wide sysctl {net.core,kern.ipc}.somaxconn. + * This is supported by Linux, FreeBSD 2.2, OpenBSD 2.0, and MacOSX. */ #define NXT_LISTEN_BACKLOG -1 #else /* - * Linux, Solaris, and NetBSD treat negative value as 0. + * Solaris and NetBSD treat negative value as 0. * 511 is a safe default. 
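(Note: this constant is only a compile-time fallback; the per-listener "backlog" option introduced elsewhere in this patch takes precedence when set, see nxt_router_socket_conf().)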
*/ #define NXT_LISTEN_BACKLOG 511 diff --git a/src/nxt_main.h b/src/nxt_main.h index aa96256e..7880e55f 100644 --- a/src/nxt_main.h +++ b/src/nxt_main.h @@ -104,7 +104,6 @@ typedef struct { #include <nxt_hash.h> #include <nxt_sort.h> -#include <nxt_vector.h> #include <nxt_list.h> #include <nxt_service.h> diff --git a/src/nxt_main_process.c b/src/nxt_main_process.c index 060ead41..00318226 100644 --- a/src/nxt_main_process.c +++ b/src/nxt_main_process.c @@ -1275,13 +1275,11 @@ nxt_main_listening_socket(nxt_sockaddr_t *sa, nxt_listening_socket_t *ls) && sa->u.sockaddr_un.sun_path[0] != '\0') { char *filename; - mode_t access; nxt_thread_t *thr; filename = sa->u.sockaddr_un.sun_path; - access = (S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH); - if (chmod(filename, access) != 0) { + if (chmod(filename, 0666) != 0) { ls->end = nxt_sprintf(ls->start, ls->end, "chmod(\\\"%s\\\") failed %E", filename, nxt_errno); @@ -1357,6 +1355,12 @@ static nxt_conf_map_t nxt_app_lang_module_map[] = { }, { + nxt_string("name"), + NXT_CONF_MAP_CSTRZ, + offsetof(nxt_app_lang_module_t, name), + }, + + { nxt_string("version"), NXT_CONF_MAP_CSTRZ, offsetof(nxt_app_lang_module_t, version), diff --git a/src/nxt_php_sapi.c b/src/nxt_php_sapi.c index 77117533..da667b66 100644 --- a/src/nxt_php_sapi.c +++ b/src/nxt_php_sapi.c @@ -377,9 +377,9 @@ nxt_php_setup(nxt_task_t *task, nxt_process_t *process, nxt_conf_value_t *value; nxt_php_app_conf_t *c; - static nxt_str_t file_str = nxt_string("file"); - static nxt_str_t user_str = nxt_string("user"); - static nxt_str_t admin_str = nxt_string("admin"); + static const nxt_str_t file_str = nxt_string("file"); + static const nxt_str_t user_str = nxt_string("user"); + static const nxt_str_t admin_str = nxt_string("admin"); c = &conf->u.php; @@ -529,9 +529,9 @@ nxt_php_set_target(nxt_task_t *task, nxt_php_target_t *target, nxt_int_t ret; nxt_conf_value_t *value; - static nxt_str_t root_str = nxt_string("root"); - static nxt_str_t script_str = nxt_string("script"); - static nxt_str_t index_str = nxt_string("index"); + static const nxt_str_t root_str = nxt_string("root"); + static const nxt_str_t script_str = nxt_string("script"); + static const nxt_str_t index_str = nxt_string("index"); value = nxt_conf_get_object_member(conf, &root_str, NULL); diff --git a/src/nxt_port_memory.c b/src/nxt_port_memory.c index 0a4a6c53..ad6e97ad 100644 --- a/src/nxt_port_memory.c +++ b/src/nxt_port_memory.c @@ -393,8 +393,7 @@ nxt_shm_open(nxt_task_t *task, size_t size) #elif (NXT_HAVE_SHM_OPEN_ANON) - fd = shm_open(SHM_ANON, O_RDWR, S_IRUSR | S_IWUSR); - + fd = shm_open(SHM_ANON, O_RDWR, 0600); if (nxt_slow_path(fd == -1)) { nxt_alert(task, "shm_open(SHM_ANON) failed %E", nxt_errno); @@ -408,8 +407,7 @@ nxt_shm_open(nxt_task_t *task, size_t size) /* Just in case. */ shm_unlink((char *) name); - fd = shm_open((char *) name, O_CREAT | O_EXCL | O_RDWR, S_IRUSR | S_IWUSR); - + fd = shm_open((char *) name, O_CREAT | O_EXCL | O_RDWR, 0600); if (nxt_slow_path(fd == -1)) { nxt_alert(task, "shm_open(%s) failed %E", name, nxt_errno); @@ -454,6 +452,10 @@ nxt_port_mmap_get(nxt_task_t *task, nxt_port_mmaps_t *mmaps, nxt_chunk_id_t *c, nxt_thread_mutex_lock(&mmaps->mutex); + if (nxt_slow_path(mmaps->elts == NULL)) { + goto end; + } + end_port_mmap = mmaps->elts + mmaps->size; for (port_mmap = mmaps->elts; @@ -500,6 +502,8 @@ nxt_port_mmap_get(nxt_task_t *task, nxt_port_mmaps_t *mmaps, nxt_chunk_id_t *c, /* TODO introduce port_mmap limit and release wait. 
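(Note: the end: label added below is the target of the new mmaps->elts == NULL check above; when no shared-memory segments exist yet, the search loop is skipped and a fresh mmap is allocated straight away.)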
*/ +end: + *c = 0; mmap_handler = nxt_port_new_port_mmap(task, mmaps, tracking, n); diff --git a/src/nxt_random.c b/src/nxt_random.c index 1211896c..8290488c 100644 --- a/src/nxt_random.c +++ b/src/nxt_random.c @@ -148,10 +148,10 @@ nxt_random(nxt_random_t *r) nxt_random_stir(r); } - val = nxt_random_byte(r) << 24; - val |= nxt_random_byte(r) << 16; - val |= nxt_random_byte(r) << 8; - val |= nxt_random_byte(r); + val = (uint32_t) nxt_random_byte(r) << 24; + val |= (uint32_t) nxt_random_byte(r) << 16; + val |= (uint32_t) nxt_random_byte(r) << 8; + val |= (uint32_t) nxt_random_byte(r); return val; } diff --git a/src/nxt_router.c b/src/nxt_router.c index 1a1aca2b..076cd134 100644 --- a/src/nxt_router.c +++ b/src/nxt_router.c @@ -40,6 +40,7 @@ typedef struct { typedef struct { nxt_str_t pass; nxt_str_t application; + int backlog; } nxt_router_listener_conf_t; @@ -166,7 +167,7 @@ static void nxt_router_app_prefork_ready(nxt_task_t *task, static void nxt_router_app_prefork_error(nxt_task_t *task, nxt_port_recv_msg_t *msg, void *data); static nxt_socket_conf_t *nxt_router_socket_conf(nxt_task_t *task, - nxt_router_temp_conf_t *tmcf, nxt_str_t *name); + nxt_router_temp_conf_t *tmcf, nxt_str_t *name, int backlog); static nxt_int_t nxt_router_listen_socket_find(nxt_router_temp_conf_t *tmcf, nxt_socket_conf_t *nskcf, nxt_sockaddr_t *sa); @@ -1077,14 +1078,14 @@ nxt_router_temp_conf(nxt_task_t *task) rtcf = nxt_mp_zget(mp, sizeof(nxt_router_conf_t)); if (nxt_slow_path(rtcf == NULL)) { - goto fail; + goto out_free_mp; } rtcf->mem_pool = mp; rtcf->tstr_state = nxt_tstr_state_new(mp, 0); if (nxt_slow_path(rtcf->tstr_state == NULL)) { - goto fail; + goto out_free_mp; } #if (NXT_HAVE_NJS) @@ -1093,12 +1094,12 @@ nxt_router_temp_conf(nxt_task_t *task) tmp = nxt_mp_create(1024, 128, 256, 32); if (nxt_slow_path(tmp == NULL)) { - goto fail; + goto out_free_tstr_state; } tmcf = nxt_mp_zget(tmp, sizeof(nxt_router_temp_conf_t)); if (nxt_slow_path(tmcf == NULL)) { - goto temp_fail; + goto out_free; } tmcf->mem_pool = tmp; @@ -1109,7 +1110,7 @@ nxt_router_temp_conf(nxt_task_t *task) tmcf->engines = nxt_array_create(tmcf->mem_pool, 4, sizeof(nxt_router_engine_conf_t)); if (nxt_slow_path(tmcf->engines == NULL)) { - goto temp_fail; + goto out_free; } nxt_queue_init(&creating_sockets); @@ -1131,16 +1132,18 @@ nxt_router_temp_conf(nxt_task_t *task) return tmcf; -temp_fail: +out_free: nxt_mp_destroy(tmp); -fail: +out_free_tstr_state: if (rtcf->tstr_state != NULL) { nxt_tstr_state_release(rtcf->tstr_state); } +out_free_mp: + nxt_mp_destroy(mp); return NULL; @@ -1410,7 +1413,7 @@ nxt_router_conf_send(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, static nxt_conf_map_t nxt_router_conf[] = { { - nxt_string("listeners_threads"), + nxt_string("listen_threads"), NXT_CONF_MAP_INT32, offsetof(nxt_router_conf_t, threads), }, @@ -1492,6 +1495,12 @@ static nxt_conf_map_t nxt_router_listener_conf[] = { NXT_CONF_MAP_STR_COPY, offsetof(nxt_router_listener_conf_t, application), }, + + { + nxt_string("backlog"), + NXT_CONF_MAP_INT32, + offsetof(nxt_router_listener_conf_t, backlog), + }, }; @@ -1573,6 +1582,12 @@ static nxt_conf_map_t nxt_router_http_conf[] = { NXT_CONF_MAP_INT8, offsetof(nxt_socket_conf_t, server_version), }, + + { + nxt_string("chunked_transform"), + NXT_CONF_MAP_INT8, + offsetof(nxt_socket_conf_t, chunked_transform), + }, }; @@ -1622,7 +1637,7 @@ nxt_router_conf_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, nxt_conf_value_t *js_module; #endif nxt_conf_value_t *root, *conf, *http, *value, *websocket; - 
nxt_conf_value_t *applications, *application; + nxt_conf_value_t *applications, *application, *settings; nxt_conf_value_t *listeners, *listener; nxt_socket_conf_t *skcf; nxt_router_conf_t *rtcf; @@ -1632,25 +1647,30 @@ nxt_router_conf_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, nxt_router_app_conf_t apcf; nxt_router_listener_conf_t lscf; - static nxt_str_t http_path = nxt_string("/settings/http"); - static nxt_str_t applications_path = nxt_string("/applications"); - static nxt_str_t listeners_path = nxt_string("/listeners"); - static nxt_str_t routes_path = nxt_string("/routes"); - static nxt_str_t access_log_path = nxt_string("/access_log"); + static const nxt_str_t settings_path = nxt_string("/settings"); + static const nxt_str_t http_path = nxt_string("/settings/http"); + static const nxt_str_t applications_path = nxt_string("/applications"); + static const nxt_str_t listeners_path = nxt_string("/listeners"); + static const nxt_str_t routes_path = nxt_string("/routes"); + static const nxt_str_t access_log_path = nxt_string("/access_log"); #if (NXT_TLS) - static nxt_str_t certificate_path = nxt_string("/tls/certificate"); - static nxt_str_t conf_commands_path = nxt_string("/tls/conf_commands"); - static nxt_str_t conf_cache_path = nxt_string("/tls/session/cache_size"); - static nxt_str_t conf_timeout_path = nxt_string("/tls/session/timeout"); - static nxt_str_t conf_tickets = nxt_string("/tls/session/tickets"); + static const nxt_str_t certificate_path = nxt_string("/tls/certificate"); + static const nxt_str_t conf_commands_path = + nxt_string("/tls/conf_commands"); + static const nxt_str_t conf_cache_path = + nxt_string("/tls/session/cache_size"); + static const nxt_str_t conf_timeout_path = + nxt_string("/tls/session/timeout"); + static const nxt_str_t conf_tickets = nxt_string("/tls/session/tickets"); #endif #if (NXT_HAVE_NJS) - static nxt_str_t js_module_path = nxt_string("/settings/js_module"); + static const nxt_str_t js_module_path = nxt_string("/settings/js_module"); #endif - static nxt_str_t static_path = nxt_string("/settings/http/static"); - static nxt_str_t websocket_path = nxt_string("/settings/http/websocket"); - static nxt_str_t forwarded_path = nxt_string("/forwarded"); - static nxt_str_t client_ip_path = nxt_string("/client_ip"); + static const nxt_str_t static_path = nxt_string("/settings/http/static"); + static const nxt_str_t websocket_path = + nxt_string("/settings/http/websocket"); + static const nxt_str_t forwarded_path = nxt_string("/forwarded"); + static const nxt_str_t client_ip_path = nxt_string("/client_ip"); root = nxt_conf_json_parse(tmcf->mem_pool, start, end, NULL); if (root == NULL) { @@ -1661,11 +1681,14 @@ nxt_router_conf_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, rtcf = tmcf->router_conf; mp = rtcf->mem_pool; - ret = nxt_conf_map_object(mp, root, nxt_router_conf, - nxt_nitems(nxt_router_conf), rtcf); - if (ret != NXT_OK) { - nxt_alert(task, "root map error"); - return NXT_ERROR; + settings = nxt_conf_get_path(root, &settings_path); + if (settings != NULL) { + ret = nxt_conf_map_object(mp, settings, nxt_router_conf, + nxt_nitems(nxt_router_conf), rtcf); + if (ret != NXT_OK) { + nxt_alert(task, "router_conf map error"); + return NXT_ERROR; + } } if (rtcf->threads == 0) { @@ -1952,13 +1975,10 @@ nxt_router_conf_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, break; } - skcf = nxt_router_socket_conf(task, tmcf, &name); - if (skcf == NULL) { - goto fail; - } - nxt_memzero(&lscf, sizeof(lscf)); + lscf.backlog = -1; + ret = 
nxt_conf_map_object(mp, listener, nxt_router_listener_conf, nxt_nitems(nxt_router_listener_conf), &lscf); @@ -1969,6 +1989,11 @@ nxt_router_conf_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, nxt_debug(task, "application: %V", &lscf.application); + skcf = nxt_router_socket_conf(task, tmcf, &name, lscf.backlog); + if (skcf == NULL) { + goto fail; + } + // STUB, default values if http block is not defined. skcf->header_buffer_size = 2048; skcf->large_header_buffer_size = 8192; @@ -1988,6 +2013,7 @@ nxt_router_conf_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, skcf->proxy_read_timeout = 30 * 1000; skcf->server_version = 1; + skcf->chunked_transform = 0; skcf->websocket_conf.max_frame_size = 1024 * 1024; skcf->websocket_conf.read_timeout = 60 * 1000; @@ -2281,7 +2307,7 @@ nxt_router_conf_process_static(nxt_task_t *task, nxt_router_conf_t *rtcf, nxt_uint_t exts; nxt_conf_value_t *mtypes_conf, *ext_conf, *value; - static nxt_str_t mtypes_path = nxt_string("/mime_types"); + static const nxt_str_t mtypes_path = nxt_string("/mime_types"); mp = rtcf->mem_pool; @@ -2358,11 +2384,11 @@ nxt_router_conf_forward(nxt_task_t *task, nxt_mp_t *mp, nxt_conf_value_t *conf) nxt_http_forward_t *forward; nxt_http_route_addr_rule_t *source; - static nxt_str_t header_path = nxt_string("/header"); - static nxt_str_t client_ip_path = nxt_string("/client_ip"); - static nxt_str_t protocol_path = nxt_string("/protocol"); - static nxt_str_t source_path = nxt_string("/source"); - static nxt_str_t recursive_path = nxt_string("/recursive"); + static const nxt_str_t header_path = nxt_string("/header"); + static const nxt_str_t client_ip_path = nxt_string("/client_ip"); + static const nxt_str_t protocol_path = nxt_string("/protocol"); + static const nxt_str_t source_path = nxt_string("/source"); + static const nxt_str_t recursive_path = nxt_string("/recursive"); header_conf = nxt_conf_get_path(conf, &header_path); @@ -2671,7 +2697,7 @@ nxt_router_application_init(nxt_router_conf_t *rtcf, nxt_str_t *name, static nxt_socket_conf_t * nxt_router_socket_conf(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, - nxt_str_t *name) + nxt_str_t *name, int backlog) { size_t size; nxt_int_t ret; @@ -2715,7 +2741,7 @@ nxt_router_socket_conf(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, nxt_listen_socket_remote_size(ls); ls->socket = -1; - ls->backlog = NXT_LISTEN_BACKLOG; + ls->backlog = backlog > -1 ? 
backlog : NXT_LISTEN_BACKLOG; ls->flags = NXT_NONBLOCK; ls->read_after_accept = 1; } @@ -2862,7 +2888,7 @@ nxt_router_listen_socket_ready(nxt_task_t *task, nxt_port_recv_msg_t *msg, nxt_socket_defer_accept(task, s, rpc->socket_conf->listen->sockaddr); - ret = nxt_listen_socket(task, s, NXT_LISTEN_BACKLOG); + ret = nxt_listen_socket(task, s, rpc->socket_conf->listen->backlog); if (nxt_slow_path(ret != NXT_OK)) { goto fail; } diff --git a/src/nxt_router.h b/src/nxt_router.h index 3e523001..06c6bb32 100644 --- a/src/nxt_router.h +++ b/src/nxt_router.h @@ -54,8 +54,7 @@ typedef struct { nxt_router_access_log_t *access_log; nxt_tstr_t *log_format; - nxt_tstr_t *log_expr; - uint8_t log_negate; /* 1 bit */ + nxt_tstr_cond_t log_cond; } nxt_router_conf_t; @@ -208,6 +207,7 @@ typedef struct { uint8_t discard_unsafe_fields; /* 1 bit */ uint8_t server_version; /* 1 bit */ + uint8_t chunked_transform; /* 1 bit */ nxt_http_forward_t *forwarded; nxt_http_forward_t *client_ip; diff --git a/src/nxt_router_access_log.c b/src/nxt_router_access_log.c index 7fc59972..afecd0b1 100644 --- a/src/nxt_router_access_log.c +++ b/src/nxt_router_access_log.c @@ -26,10 +26,8 @@ typedef struct { static void nxt_router_access_log_writer(nxt_task_t *task, nxt_http_request_t *r, nxt_router_access_log_t *access_log, nxt_tstr_t *format); -static void nxt_router_access_log_write_ready(nxt_task_t *task, void *obj, - void *data); -static void nxt_router_access_log_write_error(nxt_task_t *task, void *obj, - void *data); +static void nxt_router_access_log_write(nxt_task_t *task, nxt_http_request_t *r, + nxt_router_access_log_ctx_t *ctx); static void nxt_router_access_log_ready(nxt_task_t *task, nxt_port_recv_msg_t *msg, void *data); static void nxt_router_access_log_error(nxt_task_t *task, @@ -75,7 +73,7 @@ nxt_router_access_log_create(nxt_task_t *task, nxt_router_conf_t *rtcf, nxt_router_access_log_t *access_log; nxt_router_access_log_conf_t alcf; - static nxt_str_t log_format_str = nxt_string("$remote_addr - - " + static const nxt_str_t log_format_str = nxt_string("$remote_addr - - " "[$time_local] \"$request_line\" $status $body_bytes_sent " "\"$header_referer\" \"$header_user_agent\""); @@ -145,15 +143,8 @@ nxt_router_access_log_create(nxt_task_t *task, nxt_router_conf_t *rtcf, if (alcf.expr != NULL) { nxt_conf_get_string(alcf.expr, &str); - if (str.length > 0 && str.start[0] == '!') { - rtcf->log_negate = 1; - - str.start++; - str.length--; - } - - rtcf->log_expr = nxt_tstr_compile(rtcf->tstr_state, &str, 0); - if (nxt_slow_path(rtcf->log_expr == NULL)) { + ret = nxt_tstr_cond_compile(rtcf->tstr_state, &str, &rtcf->log_cond); + if (nxt_slow_path(ret != NXT_OK)) { return NXT_ERROR; } } @@ -180,8 +171,6 @@ nxt_router_access_log_writer(nxt_task_t *task, nxt_http_request_t *r, if (nxt_tstr_is_const(format)) { nxt_tstr_str(format, &ctx->text); - nxt_router_access_log_write_ready(task, r, ctx); - } else { rtcf = r->conf->socket_conf->router_conf; @@ -191,36 +180,26 @@ nxt_router_access_log_writer(nxt_task_t *task, nxt_http_request_t *r, return; } - nxt_tstr_query(task, r->tstr_query, format, &ctx->text); - nxt_tstr_query_resolve(task, r->tstr_query, ctx, - nxt_router_access_log_write_ready, - nxt_router_access_log_write_error); - } + ret = nxt_tstr_query(task, r->tstr_query, format, &ctx->text); + if (nxt_slow_path(ret != NXT_OK)) { + return; + } + } + + nxt_router_access_log_write(task, r, ctx); } static void -nxt_router_access_log_write_ready(nxt_task_t *task, void *obj, void *data) +nxt_router_access_log_write(nxt_task_t *task, 
nxt_http_request_t *r, + nxt_router_access_log_ctx_t *ctx) { - nxt_http_request_t *r; - nxt_router_access_log_ctx_t *ctx; - - r = obj; - ctx = data; - nxt_fd_write(ctx->access_log->fd, ctx->text.start, ctx->text.length); nxt_http_request_close_handler(task, r, r->proto.any); } -static void -nxt_router_access_log_write_error(nxt_task_t *task, void *obj, void *data) -{ - -} - - void nxt_router_access_log_open(nxt_task_t *task, nxt_router_temp_conf_t *tmcf) { diff --git a/src/nxt_runtime.c b/src/nxt_runtime.c index 0e7f879e..de76f19e 100644 --- a/src/nxt_runtime.c +++ b/src/nxt_runtime.c @@ -895,8 +895,7 @@ nxt_runtime_conf_init(nxt_task_t *task, nxt_runtime_t *rt) return NXT_ERROR; } - ret = mkdir((char *) file_name.start, S_IRWXU); - + ret = mkdir((char *) file_name.start, 0700); if (nxt_fast_path(ret == 0 || nxt_errno == EEXIST)) { rt->certs.length = file_name.len; rt->certs.start = file_name.start; @@ -912,8 +911,7 @@ nxt_runtime_conf_init(nxt_task_t *task, nxt_runtime_t *rt) return NXT_ERROR; } - ret = mkdir((char *) file_name.start, S_IRWXU); - + ret = mkdir((char *) file_name.start, 0700); if (nxt_fast_path(ret == 0 || nxt_errno == EEXIST)) { rt->scripts.length = file_name.len; rt->scripts.start = file_name.start; @@ -1490,7 +1488,7 @@ nxt_runtime_pid_file_create(nxt_task_t *task, nxt_file_name_t *pid_file) file.name = pid_file; - nxt_fs_mkdir_parent(pid_file, 0755); + nxt_fs_mkdir_p_dirname(pid_file, 0755); n = nxt_file_open(task, &file, O_WRONLY, O_CREAT | O_TRUNC, NXT_FILE_DEFAULT_ACCESS); diff --git a/src/nxt_script.c b/src/nxt_script.c index 70045a22..05d9561d 100644 --- a/src/nxt_script.c +++ b/src/nxt_script.c @@ -37,14 +37,6 @@ static void nxt_script_buf_completion(nxt_task_t *task, void *obj, void *data); static nxt_lvlhsh_t nxt_script_info; -static njs_vm_ops_t nxt_js_ops = { - NULL, - NULL, - nxt_js_module_loader, - NULL, -}; - - nxt_script_t * nxt_script_new(nxt_task_t *task, nxt_str_t *name, u_char *data, size_t size, u_char *error) @@ -63,8 +55,6 @@ nxt_script_new(nxt_task_t *task, nxt_str_t *name, u_char *data, size_t size, opts.file.start = (u_char *) "default"; opts.file.length = 7; - opts.ops = &nxt_js_ops; - vm = njs_vm_create(&opts); if (nxt_slow_path(vm == NULL)) { return NULL; diff --git a/src/nxt_sockaddr.c b/src/nxt_sockaddr.c index 32941893..4d1e723b 100644 --- a/src/nxt_sockaddr.c +++ b/src/nxt_sockaddr.c @@ -732,6 +732,11 @@ nxt_sockaddr_inet_parse(nxt_mp_t *mp, nxt_str_t *addr) length = p - addr->start; } + if (length == 0) { + nxt_thread_log_error(NXT_LOG_ERR, "invalid address \"%V\"", addr); + return NULL; + } + inaddr = INADDR_ANY; if (length != 1 || addr->start[0] != '*') { diff --git a/src/nxt_status.c b/src/nxt_status.c index f8002e86..92cbf2e6 100644 --- a/src/nxt_status.c +++ b/src/nxt_status.c @@ -6,40 +6,131 @@ #include <nxt_main.h> #include <nxt_conf.h> #include <nxt_status.h> +#include <nxt_application.h> nxt_conf_value_t * nxt_status_get(nxt_status_report_t *report, nxt_mp_t *mp) { - size_t i; - nxt_str_t name; - nxt_int_t ret; - nxt_status_app_t *app; - nxt_conf_value_t *status, *obj, *apps, *app_obj; - - static nxt_str_t conns_str = nxt_string("connections"); - static nxt_str_t acc_str = nxt_string("accepted"); - static nxt_str_t active_str = nxt_string("active"); - static nxt_str_t idle_str = nxt_string("idle"); - static nxt_str_t closed_str = nxt_string("closed"); - static nxt_str_t reqs_str = nxt_string("requests"); - static nxt_str_t total_str = nxt_string("total"); - static nxt_str_t apps_str = nxt_string("applications"); - static 
nxt_str_t procs_str = nxt_string("processes"); - static nxt_str_t run_str = nxt_string("running"); - static nxt_str_t start_str = nxt_string("starting"); - - status = nxt_conf_create_object(mp, 3); + size_t i, nr_langs; + uint16_t lang_cnts[NXT_APP_UNKNOWN] = { 1 }; + uint32_t idx = 0; + nxt_str_t name; + nxt_int_t ret; + nxt_array_t *langs; + nxt_thread_t *thr; + nxt_app_type_t type, prev_type; + nxt_status_app_t *app; + nxt_conf_value_t *status, *obj, *mods, *apps, *app_obj, *mod_obj; + nxt_app_lang_module_t *modules; + + static const nxt_str_t modules_str = nxt_string("modules"); + static const nxt_str_t version_str = nxt_string("version"); + static const nxt_str_t lib_str = nxt_string("lib"); + static const nxt_str_t conns_str = nxt_string("connections"); + static const nxt_str_t acc_str = nxt_string("accepted"); + static const nxt_str_t active_str = nxt_string("active"); + static const nxt_str_t idle_str = nxt_string("idle"); + static const nxt_str_t closed_str = nxt_string("closed"); + static const nxt_str_t reqs_str = nxt_string("requests"); + static const nxt_str_t total_str = nxt_string("total"); + static const nxt_str_t apps_str = nxt_string("applications"); + static const nxt_str_t procs_str = nxt_string("processes"); + static const nxt_str_t run_str = nxt_string("running"); + static const nxt_str_t start_str = nxt_string("starting"); + + status = nxt_conf_create_object(mp, 4); if (nxt_slow_path(status == NULL)) { return NULL; } + thr = nxt_thread(); + langs = thr->runtime->languages; + + modules = langs->elts; + /* + * We need to count the number of unique languages to correctly + * allocate the below mods object. + * + * We also need to count how many of each language. + * + * Start by skipping past NXT_APP_EXTERNAL which is always the + * first entry. 
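+ * + * E.g. with one PHP module and two Python modules loaded, the resulting + * "modules" member looks like {"php": {"version": ..., "lib": ...}, + * "python": [{"version": ..., "lib": ...}, {"version": ..., "lib": ...}]}: + * a language with a single module gets a plain object, a language with + * several modules gets an array of such objects.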
+ */ + for (i = 1, nr_langs = 0, prev_type = NXT_APP_UNKNOWN; i < langs->nelts; + i++) + { + type = modules[i].type; + + lang_cnts[type]++; + + if (type == prev_type) { + continue; + } + + nr_langs++; + prev_type = type; + } + + mods = nxt_conf_create_object(mp, nr_langs); + if (nxt_slow_path(mods == NULL)) { + return NULL; + } + + nxt_conf_set_member(status, &modules_str, mods, idx++); + + i = 1; + obj = mod_obj = NULL; + prev_type = NXT_APP_UNKNOWN; + for (size_t l = 0, a = 0; i < langs->nelts; i++) { + nxt_str_t item, mod_name; + + type = modules[i].type; + if (type != prev_type) { + a = 0; + + if (lang_cnts[type] == 1) { + mod_obj = nxt_conf_create_object(mp, 2); + obj = mod_obj; + } else { + mod_obj = nxt_conf_create_array(mp, lang_cnts[type]); + } + + if (nxt_slow_path(mod_obj == NULL)) { + return NULL; + } + + mod_name.start = (u_char *)modules[i].name; + mod_name.length = strlen(modules[i].name); + nxt_conf_set_member(mods, &mod_name, mod_obj, l++); + } + + if (lang_cnts[type] > 1) { + obj = nxt_conf_create_object(mp, 2); + if (nxt_slow_path(obj == NULL)) { + return NULL; + } + + nxt_conf_set_element(mod_obj, a++, obj); + } + + item.start = modules[i].version; + item.length = nxt_strlen(modules[i].version); + nxt_conf_set_member_string(obj, &version_str, &item, 0); + + item.start = (u_char *)modules[i].file; + item.length = strlen(modules[i].file); + nxt_conf_set_member_string(obj, &lib_str, &item, 1); + + prev_type = type; + } + obj = nxt_conf_create_object(mp, 4); if (nxt_slow_path(obj == NULL)) { return NULL; } - nxt_conf_set_member(status, &conns_str, obj, 0); + nxt_conf_set_member(status, &conns_str, obj, idx++); nxt_conf_set_member_integer(obj, &acc_str, report->accepted_conns, 0); nxt_conf_set_member_integer(obj, &active_str, report->accepted_conns @@ -53,7 +144,7 @@ nxt_status_get(nxt_status_report_t *report, nxt_mp_t *mp) return NULL; } - nxt_conf_set_member(status, &reqs_str, obj, 1); + nxt_conf_set_member(status, &reqs_str, obj, idx++); nxt_conf_set_member_integer(obj, &total_str, report->requests, 0); @@ -62,7 +153,7 @@ nxt_status_get(nxt_status_report_t *report, nxt_mp_t *mp) return NULL; } - nxt_conf_set_member(status, &apps_str, apps, 2); + nxt_conf_set_member(status, &apps_str, apps, idx++); for (i = 0; i < report->apps_count; i++) { app = &report->apps[i]; diff --git a/src/nxt_tstr.c b/src/nxt_tstr.c index edf6860a..50df4c47 100644 --- a/src/nxt_tstr.c +++ b/src/nxt_tstr.c @@ -36,14 +36,8 @@ struct nxt_tstr_query_s { nxt_tstr_state_t *state; nxt_tstr_cache_t *cache; - nxt_uint_t waiting; - nxt_uint_t failed; /* 1 bit */ - void *ctx; void *data; - - nxt_work_handler_t ready; - nxt_work_handler_t error; }; @@ -81,7 +75,7 @@ nxt_tstr_state_new(nxt_mp_t *mp, nxt_bool_t test) nxt_tstr_t * -nxt_tstr_compile(nxt_tstr_state_t *state, nxt_str_t *str, +nxt_tstr_compile(nxt_tstr_state_t *state, const nxt_str_t *str, nxt_tstr_flags_t flags) { u_char *p; @@ -159,7 +153,7 @@ nxt_tstr_test(nxt_tstr_state_t *state, nxt_str_t *str, u_char *error) #else nxt_sprintf(error, error + NXT_MAX_ERROR_STR, "Unit is built without support of njs: " - "\"--njs\" ./configure option is missing."); + "\"--njs\" ./configure option is missing.%Z"); return NXT_ERROR; #endif @@ -202,6 +196,26 @@ nxt_tstr_state_release(nxt_tstr_state_t *state) } +nxt_int_t +nxt_tstr_cond_compile(nxt_tstr_state_t *state, nxt_str_t *str, + nxt_tstr_cond_t *cond) +{ + if (str->length > 0 && str->start[0] == '!') { + cond->negate = 1; + + str->start++; + str->length--; + } + + cond->expr = nxt_tstr_compile(state, str, 0); 
+ if (nxt_slow_path(cond->expr == NULL)) { + return NXT_ERROR; + } + + return NXT_OK; +} + + nxt_bool_t nxt_tstr_is_const(nxt_tstr_t *tstr) { @@ -246,7 +260,7 @@ nxt_tstr_query_init(nxt_tstr_query_t **query_p, nxt_tstr_state_t *state, } -void +nxt_int_t nxt_tstr_query(nxt_task_t *task, nxt_tstr_query_t *query, nxt_tstr_t *tstr, nxt_str_t *val) { @@ -254,11 +268,7 @@ nxt_tstr_query(nxt_task_t *task, nxt_tstr_query_t *query, nxt_tstr_t *tstr, if (nxt_tstr_is_const(tstr)) { nxt_tstr_str(tstr, val); - return; - } - - if (nxt_slow_path(query->failed)) { - return; + return NXT_OK; } if (tstr->type == NXT_TSTR_VAR) { @@ -267,8 +277,7 @@ nxt_tstr_query(nxt_task_t *task, nxt_tstr_query_t *query, nxt_tstr_t *tstr, tstr->flags & NXT_TSTR_LOGGING); if (nxt_slow_path(ret != NXT_OK)) { - query->failed = 1; - return; + return NXT_ERROR; } } else { @@ -277,8 +286,7 @@ nxt_tstr_query(nxt_task_t *task, nxt_tstr_query_t *query, nxt_tstr_t *tstr, tstr->u.js, val, query->ctx); if (nxt_slow_path(ret != NXT_OK)) { - query->failed = 1; - return; + return NXT_ERROR; } #endif } @@ -294,43 +302,8 @@ nxt_tstr_query(nxt_task_t *task, nxt_tstr_query_t *query, nxt_tstr_t *tstr, nxt_debug(task, "tstr query: \"%V\", result: \"%V\"", &str, val); #endif -} - - -nxt_bool_t -nxt_tstr_query_failed(nxt_tstr_query_t *query) -{ - return query->failed; -} - -void -nxt_tstr_query_resolve(nxt_task_t *task, nxt_tstr_query_t *query, void *data, - nxt_work_handler_t ready, nxt_work_handler_t error) -{ - query->data = data; - query->ready = ready; - query->error = error; - - if (query->waiting == 0) { - nxt_work_queue_add(&task->thread->engine->fast_work_queue, - query->failed ? query->error : query->ready, - task, query->ctx, query->data); - } -} - - -void -nxt_tstr_query_handle(nxt_task_t *task, nxt_tstr_query_t *query, - nxt_bool_t failed) -{ - query->failed |= failed; - - if (--query->waiting == 0) { - nxt_work_queue_add(&task->thread->engine->fast_work_queue, - query->failed ? 
query->error : query->ready, - task, query->ctx, query->data); - } + return NXT_OK; } diff --git a/src/nxt_tstr.h b/src/nxt_tstr.h index 3e842f81..aca74e20 100644 --- a/src/nxt_tstr.h +++ b/src/nxt_tstr.h @@ -37,12 +37,20 @@ typedef enum { } nxt_tstr_flags_t; +typedef struct { + nxt_tstr_t *expr; + uint8_t negate; /* 1 bit */ +} nxt_tstr_cond_t; + + nxt_tstr_state_t *nxt_tstr_state_new(nxt_mp_t *mp, nxt_bool_t test); -nxt_tstr_t *nxt_tstr_compile(nxt_tstr_state_t *state, nxt_str_t *str, +nxt_tstr_t *nxt_tstr_compile(nxt_tstr_state_t *state, const nxt_str_t *str, nxt_tstr_flags_t flags); nxt_int_t nxt_tstr_test(nxt_tstr_state_t *state, nxt_str_t *str, u_char *error); nxt_int_t nxt_tstr_state_done(nxt_tstr_state_t *state, u_char *error); void nxt_tstr_state_release(nxt_tstr_state_t *state); +nxt_int_t nxt_tstr_cond_compile(nxt_tstr_state_t *state, nxt_str_t *str, + nxt_tstr_cond_t *cond); nxt_bool_t nxt_tstr_is_const(nxt_tstr_t *tstr); void nxt_tstr_str(nxt_tstr_t *tstr, nxt_str_t *str); @@ -50,13 +58,8 @@ void nxt_tstr_str(nxt_tstr_t *tstr, nxt_str_t *str); nxt_int_t nxt_tstr_query_init(nxt_tstr_query_t **query_p, nxt_tstr_state_t *state, nxt_tstr_cache_t *cache, void *ctx, nxt_mp_t *mp); -void nxt_tstr_query(nxt_task_t *task, nxt_tstr_query_t *query, nxt_tstr_t *tstr, - nxt_str_t *val); -nxt_bool_t nxt_tstr_query_failed(nxt_tstr_query_t *query); -void nxt_tstr_query_resolve(nxt_task_t *task, nxt_tstr_query_t *query, - void *data, nxt_work_handler_t ready, nxt_work_handler_t error); -void nxt_tstr_query_handle(nxt_task_t *task, nxt_tstr_query_t *query, - nxt_bool_t failed); +nxt_int_t nxt_tstr_query(nxt_task_t *task, nxt_tstr_query_t *query, + nxt_tstr_t *tstr, nxt_str_t *val); void nxt_tstr_query_release(nxt_tstr_query_t *query); diff --git a/src/nxt_types.h b/src/nxt_types.h index 03e9c187..723346d9 100644 --- a/src/nxt_types.h +++ b/src/nxt_types.h @@ -51,7 +51,6 @@ typedef off_t nxt_off_t; * 64-bit on 32-bit NetBSD 6.0; * 32-bit on 64-bit OpenBSD; * 64-bit in Linux x32 ABI; - * 64-bit in 32-bit Visual Studio C++ 2005. 
*/ #if (NXT_QNX) /* diff --git a/src/nxt_unit.c b/src/nxt_unit.c index b6291b2d..966a6c0f 100644 --- a/src/nxt_unit.c +++ b/src/nxt_unit.c @@ -1483,9 +1483,9 @@ nxt_unit_request_check_response_port(nxt_unit_request_info_t *req, pthread_mutex_lock(&lib->mutex); port = nxt_unit_port_hash_find(&lib->ports, port_id, 0); - port_impl = nxt_container_of(port, nxt_unit_port_impl_t, port); if (nxt_fast_path(port != NULL)) { + port_impl = nxt_container_of(port, nxt_unit_port_impl_t, port); req->response_port = port; if (nxt_fast_path(port_impl->ready)) { @@ -1948,9 +1948,9 @@ nxt_unit_request_group_dup_fields(nxt_unit_request_info_t *req) nxt_unit_field_t *fields, f; nxt_unit_request_t *r; - static nxt_str_t content_length = nxt_string("content-length"); - static nxt_str_t content_type = nxt_string("content-type"); - static nxt_str_t cookie = nxt_string("cookie"); + static const nxt_str_t content_length = nxt_string("content-length"); + static const nxt_str_t content_type = nxt_string("content-type"); + static const nxt_str_t cookie = nxt_string("cookie"); nxt_unit_req_debug(req, "group_dup_fields"); @@ -3502,6 +3502,10 @@ nxt_unit_mmap_get(nxt_unit_ctx_t *ctx, nxt_unit_port_t *port, pthread_mutex_lock(&lib->outgoing.mutex); + if (nxt_slow_path(lib->outgoing.elts == NULL)) { + goto skip; + } + retry: outgoing_size = lib->outgoing.size; @@ -3598,6 +3602,8 @@ retry: goto retry; } +skip: + *c = 0; hdr = nxt_unit_new_mmap(ctx, port, *n); @@ -3851,7 +3857,7 @@ nxt_unit_shm_open(nxt_unit_ctx_t *ctx, size_t size) #elif (NXT_HAVE_SHM_OPEN_ANON) - fd = shm_open(SHM_ANON, O_RDWR, S_IRUSR | S_IWUSR); + fd = shm_open(SHM_ANON, O_RDWR, 0600); if (nxt_slow_path(fd == -1)) { nxt_unit_alert(ctx, "shm_open(SHM_ANON) failed: %s (%d)", strerror(errno), errno); @@ -3864,7 +3870,7 @@ nxt_unit_shm_open(nxt_unit_ctx_t *ctx, size_t size) /* Just in case. 
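A stale segment with the same name may have been left behind by a crashed process; unlinking it first keeps the O_CREAT | O_EXCL shm_open() below from failing with EEXIST.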
*/ shm_unlink(name); - fd = shm_open(name, O_CREAT | O_EXCL | O_RDWR, S_IRUSR | S_IWUSR); + fd = shm_open(name, O_CREAT | O_EXCL | O_RDWR, 0600); if (nxt_slow_path(fd == -1)) { nxt_unit_alert(ctx, "shm_open(%s) failed: %s (%d)", name, strerror(errno), errno); diff --git a/src/nxt_upstream.c b/src/nxt_upstream.c index 17593173..c92d4a50 100644 --- a/src/nxt_upstream.c +++ b/src/nxt_upstream.c @@ -25,7 +25,7 @@ nxt_upstreams_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, nxt_upstreams_t *upstreams; nxt_conf_value_t *upstreams_conf, *upcf; - static nxt_str_t upstreams_name = nxt_string("upstreams"); + static const nxt_str_t upstreams_name = nxt_string("upstreams"); upstreams_conf = nxt_conf_get_object_member(conf, &upstreams_name, NULL); diff --git a/src/nxt_upstream.h b/src/nxt_upstream.h index afc53774..0c3b4c8b 100644 --- a/src/nxt_upstream.h +++ b/src/nxt_upstream.h @@ -52,7 +52,7 @@ struct nxt_upstream_s { struct nxt_upstreams_s { uint32_t items; - nxt_upstream_t upstream[0]; + nxt_upstream_t upstream[]; }; diff --git a/src/nxt_upstream_round_robin.c b/src/nxt_upstream_round_robin.c index 31e2f48a..96c3e44e 100644 --- a/src/nxt_upstream_round_robin.c +++ b/src/nxt_upstream_round_robin.c @@ -23,7 +23,7 @@ struct nxt_upstream_round_robin_server_s { struct nxt_upstream_round_robin_s { uint32_t items; - nxt_upstream_round_robin_server_t server[0]; + nxt_upstream_round_robin_server_t server[]; }; @@ -52,8 +52,8 @@ nxt_upstream_round_robin_create(nxt_task_t *task, nxt_router_temp_conf_t *tmcf, nxt_conf_value_t *servers_conf, *srvcf, *wtcf; nxt_upstream_round_robin_t *urr; - static nxt_str_t servers = nxt_string("servers"); - static nxt_str_t weight = nxt_string("weight"); + static const nxt_str_t servers = nxt_string("servers"); + static const nxt_str_t weight = nxt_string("weight"); mp = tmcf->router_conf->mem_pool; diff --git a/src/nxt_var.c b/src/nxt_var.c index 57110f66..16aa1c7e 100644 --- a/src/nxt_var.c +++ b/src/nxt_var.c @@ -30,14 +30,8 @@ struct nxt_var_query_s { nxt_var_cache_t cache; - nxt_uint_t waiting; - nxt_uint_t failed; /* 1 bit */ - void *ctx; void *data; - - nxt_work_handler_t ready; - nxt_work_handler_t error; }; @@ -566,7 +560,7 @@ nxt_var_interpreter(nxt_task_t *task, nxt_tstr_state_t *state, } if (last != var->length) { - p = nxt_cpymem(p, &src[last], var->length - last); + nxt_cpymem(p, &src[last], var->length - last); } return NXT_OK; diff --git a/src/nxt_vector.c b/src/nxt_vector.c deleted file mode 100644 index 4737248c..00000000 --- a/src/nxt_vector.c +++ /dev/null @@ -1,156 +0,0 @@ - -/* - * Copyright (C) Igor Sysoev - * Copyright (C) NGINX, Inc. 
- */ - -#include <nxt_main.h> - - -nxt_vector_t * -nxt_vector_create(nxt_uint_t items, size_t item_size, - const nxt_mem_proto_t *proto, void *pool) -{ - nxt_vector_t *vector; - - vector = proto->alloc(pool, sizeof(nxt_vector_t) + items * item_size); - - if (nxt_fast_path(vector != NULL)) { - vector->start = nxt_pointer_to(vector, sizeof(nxt_vector_t)); - vector->items = 0; - vector->item_size = item_size; - vector->avalaible = items; - vector->type = NXT_VECTOR_EMBEDDED; - } - - return vector; -} - - -void * -nxt_vector_init(nxt_vector_t *vector, nxt_uint_t items, size_t item_size, - const nxt_mem_proto_t *proto, void *pool) -{ - vector->start = proto->alloc(pool, items * item_size); - - if (nxt_fast_path(vector->start != NULL)) { - vector->items = 0; - vector->item_size = item_size; - vector->avalaible = items; - vector->type = NXT_VECTOR_INITED; - } - - return vector->start; -} - - -void -nxt_vector_destroy(nxt_vector_t *vector, const nxt_mem_proto_t *proto, - void *pool) -{ - switch (vector->type) { - - case NXT_VECTOR_INITED: - proto->free(pool, vector->start); -#if (NXT_DEBUG) - vector->start = NULL; - vector->items = 0; - vector->avalaible = 0; -#endif - break; - - case NXT_VECTOR_DESCRETE: - proto->free(pool, vector->start); - - /* Fall through. */ - - case NXT_VECTOR_EMBEDDED: - proto->free(pool, vector); - break; - } -} - - -void * -nxt_vector_add(nxt_vector_t *vector, const nxt_mem_proto_t *proto, void *pool) -{ - void *item, *start, *old; - size_t size; - uint32_t n; - - n = vector->avalaible; - - if (n == vector->items) { - - if (n < 16) { - /* Allocate new vector twice as much as current. */ - n *= 2; - - } else { - /* Allocate new vector half as much as current. */ - n += n / 2; - } - - size = n * vector->item_size; - - start = proto->alloc(pool, size); - if (nxt_slow_path(start == NULL)) { - return NULL; - } - - vector->avalaible = n; - old = vector->start; - vector->start = start; - - nxt_memcpy(start, old, size); - - if (vector->type == NXT_VECTOR_EMBEDDED) { - vector->type = NXT_VECTOR_DESCRETE; - - } else { - proto->free(pool, old); - } - } - - item = nxt_pointer_to(vector->start, vector->item_size * vector->items); - - vector->items++; - - return item; -} - - -void * -nxt_vector_zero_add(nxt_vector_t *vector, const nxt_mem_proto_t *proto, - void *pool) -{ - void *item; - - item = nxt_vector_add(vector, proto, pool); - - if (nxt_fast_path(item != NULL)) { - nxt_memzero(item, vector->item_size); - } - - return item; -} - - -void -nxt_vector_remove(nxt_vector_t *vector, void *item) -{ - u_char *next, *last, *end; - uint32_t item_size; - - item_size = vector->item_size; - end = nxt_pointer_to(vector->start, item_size * vector->items); - last = end - item_size; - - if (item != last) { - next = nxt_pointer_to(item, item_size); - - nxt_memmove(item, next, end - next); - } - - vector->items--; -} diff --git a/src/nxt_vector.h b/src/nxt_vector.h deleted file mode 100644 index dcac53d4..00000000 --- a/src/nxt_vector.h +++ /dev/null @@ -1,65 +0,0 @@ - -/* - * Copyright (C) Igor Sysoev - * Copyright (C) NGINX, Inc. - */ - -#ifndef _NXT_VECTOR_H_INCLUDED_ -#define _NXT_VECTOR_H_INCLUDED_ - - -typedef enum { - NXT_VECTOR_INITED = 0, - NXT_VECTOR_DESCRETE, - NXT_VECTOR_EMBEDDED, -} nxt_vector_type_t; - - -typedef struct { - void *start; - /* - * A vector can hold no more than 65536 items. - * The item size is no more than 64K. 
- */ - uint16_t items; - uint16_t avalaible; - uint16_t item_size; - nxt_vector_type_t type:8; -} nxt_vector_t; - - -NXT_EXPORT nxt_vector_t *nxt_vector_create(nxt_uint_t items, size_t item_size, - const nxt_mem_proto_t *proto, void *pool); -NXT_EXPORT void *nxt_vector_init(nxt_vector_t *vector, nxt_uint_t items, - size_t item_size, const nxt_mem_proto_t *proto, void *pool); -NXT_EXPORT void nxt_vector_destroy(nxt_vector_t *vector, - const nxt_mem_proto_t *proto, void *pool); -NXT_EXPORT void *nxt_vector_add(nxt_vector_t *vector, - const nxt_mem_proto_t *proto, void *pool); -NXT_EXPORT void *nxt_vector_zero_add(nxt_vector_t *vector, - const nxt_mem_proto_t *proto, void *pool); -NXT_EXPORT void nxt_vector_remove(nxt_vector_t *vector, void *item); - - -#define nxt_vector_last(vector) \ - nxt_pointer_to((vector)->start, \ - (vector)->item_size * ((vector)->items - 1)) - - -#define nxt_vector_reset(vector) \ - (vector)->items = 0; - - -#define nxt_vector_is_empty(vector) \ - ((vector)->items == 0) - - -nxt_inline void * -nxt_vector_remove_last(nxt_vector_t *vector) -{ - vector->items--; - return nxt_pointer_to(vector->start, vector->item_size * vector->items); -} - - -#endif /* _NXT_VECTOR_H_INCLUDED_ */ diff --git a/src/perl/nxt_perl_psgi.c b/src/perl/nxt_perl_psgi.c index 807d1741..271494ac 100644 --- a/src/perl/nxt_perl_psgi.c +++ b/src/perl/nxt_perl_psgi.c @@ -407,7 +407,7 @@ nxt_perl_psgi_module_create(const char *script) char *buf, *p; size_t length; - static nxt_str_t prefix = nxt_string( + static const nxt_str_t prefix = nxt_string( "package NGINX::Unit::Sandbox;" "sub new {" " return bless {}, $_[0];" @@ -415,7 +415,7 @@ nxt_perl_psgi_module_create(const char *script) "{my $app = do \"" ); - static nxt_str_t suffix = nxt_string_zero( + static const nxt_str_t suffix = nxt_string_zero( "\";" "unless ($app) {" " if($@ || $1) {die $@ || $1}" diff --git a/src/python/nxt_python.c b/src/python/nxt_python.c index 7c059649..143d8d5d 100644 --- a/src/python/nxt_python.c +++ b/src/python/nxt_python.c @@ -403,11 +403,13 @@ nxt_python_set_target(nxt_task_t *task, nxt_python_target_t *target, char *callable, *module_name; PyObject *module, *obj; nxt_str_t str; + nxt_bool_t is_factory = 0; nxt_conf_value_t *value; - static nxt_str_t module_str = nxt_string("module"); - static nxt_str_t callable_str = nxt_string("callable"); - static nxt_str_t prefix_str = nxt_string("prefix"); + static const nxt_str_t module_str = nxt_string("module"); + static const nxt_str_t callable_str = nxt_string("callable"); + static const nxt_str_t prefix_str = nxt_string("prefix"); + static const nxt_str_t factory_flag_str = nxt_string("factory"); module = obj = NULL; @@ -449,7 +451,31 @@ nxt_python_set_target(nxt_task_t *task, nxt_python_target_t *target, goto fail; } - if (nxt_slow_path(PyCallable_Check(obj) == 0)) { + value = nxt_conf_get_object_member(conf, &factory_flag_str, NULL); + if (value != NULL) { + is_factory = nxt_conf_get_boolean(value); + } + + if (is_factory) { + if (nxt_slow_path(PyCallable_Check(obj) == 0)) { + nxt_alert(task, + "factory \"%s\" in module \"%s\" " + "can not be called to fetch callable", + callable, module_name); + Py_INCREF(obj); /* borrowed reference */ + goto fail; + } + + obj = PyObject_CallObject(obj, NULL); + if (nxt_slow_path(PyCallable_Check(obj) == 0)) { + nxt_alert(task, + "factory \"%s\" in module \"%s\" " + "did not return callable object", + callable, module_name); + goto fail; + } + + } else if (nxt_slow_path(PyCallable_Check(obj) == 0)) { nxt_alert(task, "\"%s\" in module 
\"%s\" is not a callable object", callable, module_name); goto fail; diff --git a/src/python/nxt_python.h b/src/python/nxt_python.h index 37e6265e..f5154514 100644 --- a/src/python/nxt_python.h +++ b/src/python/nxt_python.h @@ -48,7 +48,7 @@ typedef struct { typedef struct { nxt_int_t count; - nxt_python_target_t target[0]; + nxt_python_target_t target[]; } nxt_python_targets_t; diff --git a/src/test/nxt_clone_test.c b/src/test/nxt_clone_test.c index 64b9ddea..1a864f0e 100644 --- a/src/test/nxt_clone_test.c +++ b/src/test/nxt_clone_test.c @@ -560,9 +560,9 @@ nxt_clone_test_parse_map(nxt_task_t *task, nxt_str_t *map_str, nxt_runtime_t *rt; nxt_conf_value_t *array, *obj, *value; - static nxt_str_t host_name = nxt_string("host"); - static nxt_str_t cont_name = nxt_string("container"); - static nxt_str_t size_name = nxt_string("size"); + static const nxt_str_t host_name = nxt_string("host"); + static const nxt_str_t cont_name = nxt_string("container"); + static const nxt_str_t size_name = nxt_string("size"); rt = task->thread->runtime; diff --git a/src/test/nxt_http_parse_test.c b/src/test/nxt_http_parse_test.c index 5f1a518c..474b3f8d 100644 --- a/src/test/nxt_http_parse_test.c +++ b/src/test/nxt_http_parse_test.c @@ -762,7 +762,7 @@ nxt_http_parse_test_request_line(nxt_http_request_parse_t *rp, return NXT_ERROR; } - if (rp->complex_target != (test->complex_target | test->quoted_target)) { + if (rp->complex_target != test->complex_target) { nxt_log_alert(log, "http parse test case failed:\n" " - request:\n\"%V\"\n" " - complex_target: %d (expected: %d)", @@ -770,7 +770,6 @@ nxt_http_parse_test_request_line(nxt_http_request_parse_t *rp, return NXT_ERROR; } -#if 0 if (rp->quoted_target != test->quoted_target) { nxt_log_alert(log, "http parse test case failed:\n" " - request:\n\"%V\"\n" @@ -779,6 +778,7 @@ nxt_http_parse_test_request_line(nxt_http_request_parse_t *rp, return NXT_ERROR; } +#if 0 if (rp->space_in_target != test->space_in_target) { nxt_log_alert(log, "http parse test case failed:\n" " - request:\n\"%V\"\n" diff --git a/src/wasm-wasi-component/Cargo.lock b/src/wasm-wasi-component/Cargo.lock index bc09e96a..6b8c7cba 100644 --- a/src/wasm-wasi-component/Cargo.lock +++ b/src/wasm-wasi-component/Cargo.lock @@ -8,7 +8,7 @@ version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ - "gimli", + "gimli 0.28.1", ] [[package]] @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.8.7" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "once_cell", @@ -93,20 +93,11 @@ dependencies = [ "cfg-if", "libc", "miniz_oxide", - "object", + "object 0.32.2", "rustc-demangle", ] [[package]] -name = "bincode" -version = "1.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - -[[package]] name = "bindgen" version = "0.68.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -155,9 +146,9 @@ checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] name = "cap-fs-ext" -version = "2.0.1" +version = "3.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "88e341d15ac1029aadce600be764a1a1edafe40e03cde23285bc1d261b3a4866" +checksum = "eb23061fc1c4ead4e45ca713080fe768e6234e959f5a5c399c39eb41aa34e56e" dependencies = [ "cap-primitives", "cap-std", @@ -167,9 +158,9 @@ dependencies = [ [[package]] name = "cap-net-ext" -version = "2.0.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "434168fe6533055f0f4204039abe3ff6d7db338ef46872a5fa39e9d5ad5ab7a9" +checksum = "f83ae11f116bcbafc5327c6af250341db96b5930046732e1905f7dc65887e0e1" dependencies = [ "cap-primitives", "cap-std", @@ -179,9 +170,9 @@ dependencies = [ [[package]] name = "cap-primitives" -version = "2.0.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe16767ed8eee6d3f1f00d6a7576b81c226ab917eb54b96e5f77a5216ef67abb" +checksum = "6d00bd8d26c4270d950eaaa837387964a2089a1c3c349a690a1fa03221d29531" dependencies = [ "ambient-authority", "fs-set-times", @@ -196,9 +187,9 @@ dependencies = [ [[package]] name = "cap-rand" -version = "2.0.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20e5695565f0cd7106bc3c7170323597540e772bb73e0be2cd2c662a0f8fa4ca" +checksum = "dbcb16a619d8b8211ed61f42bd290d2a1ac71277a69cf8417ec0996fa92f5211" dependencies = [ "ambient-authority", "rand", @@ -206,9 +197,9 @@ dependencies = [ [[package]] name = "cap-std" -version = "2.0.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "593db20e4c51f62d3284bae7ee718849c3214f93a3b94ea1899ad85ba119d330" +checksum = "19eb8e3d71996828751c1ed3908a439639752ac6bdc874e41469ef7fc15fbd7f" dependencies = [ "cap-primitives", "io-extras", @@ -218,9 +209,9 @@ dependencies = [ [[package]] name = "cap-time-ext" -version = "2.0.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03261630f291f425430a36f38c847828265bc928f517cdd2004c56f4b02f002b" +checksum = "61142dc51e25b7acc970ca578ce2c3695eac22bbba46c1073f5f583e78957725" dependencies = [ "ambient-authority", "cap-primitives", @@ -266,6 +257,12 @@ dependencies = [ ] [[package]] +name = "cobs" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" + +[[package]] name = "core-foundation-sys" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -273,73 +270,86 @@ checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cranelift-bforest" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d819feeda4c420a18f1e28236ca0ce1177b22bf7c8a44ddee92dfe40de15bcf0" +checksum = "b80c3a50b9c4c7e5b5f73c0ed746687774fc9e36ef652b110da8daebf0c6e0e6" dependencies = [ "cranelift-entity", ] [[package]] +name = "cranelift-bitset" +version = "0.111.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38778758c2ca918b05acb2199134e0c561fb577c50574259b26190b6c2d95ded" +dependencies = [ + "serde", + "serde_derive", +] + +[[package]] name = "cranelift-codegen" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9b8d03d5bdbca7e5f72b0e0a0f69933ed1f09e24be6c075aa6fe3f802b0cc0c" +checksum = "58258667ad10e468bfc13a8d620f50dfcd4bb35d668123e97defa2549b9ad397" dependencies = [ "bumpalo", 
"cranelift-bforest", + "cranelift-bitset", "cranelift-codegen-meta", "cranelift-codegen-shared", "cranelift-control", "cranelift-entity", "cranelift-isle", - "gimli", + "gimli 0.29.0", "hashbrown 0.14.3", "log", "regalloc2", + "rustc-hash", "smallvec", "target-lexicon", ] [[package]] name = "cranelift-codegen-meta" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3fd3664e38e51649b17dc30cfdd561273fe2f590dcd013fb75d9eabc6272dfb" +checksum = "043f0b702e529dcb07ff92bd7d40e7d5317b5493595172c5eb0983343751ee06" dependencies = [ "cranelift-codegen-shared", ] [[package]] name = "cranelift-codegen-shared" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b031ec5e605828975952622b5a77d49126f20ffe88d33719a0af66b23a0fc36" +checksum = "7763578888ab53eca5ce7da141953f828e82c2bfadcffc106d10d1866094ffbb" [[package]] name = "cranelift-control" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fada054d017cf2ed8f7ed2336e0517fc1b19e6825be1790de9eb00c94788362b" +checksum = "32db15f08c05df570f11e8ab33cb1ec449a64b37c8a3498377b77650bef33d8b" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "177b6f94ae8de6348eb45bf977c79ab9e3c40fc3ac8cb7ed8109560ea39bee7d" +checksum = "5289cdb399381a27e7bbfa1b42185916007c3d49aeef70b1d01cb4caa8010130" dependencies = [ + "cranelift-bitset", "serde", "serde_derive", ] [[package]] name = "cranelift-frontend" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebebd23a69a23e3ddea78e98ff3a2de222e88c8e045d81ef4a72f042e0d79dbd" +checksum = "31ba8ab24eb9470477e98ddfa3c799a649ac5a0d9a2042868c4c952133c234e8" dependencies = [ "cranelift-codegen", "log", @@ -349,15 +359,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1571bfc14df8966d12c6121b5325026591a4b4009e22fea0fe3765ab7cd33b96" +checksum = "2b72a3c5c166a70426dcb209bdd0bb71a787c1ea76023dc0974fbabca770e8f9" [[package]] name = "cranelift-native" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35a69c37e0c10b46fe5527f2397ac821046efbf5f7ec112c8b84df25712f465b" +checksum = "46a42424c956bbc31fc5c2706073df896156c5420ae8fa2a5d48dbc7b295d71b" dependencies = [ "cranelift-codegen", "libc", @@ -366,9 +376,9 @@ dependencies = [ [[package]] name = "cranelift-wasm" -version = "0.104.0" +version = "0.111.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b3fef8bbceb8cb56d3f1778b0418d75c5cf12ec571a35fc01eb41abb0227a25" +checksum = "49778df4289933d735b93c30a345513e030cf83101de0036e19b760f8aa09f68" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -376,7 +386,7 @@ dependencies = [ "itertools", "log", "smallvec", - "wasmparser 0.118.1", + "wasmparser", "wasmtime-types", ] @@ -416,6 +426,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + +[[package]] +name = "embedded-io" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d" + +[[package]] name = "encoding_rs" version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -491,6 +513,7 @@ checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -514,12 +537,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] name = "futures-io" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] name = "futures-sink" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -537,11 +582,16 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ + "futures-channel", "futures-core", + "futures-io", + "futures-macro", "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", + "slab", ] [[package]] @@ -560,6 +610,12 @@ name = "gimli" version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "gimli" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" dependencies = [ "fallible-iterator", "indexmap", @@ -574,9 +630,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "h2" -version = "0.4.2" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" +checksum = "816ec7294445779408f36fe57bc5b7fc1cf59664059096c65f905c1c61f58069" dependencies = [ "bytes", "fnv", @@ -607,6 +663,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ "ahash", + "serde", ] [[package]] @@ -678,9 +735,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.1.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5aa53871fc917b1a9ed87b683a5d86db645e23acb32c2e0785a353e522fb75" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" dependencies = [ "bytes", "futures-channel", @@ -692,6 +749,7 @@ dependencies = [ "httpdate", 
"itoa", "pin-project-lite", + "smallvec", "tokio", "want", ] @@ -770,9 +828,9 @@ checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "itertools" -version = "0.10.5" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ "either", ] @@ -827,6 +885,12 @@ dependencies = [ ] [[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] name = "libredox" version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -850,10 +914,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] -name = "mach" -version = "0.3.2" +name = "mach2" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" dependencies = [ "libc", ] @@ -880,15 +944,6 @@ dependencies = [ ] [[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg", -] - -[[package]] name = "minimal-lexical" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -905,9 +960,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "wasi", @@ -940,6 +995,15 @@ version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ + "memchr", +] + +[[package]] +name = "object" +version = "0.36.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" +dependencies = [ "crc32fast", "hashbrown 0.14.3", "indexmap", @@ -983,6 +1047,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] +name = "postcard" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f7f0a8d620d71c457dd1d47df76bb18960378da56af4527aaa10f515eee732e" +dependencies = [ + "cobs", + "embedded-io 0.4.0", + "embedded-io 0.6.1", + "serde", +] + +[[package]] name = "ppv-lite86" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1145,9 +1221,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustix" -version = "0.38.31" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ "bitflags 2.4.2", 
"errno", @@ -1160,23 +1236,32 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.10" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" dependencies = [ "log", "ring", + "rustls-pki-types", "rustls-webpki", - "sct", + "subtle", + "zeroize", ] [[package]] +name = "rustls-pki-types" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" + +[[package]] name = "rustls-webpki" -version = "0.101.7" +version = "0.102.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" dependencies = [ "ring", + "rustls-pki-types", "untrusted", ] @@ -1187,20 +1272,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] name = "semver" version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +dependencies = [ + "serde", +] [[package]] name = "serde" @@ -1268,6 +1346,9 @@ name = "smallvec" version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" +dependencies = [ + "serde", +] [[package]] name = "socket2" @@ -1298,6 +1379,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] name = "syn" version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1310,9 +1397,9 @@ dependencies = [ [[package]] name = "system-interface" -version = "0.26.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0682e006dd35771e392a6623ac180999a9a854b1d4a6c12fb2e804941c2b1f58" +checksum = "b858526d22750088a9b3cf2e3c2aacebd5377f13adeec02860c30d09113010a6" dependencies = [ "bitflags 2.4.2", "cap-fs-ext", @@ -1326,9 +1413,18 @@ dependencies = [ [[package]] name = "target-lexicon" -version = "0.12.13" +version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69758bda2e78f098e4ccb393021a0963bb3442eac05f135c30f61b7370bbafae" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] [[package]] name = "thiserror" @@ -1383,11 +1479,12 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.24.1" +version = "0.25.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ "rustls", + "rustls-pki-types", "tokio", ] @@ -1509,13 +1606,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "wasi-cap-std-sync" -version = "17.0.0" +name = "wasi-common" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db014d2ced91f17d1f1a8f2b76d6ea8d731bc1dbc8c2bbaec689d6a242568e5d" +checksum = "7336747832c6fe1086c81ef38b63dfeaeec48fc1b7c33a88fd16115cc940d178" dependencies = [ "anyhow", - "async-trait", + "bitflags 2.4.2", "cap-fs-ext", "cap-rand", "cap-std", @@ -1523,27 +1620,10 @@ dependencies = [ "fs-set-times", "io-extras", "io-lifetimes", + "log", "once_cell", "rustix", "system-interface", - "tracing", - "wasi-common", - "windows-sys 0.52.0", -] - -[[package]] -name = "wasi-common" -version = "17.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "449d17849e3c83a931374442fe2deee4d6bd1ebf469719ef44192e9e82e19c89" -dependencies = [ - "anyhow", - "bitflags 2.4.2", - "cap-rand", - "cap-std", - "io-extras", - "log", - "rustix", "thiserror", "tracing", "wasmtime", @@ -1607,9 +1687,9 @@ checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" [[package]] name = "wasm-encoder" -version = "0.38.1" +version = "0.215.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad2b51884de9c7f4fe2fd1043fccb8dcad4b1e29558146ee57a144d15779f3f" +checksum = "4fb56df3e06b8e6b77e37d2969a50ba51281029a9aeb3855e76b7f49b6418847" dependencies = [ "leb128", ] @@ -1622,11 +1702,14 @@ dependencies = [ "bindgen", "bytes", "cc", + "futures", "futures-util", "http", "http-body", "http-body-util", + "hyper", "tokio", + "wasi-common", "wasmtime", "wasmtime-wasi", "wasmtime-wasi-http", @@ -1634,83 +1717,89 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.118.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95ee9723b928e735d53000dec9eae7b07a60e490c85ab54abb66659fc61bfcd9" -dependencies = [ - "indexmap", - "semver", -] - -[[package]] -name = "wasmparser" -version = "0.121.0" +version = "0.215.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953cf6a7606ab31382cb1caa5ae403e77ba70c7f8e12eeda167e7040d42bfda8" +checksum = "53fbde0881f24199b81cf49b6ff8f9c145ac8eb1b7fc439adb5c099734f7d90e" dependencies = [ + "ahash", "bitflags 2.4.2", + "hashbrown 0.14.3", "indexmap", "semver", + "serde", ] [[package]] name = "wasmprinter" -version = "0.2.78" +version = "0.215.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05e32c13c59fdc64d3f6998a1d52eb1d362b6904a88b754190ccb85661ad577a" +checksum = "d8e9a325d85053408209b3d2ce5eaddd0dd6864d1cff7a007147ba073157defc" dependencies = [ "anyhow", - "wasmparser 0.121.0", + "termcolor", + "wasmparser", ] [[package]] name = "wasmtime" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "910fabce77e660f0e0e41cfd5f69fc8bf020a025f059718846e918db7177f469" +checksum = "9a5883d64dfc8423c56e3d8df27cffc44db25336aa468e8e0724fddf30a333d7" dependencies = [ "anyhow", "async-trait", - "bincode", + "bitflags 2.4.2", "bumpalo", + "cc", "cfg-if", "encoding_rs", + 
"hashbrown 0.14.3", "indexmap", "libc", + "libm", "log", - "object", + "mach2", + "memfd", + "object 0.36.3", "once_cell", "paste", + "postcard", + "psm", + "rustix", + "semver", "serde", "serde_derive", - "serde_json", + "smallvec", + "sptr", "target-lexicon", - "wasmparser 0.118.1", + "wasmparser", + "wasmtime-asm-macros", "wasmtime-component-macro", "wasmtime-component-util", "wasmtime-cranelift", "wasmtime-environ", "wasmtime-fiber", - "wasmtime-jit", - "wasmtime-runtime", + "wasmtime-jit-icache-coherence", + "wasmtime-slab", + "wasmtime-versioned-export-macros", "wasmtime-winch", "windows-sys 0.52.0", ] [[package]] name = "wasmtime-asm-macros" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37288142e9b4a61655a3bcbdc7316c2e4bb9e776b10ce3dd758f8186b4469572" +checksum = "1c4dc7e2a379c0dd6be5b55857d14c4b277f43a9c429a9e14403eb61776ae3be" dependencies = [ "cfg-if", ] [[package]] name = "wasmtime-component-macro" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad63de18eb42e586386b6091f787c82707cbd5ac5e9343216dba1976190cd03a" +checksum = "4b07773d1c3dab5f014ec61316ee317aa424033e17e70a63abdf7c3a47e58fcf" dependencies = [ "anyhow", "proc-macro2", @@ -1723,15 +1812,15 @@ dependencies = [ [[package]] name = "wasmtime-component-util" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e0a160c0c44369aa4bee6d311a8e4366943bab1651040cc8b0fcec2c9eb8906" +checksum = "e38d735320f4e83478369ce649ad8fe87c6b893220902e798547a225fc0c5874" [[package]] name = "wasmtime-cranelift" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3734cc01b7cd37bc62fdbcd9529ca9547440052d4b3886cfdec3b8081a5d3647" +checksum = "e570d831d0785d93d7d8c722b1eb9a34e0d0c1534317666f65892818358a2da9" dependencies = [ "anyhow", "cfg-if", @@ -1741,51 +1830,36 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "cranelift-wasm", - "gimli", + "gimli 0.29.0", "log", - "object", + "object 0.36.3", "target-lexicon", "thiserror", - "wasmparser 0.118.1", - "wasmtime-cranelift-shared", + "wasmparser", "wasmtime-environ", "wasmtime-versioned-export-macros", ] [[package]] -name = "wasmtime-cranelift-shared" -version = "17.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0eb33cd30c47844aa228d4d0030587e65c1108343f311fe9f7248b5bd9cb65c" -dependencies = [ - "anyhow", - "cranelift-codegen", - "cranelift-control", - "cranelift-native", - "gimli", - "object", - "target-lexicon", - "wasmtime-environ", -] - -[[package]] name = "wasmtime-environ" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a056b041fdea604f0972e2fae97958e7748d629a55180228348baefdfc217" +checksum = "c5fe80dfbd81687431a7d4f25929fae1ae96894786d5c96b14ae41164ee97377" dependencies = [ "anyhow", + "cranelift-bitset", "cranelift-entity", - "gimli", + "gimli 0.29.0", "indexmap", "log", - "object", + "object 0.36.3", + "postcard", + "semver", "serde", "serde_derive", "target-lexicon", - "thiserror", "wasm-encoder", - "wasmparser 0.118.1", + "wasmparser", "wasmprinter", "wasmtime-component-util", "wasmtime-types", @@ -1793,9 +1867,9 @@ dependencies = [ [[package]] name = "wasmtime-fiber" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"43987d0977c07f15c3608c2f255870c127ffd19e35eeedb1ac1dccedf9932a42" +checksum = "0f39043d13c7b58db69dc9a0feb191a961e75a9ec2402aebf42de183c022bb8a" dependencies = [ "anyhow", "cc", @@ -1807,85 +1881,42 @@ dependencies = [ ] [[package]] -name = "wasmtime-jit" -version = "17.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b3e48395ac672b386ed588d97a9612aa13a345008f26466f0dfb2a91628aa9f" -dependencies = [ - "anyhow", - "bincode", - "cfg-if", - "gimli", - "log", - "object", - "rustix", - "serde", - "serde_derive", - "target-lexicon", - "wasmtime-environ", - "wasmtime-jit-icache-coherence", - "wasmtime-runtime", - "windows-sys 0.52.0", -] - -[[package]] name = "wasmtime-jit-icache-coherence" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdc26415bb89e9ccd3bdc498fef63aabf665c4c0dd710c107691deb9694955da" +checksum = "d15de8429db996f0d17a4163a35eccc3f874cbfb50f29c379951ea1bbb39452e" dependencies = [ + "anyhow", "cfg-if", "libc", "windows-sys 0.52.0", ] [[package]] -name = "wasmtime-runtime" -version = "17.0.0" +name = "wasmtime-slab" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0abddaf17912aabaf39be0802d5eba9a002e956e902d1ebd438a2fe1c88769a2" -dependencies = [ - "anyhow", - "cc", - "cfg-if", - "encoding_rs", - "indexmap", - "libc", - "log", - "mach", - "memfd", - "memoffset", - "paste", - "psm", - "rustix", - "sptr", - "wasm-encoder", - "wasmtime-asm-macros", - "wasmtime-environ", - "wasmtime-fiber", - "wasmtime-versioned-export-macros", - "wasmtime-wmemcheck", - "windows-sys 0.52.0", -] +checksum = "1f68d38fa6b30c5e1fc7d608263062997306f79e577ebd197ddcd6b0f55d87d1" [[package]] name = "wasmtime-types" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35a95cdc1433729085beab42c0a5c742b431f25b17c40d7718e46df63d5ffc7" +checksum = "6634e7079d9c5cfc81af8610ed59b488cc5b7f9777a2f4c1667a2565c2e45249" dependencies = [ + "anyhow", "cranelift-entity", "serde", "serde_derive", - "thiserror", - "wasmparser 0.118.1", + "smallvec", + "wasmparser", ] [[package]] name = "wasmtime-versioned-export-macros" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad322733fe67e45743784d8b1df452bcb54f581572a4f1a646a4332deecbcc2" +checksum = "3850e3511d6c7f11a72d571890b0ed5f6204681f7f050b9de2690e7f13123fed" dependencies = [ "proc-macro2", "quote", @@ -1894,9 +1925,9 @@ dependencies = [ [[package]] name = "wasmtime-wasi" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "902cc299b73655c36679b77efdfce4bb5971992f1a4a8a436dd3809a6848ff0e" +checksum = "545ae8298ffce025604f7480f9c7d6948c985bef7ce9aee249ef79307813e83c" dependencies = [ "anyhow", "async-trait", @@ -1911,8 +1942,6 @@ dependencies = [ "futures", "io-extras", "io-lifetimes", - "libc", - "log", "once_cell", "rustix", "system-interface", @@ -1920,8 +1949,6 @@ dependencies = [ "tokio", "tracing", "url", - "wasi-cap-std-sync", - "wasi-common", "wasmtime", "wiggle", "windows-sys 0.52.0", @@ -1929,9 +1956,9 @@ dependencies = [ [[package]] name = "wasmtime-wasi-http" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "151fc711fad35034b8a6df00a5bcd5a7b1acb89ca12c2407f564a36ebd382e26" +checksum = 
"f5b50208c61fed1ac138b6bf84b8b44c921ba16ac79b1a511804ecd95c98fd73" dependencies = [ "anyhow", "async-trait", @@ -1952,26 +1979,26 @@ dependencies = [ [[package]] name = "wasmtime-winch" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e63aeca929f84560eec52c5af43bf5d623b92683b0195d9fb06da8ed860e092" +checksum = "2a25199625effa4c13dd790d64bd56884b014c69829431bfe43991c740bd5bc1" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", - "object", + "gimli 0.29.0", + "object 0.36.3", "target-lexicon", - "wasmparser 0.118.1", - "wasmtime-cranelift-shared", + "wasmparser", + "wasmtime-cranelift", "wasmtime-environ", "winch-codegen", ] [[package]] name = "wasmtime-wit-bindgen" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41e5675998fdc74495afdd90ad2bd221206a258075b23048af0535a969b07893" +checksum = "3cb331ac7ed1d5ba49cddcdb6b11973752a857148858bb308777d2fc5584121f" dependencies = [ "anyhow", "heck", @@ -1980,12 +2007,6 @@ dependencies = [ ] [[package]] -name = "wasmtime-wmemcheck" -version = "17.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b20a19e10d8cb50b45412fb21192982b7ce85c0122dc33bb71f1813e25dc6e52" - -[[package]] name = "wast" version = "35.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1996,9 +2017,12 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.4" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd" +dependencies = [ + "rustls-pki-types", +] [[package]] name = "which" @@ -2014,9 +2038,9 @@ dependencies = [ [[package]] name = "wiggle" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "737728db69a7657a5f6a7bac445c02d8564d603d62c46c95edf928554e67d072" +checksum = "cc850ca3c02c5835934d23f28cec4c5a3fb66fe0b4ecd968bbb35609dda5ddc0" dependencies = [ "anyhow", "async-trait", @@ -2029,9 +2053,9 @@ dependencies = [ [[package]] name = "wiggle-generate" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2460c7163b79ffefd9a564eaeab0a5b0e84bb91afdfeeb84d36f304ddbe08982" +checksum = "634b8804a67200bcb43ea8af5f7c53e862439a086b68b16fd333454bc74d5aab" dependencies = [ "anyhow", "heck", @@ -2044,9 +2068,9 @@ dependencies = [ [[package]] name = "wiggle-macro" -version = "17.0.0" +version = "24.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa8d8412375ba8325d61fbae56dead51dabfaec85d620ce36427922fb9cece83" +checksum = "474b7cbdb942c74031e619d66c600bba7f73867c5800fc2c2306cf307649be2f" dependencies = [ "proc-macro2", "quote", @@ -2071,6 +2095,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2078,17 +2111,18 @@ checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winch-codegen" -version = "0.15.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d2b346bad5397b219b4ff0a8fa7230936061ff07c61f05d589d8d81e06fb7b2" +checksum = "073efe897d9ead7fc609874f94580afc831114af5149b6a90ee0a3a39b497fe0" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", + "gimli 0.29.0", "regalloc2", "smallvec", "target-lexicon", - "wasmparser 0.118.1", + "wasmparser", + "wasmtime-cranelift", "wasmtime-environ", ] @@ -2245,9 +2279,9 @@ dependencies = [ [[package]] name = "wit-parser" -version = "0.13.1" +version = "0.215.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df4913a2219096373fd6512adead1fb77ecdaa59d7fc517972a7d30b12f625be" +checksum = "935a97eaffd57c3b413aa510f8f0b550a4a9fe7d59e79cd8b89a83dcb860321f" dependencies = [ "anyhow", "id-arena", @@ -2258,6 +2292,7 @@ dependencies = [ "serde_derive", "serde_json", "unicode-xid", + "wasmparser", ] [[package]] @@ -2291,3 +2326,9 @@ dependencies = [ "quote", "syn", ] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/src/wasm-wasi-component/Cargo.toml b/src/wasm-wasi-component/Cargo.toml index feb7f53c..bc325826 100644 --- a/src/wasm-wasi-component/Cargo.toml +++ b/src/wasm-wasi-component/Cargo.toml @@ -10,14 +10,17 @@ crate-type = ["cdylib"] [dependencies] anyhow = "1.0.75" bytes = "1.5.0" +futures = "0.3.30" futures-util = { version = "0.3.29", default-features = false } http = "1.0.0" http-body = { version = "1.0.0", default-features = false } http-body-util = "0.1.0" +hyper = "1.4.1" tokio = { version = "1.33.0", default-features = false } -wasmtime = { version = "17.0.0", default-features = false, features = ['component-model', 'cranelift'] } -wasmtime-wasi = "17.0.0" -wasmtime-wasi-http = "17.0.0" +wasi-common = "24.0.0" +wasmtime = { version = "24.0.0", default-features = false, features = ['component-model', 'cranelift'] } +wasmtime-wasi = "24.0.0" +wasmtime-wasi-http = "24.0.0" [build-dependencies] bindgen = "0.68.1" diff --git a/src/wasm-wasi-component/src/lib.rs b/src/wasm-wasi-component/src/lib.rs index b0552e81..93c26214 100644 --- a/src/wasm-wasi-component/src/lib.rs +++ b/src/wasm-wasi-component/src/lib.rs @@ -2,19 +2,18 @@ use anyhow::{bail, Context, Result}; use bytes::{Bytes, BytesMut}; use http_body_util::combinators::BoxBody; use http_body_util::{BodyExt, Full}; +use hyper::Error; use std::ffi::{CStr, CString}; use std::mem::MaybeUninit; use std::process::exit; use std::ptr; use std::sync::OnceLock; use tokio::sync::mpsc; -use wasmtime::component::{Component, InstancePre, Linker, ResourceTable}; +use wasmtime::component::{Component, Linker, ResourceTable}; use wasmtime::{Config, Engine, Store}; -use wasmtime_wasi::preview2::{ - DirPerms, FilePerms, WasiCtx, WasiCtxBuilder, WasiView, -}; -use wasmtime_wasi::{ambient_authority, Dir}; -use wasmtime_wasi_http::bindings::http::types::ErrorCode; +use wasmtime_wasi::{DirPerms, FilePerms, WasiCtx, WasiCtxBuilder, WasiView}; +use wasmtime_wasi_http::bindings::http::types::{ErrorCode, Scheme}; +use wasmtime_wasi_http::bindings::ProxyPre; use wasmtime_wasi_http::{WasiHttpCtx, WasiHttpView}; #[allow( @@ -180,7 +179,7 @@ struct GlobalConfig { struct GlobalState { engine: Engine, - component: InstancePre<StoreState>, + component: 
ProxyPre<StoreState>, global_config: &'static GlobalConfig, sender: mpsc::Sender<NxtRequestInfo>, } @@ -209,11 +208,15 @@ impl GlobalState { let component = Component::from_file(&engine, &global_config.component) .context("failed to compile component")?; let mut linker = Linker::<StoreState>::new(&engine); - wasmtime_wasi::preview2::command::add_to_linker(&mut linker)?; - wasmtime_wasi_http::proxy::add_only_http_to_linker(&mut linker)?; + wasmtime_wasi::add_to_linker_async(&mut linker) + .context("failed to add wasi to linker")?; + wasmtime_wasi_http::add_only_http_to_linker_sync(&mut linker) + .context("failed to add wasi:http to linker")?; let component = linker .instantiate_pre(&component) .context("failed to pre-instantiate the provided component")?; + let proxy = + ProxyPre::new(component).context("failed to conform to proxy")?; // Spin up the Tokio async runtime in a separate thread with a // communication channel into it. This thread will send requests to @@ -223,7 +226,7 @@ impl GlobalState { Ok(GlobalState { engine, - component, + component: proxy, sender, global_config, }) @@ -256,22 +259,19 @@ impl GlobalState { // shouldn't get raw access to stdout/stderr. cx.inherit_stdout(); cx.inherit_stderr(); + cx.inherit_env(); for dir in self.global_config.dirs.iter() { - let fd = Dir::open_ambient_dir(dir, ambient_authority()) - .with_context(|| { - format!("failed to open directory '{dir}'") - })?; cx.preopened_dir( - fd, + dir, + dir, DirPerms::all(), FilePerms::all(), - dir, - ); + )?; } cx.build() }, table: ResourceTable::default(), - http: WasiHttpCtx, + http: WasiHttpCtx::new(), }; let mut store = Store::new(&self.engine, data); @@ -292,15 +292,13 @@ impl GlobalState { // generate headers, write those below, and then compute the body // afterwards. let task = tokio::spawn(async move { - let (proxy, _) = wasmtime_wasi_http::proxy::Proxy::instantiate_pre( - &mut store, - &self.component, - ) - .await - .context("failed to instantiate")?; - let req = store.data_mut().new_incoming_request(request)?; + let req = store + .data_mut() + .new_incoming_request(Scheme::Http, request)?; let out = store.data_mut().new_response_outparam(sender)?; - proxy + self.component + .instantiate_async(&mut store) + .await? .wasi_http_incoming_handler() .call_handle(&mut store, req, out) .await @@ -376,7 +374,7 @@ impl GlobalState { fn to_request_body( &self, info: &mut NxtRequestInfo, - ) -> BoxBody<Bytes, ErrorCode> { + ) -> BoxBody<Bytes, Error> { // TODO: should convert the body into a form of `Stream` to become an // async stream of frames. 
The return value can represent that here // but for now this slurps up the entire body into memory and puts it @@ -594,16 +592,10 @@ struct StoreState { } impl WasiView for StoreState { - fn table(&self) -> &ResourceTable { - &self.table - } - fn table_mut(&mut self) -> &mut ResourceTable { + fn table(&mut self) -> &mut ResourceTable { &mut self.table } - fn ctx(&self) -> &WasiCtx { - &self.ctx - } - fn ctx_mut(&mut self) -> &mut WasiCtx { + fn ctx(&mut self) -> &mut WasiCtx { &mut self.ctx } } diff --git a/src/wasm/nxt_wasm.c b/src/wasm/nxt_wasm.c index 92ed57ab..db79d6ae 100644 --- a/src/wasm/nxt_wasm.c +++ b/src/wasm/nxt_wasm.c @@ -246,7 +246,7 @@ nxt_wasm_setup(nxt_task_t *task, nxt_process_t *process, nxt_conf_value_t *dirs = NULL; nxt_wasm_app_conf_t *c; nxt_wasm_func_handler_t *fh; - static nxt_str_t filesystem_str = nxt_string("filesystem"); + static const nxt_str_t filesystem_str = nxt_string("filesystem"); c = &conf->u.wasm; diff --git a/test/conftest.py b/test/conftest.py index 2fe4d8dc..91c59e17 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -429,7 +429,8 @@ def _clear_temp_dir(): temporary_dir = unit_instance['temp_dir'] if is_findmnt and not waitforunmount(temporary_dir, timeout=600): - sys.exit('Could not unmount filesystems in tmpdir ({temporary_dir}).') + Log.print_log() + sys.exit(f'Could not unmount filesystems in tmpdir ({temporary_dir}).') for item in Path(temporary_dir).iterdir(): if item.name not in [ diff --git a/test/python/factory/wsgi.py b/test/python/factory/wsgi.py new file mode 100644 index 00000000..8ad4887b --- /dev/null +++ b/test/python/factory/wsgi.py @@ -0,0 +1,23 @@ +def wsgi_a(env, start_response): + start_response("200", [("Content-Length", "1")]) + return [b"1"] + + +def wsgi_b(env, start_response): + start_response("200", [("Content-Length", "1")]) + return [b"2"] + + +def wsgi_a_factory(): + return wsgi_a + + +def wsgi_b_factory(): + return wsgi_b + + +wsgi_invalid_callable = None + + +def wsgi_factory_returning_invalid_callable(): + return wsgi_invalid_callable diff --git a/test/test_chunked.py b/test/test_chunked.py new file mode 100644 index 00000000..caa26f7e --- /dev/null +++ b/test/test_chunked.py @@ -0,0 +1,188 @@ +import re + +import pytest +from unit.applications.lang.python import ApplicationPython + +prerequisites = {'modules': {'python': 'any'}} + +client = ApplicationPython() + + +@pytest.fixture(autouse=True) +def setup_method_fixture(): + client.load('mirror') + + assert 'success' in client.conf( + {"http": {"chunked_transform": True}}, 'settings' + ) + + +def test_chunked(): + def chunks(chunks=[]): + body = '' + + for c in chunks: + body = f'{body}{len(c):x}\r\n{c}\r\n' + + resp = client.get( + headers={ + 'Host': 'localhost', + 'Connection': 'close', + 'Transfer-Encoding': 'chunked', + }, + body=f'{body}0\r\n\r\n', + ) + + expect_body = ''.join(chunks) + + assert resp['status'] == 200 + assert resp['headers']['Content-Length'] == str(len(expect_body)) + assert resp['body'] == expect_body + + chunks() + chunks(['1']) + chunks(['0123456789']) + chunks(['0123456789' * 128]) + chunks(['0123456789' * 512]) + chunks(['0123456789' * 128, '1', '1', '0123456789' * 128, '1']) + + +def test_chunked_pipeline(): + sock = client.get( + no_recv=True, + headers={ + 'Host': 'localhost', + 'Transfer-Encoding': 'chunked', + }, + body='1\r\n$\r\n0\r\n\r\n', + ) + + resp = client.get( + sock=sock, + headers={ + 'Host': 'localhost', + 'Transfer-Encoding': 'chunked', + 'Connection': 'close', + }, + body='1\r\n%\r\n0\r\n\r\n', + raw_resp=True, + 
) + + assert len(re.findall('200 OK', resp)) == 2 + assert len(re.findall('Content-Length: 1', resp)) == 2 + assert len(re.findall('$', resp)) == 1 + assert len(re.findall('%', resp)) == 1 + + +def test_chunked_max_body_size(): + assert 'success' in client.conf( + {'max_body_size': 1024, 'chunked_transform': True}, 'settings/http' + ) + + body = f'{2048:x}\r\n{"x" * 2048}\r\n0\r\n\r\n' + + assert ( + client.get( + headers={ + 'Host': 'localhost', + 'Connection': 'close', + 'Transfer-Encoding': 'chunked', + }, + body=body, + )['status'] + == 413 + ) + + +def test_chunked_after_last(): + resp = client.get( + headers={ + 'Host': 'localhost', + 'Connection': 'close', + 'Transfer-Encoding': 'chunked', + }, + body='1\r\na\r\n0\r\n\r\n1\r\nb\r\n0\r\n\r\n', + ) + + assert resp['status'] == 200 + assert resp['headers']['Content-Length'] == '1' + assert resp['body'] == 'a' + + +def test_chunked_transform(): + assert 'success' in client.conf( + {"http": {"chunked_transform": False}}, 'settings' + ) + + assert ( + client.get( + headers={ + 'Host': 'localhost', + 'Connection': 'close', + 'Transfer-Encoding': 'chunked', + }, + body='0\r\n\r\n', + )['status'] + == 411 + ) + + +def test_chunked_invalid(): + # invalid chunkes + + def check_body(body): + assert ( + client.get( + headers={ + 'Host': 'localhost', + 'Connection': 'close', + 'Transfer-Encoding': 'chunked', + }, + body=body, + )['status'] + == 400 + ) + + check_body('1\r\nblah\r\n0\r\n\r\n') + check_body('1\r\n\r\n1\r\n0\r\n\r\n') + check_body('1\r\n1\r\n\r\n0\r\n\r\n') + + # invalid transfer encoding header + + assert ( + client.get( + headers={ + 'Host': 'localhost', + 'Connection': 'close', + 'Transfer-Encoding': ['chunked', 'chunked'], + }, + body='0\r\n\r\n', + )['status'] + == 400 + ), 'two Transfer-Encoding headers' + + assert ( + client.get( + headers={ + 'Host': 'localhost', + 'Connection': 'close', + 'Transfer-Encoding': 'chunked', + 'Content-Length': '5', + }, + body='0\r\n\r\n', + )['status'] + == 400 + ), 'Transfer-Encoding and Content-Length' + + assert ( + client.get( + http_10=True, + headers={ + 'Host': 'localhost', + 'Connection': 'close', + 'Transfer-Encoding': 'chunked', + }, + body='0\r\n\r\n', + )['status'] + == 400 + ), 'Transfer-Encoding HTTP/1.0' diff --git a/test/test_python_application.py b/test/test_python_application.py index 466a59a2..c4803153 100644 --- a/test/test_python_application.py +++ b/test/test_python_application.py @@ -10,6 +10,7 @@ import pytest from packaging import version from unit.applications.lang.python import ApplicationPython +from unit.option import option prerequisites = {'modules': {'python': 'all'}} @@ -64,6 +65,45 @@ custom-header: BLAH }, 'headers' assert resp['body'] == body, 'body' + # REQUEST_URI unchanged + + path = f'{option.test_dir}/python/variables' + assert 'success' in client.conf( + { + "listeners": {"*:8080": {"pass": "routes"}}, + "routes": [ + { + "action": { + "rewrite": "/foo", + "pass": "applications/variables", + } + } + ], + "applications": { + "variables": { + "type": client.get_application_type(), + "processes": {'spare': 0}, + "path": path, + "working_directory": path, + "module": "wsgi", + } + }, + } + ) + + resp = client.http( + f"""POST /bar HTTP/1.1 +Host: localhost +Content-Length: 1 +Custom-Header: blah +Content-Type: text/html +Connection: close + +a""".encode(), + raw=True, + ) + assert resp['headers']['Request-Uri'] == '/bar', 'REQUEST_URI unchanged' + def test_python_application_query_string(): client.load('query_string') diff --git 
a/test/test_python_factory.py b/test/test_python_factory.py new file mode 100644 index 00000000..d1752c24 --- /dev/null +++ b/test/test_python_factory.py @@ -0,0 +1,140 @@ +from unit.applications.lang.python import ApplicationPython +from unit.option import option + +prerequisites = {"modules": {"python": "all"}} + +client = ApplicationPython() + + +def test_python_factory_targets(): + python_dir = f"{option.test_dir}/python" + + assert "success" in client.conf( + { + "listeners": { + "*:8080": {"pass": "applications/targets/1"}, + "*:8081": {"pass": "applications/targets/2"}, + "*:8082": {"pass": "applications/targets/factory-1"}, + "*:8083": {"pass": "applications/targets/factory-2"}, + }, + "applications": { + "targets": { + "type": client.get_application_type(), + "working_directory": f"{python_dir}/factory/", + "path": f"{python_dir}/factory/", + "targets": { + "1": { + "module": "wsgi", + "callable": "wsgi_a", + "factory": False, + }, + "2": { + "module": "wsgi", + "callable": "wsgi_b", + "factory": False, + }, + "factory-1": { + "module": "wsgi", + "callable": "wsgi_a_factory", + "factory": True, + }, + "factory-2": { + "module": "wsgi", + "callable": "wsgi_b_factory", + "factory": True, + }, + }, + } + }, + } + ) + + resp = client.get(port=8080) + assert resp["status"] == 200 + assert resp["body"] == "1" + + resp = client.get(port=8081) + assert resp["status"] == 200 + assert resp["body"] == "2" + + resp = client.get(port=8082) + assert resp["status"] == 200 + assert resp["body"] == "1" + + resp = client.get(port=8083) + assert resp["status"] == 200 + assert resp["body"] == "2" + + +def test_python_factory_without_targets(): + python_dir = f"{option.test_dir}/python" + + assert "success" in client.conf( + { + "listeners": { + "*:8080": {"pass": "applications/python-app-factory"}, + "*:8081": {"pass": "applications/python-app"}, + }, + "applications": { + "python-app-factory": { + "type": client.get_application_type(), + "working_directory": f"{python_dir}/factory/", + "path": f"{python_dir}/factory/", + "module": "wsgi", + "callable": "wsgi_a_factory", + "factory": True, + }, + "python-app": { + "type": client.get_application_type(), + "working_directory": f"{python_dir}/factory/", + "path": f"{python_dir}/factory/", + "module": "wsgi", + "callable": "wsgi_b", + "factory": False, + }, + }, + } + ) + + resp = client.get(port=8080) + assert resp["status"] == 200 + assert resp["body"] == "1" + + resp = client.get(port=8081) + assert resp["status"] == 200 + assert resp["body"] == "2" + + +def test_python_factory_invalid_callable_value(skip_alert): + skip_alert( + r"failed to apply new conf", + r"did not return callable object", + r"can not be called to fetch callable", + ) + python_dir = f"{option.test_dir}/python" + + invalid_callable_values = [ + "wsgi_factory_returning_invalid_callable", + "wsgi_invalid_callable", + ] + + for callable_value in invalid_callable_values: + assert "error" in client.conf( + { + "listeners": {"*:8080": {"pass": "applications/targets/1"}}, + "applications": { + "targets": { + "type": client.get_application_type(), + "working_directory": f"{python_dir}/factory/", + "path": f"{python_dir}/factory/", + "targets": { + "1": { + "module": "wsgi", + "callable": callable_value, + "factory": True, + }, + }, + } + }, + } + ) diff --git a/test/test_response_headers.py b/test/test_response_headers.py index e62c1293..ddc22124 100644 --- a/test/test_response_headers.py +++ b/test/test_response_headers.py @@ -163,12 +163,11 @@ def test_response_headers_remove(): def 
test_response_headers_invalid(skip_alert): - skip_alert(r'failed to apply new conf') - def check_invalid(conf): - assert 'error' in client.conf( - conf, - 'routes/0/action/response_headers', - ) + resp = client.conf(conf, 'routes/0/action/response_headers') + assert 'error' in resp + + return resp - check_invalid({"X-Foo": "$u"}) + resp = check_invalid({"X-Foo": "$u"}) + assert 'detail' in resp and 'Unknown variable' in resp['detail'] diff --git a/test/test_routing.py b/test/test_routing.py index 0b6eced2..170f627e 100644 --- a/test/test_routing.py +++ b/test/test_routing.py @@ -2009,3 +2009,75 @@ def test_routes_match_destination_proxy(): ), 'proxy configure' assert client.get()['status'] == 200, 'proxy' + + +def set_if(condition): + assert 'success' in client.conf(f'"{condition}"', 'routes/0/match/if') + + +def test_routes_match_if(): + + def try_if(condition, status): + set_if(condition) + assert client.get(url=f'/{condition}')['status'] == status + + assert 'success' in client.conf( + { + "listeners": {"*:8080": {"pass": "routes"}}, + "routes": [ + { + "match": {"method": "GET"}, + "action": {"return": 200}, + } + ], + "applications": {}, + } + ), 'routing configure' + + # const + + try_if('', 404) + try_if('0', 404) + try_if('false', 404) + try_if('undefined', 404) + try_if('!', 200) + try_if('!null', 200) + try_if('1', 200) + + # variable + + set_if('$arg_foo') + assert client.get(url='/bar?bar')['status'] == 404 + assert client.get(url='/foo_empty?foo')['status'] == 404 + assert client.get(url='/foo?foo=1')['status'] == 200 + + set_if('!$arg_foo') + assert client.get(url='/bar?bar')['status'] == 200 + assert client.get(url='/foo_empty?foo')['status'] == 200 + assert client.get(url='/foo?foo=1')['status'] == 404 + +def test_routes_match_if_njs(require): + require({'modules': {'njs': 'any'}}) + + assert 'success' in client.conf( + { + "listeners": {"*:8080": {"pass": "routes"}}, + "routes": [ + { + "match": {"method": "GET"}, + "action": {"return": 200}, + } + ], + "applications": {}, + } + ) + + set_if('`${args.foo == \'1\'}`') + assert client.get(url='/foo_1?foo=1')['status'] == 200 + assert client.get(url='/foo_2?foo=2')['status'] == 404 + + set_if('!`${args.foo == \'1\'}`') + assert client.get(url='/foo_1?foo=1')['status'] == 404 + assert client.get(url='/foo_2?foo=2')['status'] == 200 + + assert 'error' in client.conf('$arg_', 'routes/0/match/if') diff --git a/test/test_variables.py b/test/test_variables.py index 9aab8a62..e20a3cd7 100644 --- a/test/test_variables.py +++ b/test/test_variables.py @@ -93,7 +93,9 @@ def test_variables_method(search_in_file, wait_for_record): assert wait_for_record(reg, 'access.log') is not None, 'method POST' -def test_variables_request_uri(search_in_file, wait_for_record): +def test_variables_request_uri( + findall, search_in_file, temp_dir, wait_for_record +): set_format('$request_uri') def check_request_uri(req_uri): @@ -108,6 +110,103 @@ def test_variables_request_uri(search_in_file, wait_for_record): check_request_uri('/4%2A') check_request_uri('/9?q#a') + # $request_uri + proxy + + assert 'success' in client.conf( + { + "listeners": { + "*:8080": {"pass": "routes/a"}, + "[::1]:8081": {"pass": "routes/b"}, + }, + "routes": { + "a": [ + { + "action": { + "proxy": "http://[::1]:8081", + } + } + ], + "b": [ + { + "action": { + "return": 200, + } + } + ], + }, + "access_log": { + "path": f'{temp_dir}/access.log', + "format": "$remote_addr $uri $request_uri", + }, + } + ) + + assert search_in_file(r'::1', 'access.log') is None + + assert 
client.get(url='/blah%25blah?a=b')['status'] == 200 + + assert ( + wait_for_record(fr'^::1 /blah%blah /blah%25blah\?a=b$', 'access.log') + is not None + ), 'req 8081 (proxy)' + assert ( + search_in_file( + fr'^127\.0\.0\.1 /blah%blah /blah%25blah\?a=b$', 'access.log' + ) + is not None + ), 'req 8080' + + # rewrite set $request_uri before proxy + + assert 'success' in client.conf( + { + "a": [ + { + "action": { + "rewrite": "/foo", + "proxy": "http://[::1]:8081", + } + } + ], + "b": [ + { + "action": { + "rewrite": "/bar", + "return": 200, + } + } + ], + }, + 'routes', + ) + + assert len(findall(r'::1', 'access.log')) == 1 + + assert client.get(url='/blah%2Fblah?a=b')['status'] == 200 + + assert ( + wait_for_record(fr'^::1 /bar /foo\?a=b$', 'access.log') is not None + ), 'req 8081 (proxy) rewrite' + assert ( + search_in_file(fr'^127\.0\.0\.1 /foo /blah%2Fblah\?a=b$', 'access.log') + is not None + ), 'req 8080 rewrite' + + # percent-encoded rewrite + + assert len(findall(r'::1', 'access.log')) == 2 + + assert 'success' in client.conf('"/foo%2Ffoo"', 'routes/a/0/action/rewrite') + assert client.get(url='/blah%2Fblah?a=b')['status'] == 200 + + assert ( + wait_for_record( + fr'^127\.0\.0\.1 /foo/foo /blah%2Fblah\?a=b$', 'access.log' + ) + is not None + ), 'req 8080 percent' + assert len(findall(fr'^::1 /bar /foo/foo\?a=b$', 'access.log')) == 1 + def test_variables_uri(search_in_file, wait_for_record): set_format('$uri') diff --git a/test/test_wasm-wasi-component.py b/test/test_wasm-wasi-component.py new file mode 100644 index 00000000..6d3bc485 --- /dev/null +++ b/test/test_wasm-wasi-component.py @@ -0,0 +1,18 @@ +import pytest +from unit.applications.lang.wasm_component import ApplicationWasmComponent + +prerequisites = { + 'modules': {'wasm-wasi-component': 'any'}, + 'features': {'cargo_component': True}, +} + +client = ApplicationWasmComponent() + + +def test_wasm_component(): + client.load('hello_world') + + req = client.get() + + assert client.get()['status'] == 200 + assert req['body'] == 'Hello' diff --git a/test/unit/applications/lang/java.py b/test/unit/applications/lang/java.py index 351d04ce..2b3194ae 100644 --- a/test/unit/applications/lang/java.py +++ b/test/unit/applications/lang/java.py @@ -53,7 +53,7 @@ class ApplicationJava(ApplicationProto): os.makedirs(classes_path) classpath = ( - f'{option.current_dir}/build/tomcat-servlet-api-9.0.86.jar' + f'{option.current_dir}/build/tomcat-servlet-api-9.0.93.jar' ) ws_jars = glob.glob( diff --git a/test/unit/applications/lang/wasm_component.py b/test/unit/applications/lang/wasm_component.py new file mode 100644 index 00000000..a6c8dd14 --- /dev/null +++ b/test/unit/applications/lang/wasm_component.py @@ -0,0 +1,63 @@ +from pathlib import Path +import shutil +import subprocess +from urllib.parse import quote + +from unit.applications.proto import ApplicationProto +from unit.option import option + + +class ApplicationWasmComponent(ApplicationProto): + @staticmethod + def prepare_env(script): + try: + subprocess.check_output( + ['cargo', 'component', '--help'], + stderr=subprocess.STDOUT, + ) + except (subprocess.CalledProcessError, FileNotFoundError): + return None + + temp_dir = Path(f'{option.temp_dir}/wasm_component/') + + if not temp_dir.exists(): + temp_dir.mkdir() + + app_path = f'{temp_dir}/{script}' + + shutil.copytree(f'{option.test_dir}/wasm_component/{script}', app_path) + + try: + output = subprocess.check_output( + ['cargo', 'component', 'build', '--release'], + cwd=app_path, + stderr=subprocess.STDOUT, + ) + except 
KeyboardInterrupt: + raise + + except subprocess.CalledProcessError: + return None + + return output + + def load(self, script, **kwargs): + self.prepare_env(script) + + component_path = f'{option.temp_dir}/wasm_component/{script}/target/wasm32-wasip1/release/test_wasi_component.wasm' + + self._load_conf( + { + "listeners": { + "*:8080": {"pass": f"applications/{quote(script, '')}"} + }, + "applications": { + script: { + "type": "wasm-wasi-component", + "processes": {"spare": 0}, + "component": component_path, + } + }, + }, + **kwargs, + ) diff --git a/test/unit/applications/tls.py b/test/unit/applications/tls.py index 75354dd9..b48293be 100644 --- a/test/unit/applications/tls.py +++ b/test/unit/applications/tls.py @@ -85,9 +85,13 @@ subjectAltName = @alt_names default_bits = 2048 encrypt_key = no distinguished_name = req_distinguished_name +x509_extensions = myca_extensions {a_sec if alt_names else ""} -[ req_distinguished_name ]''' +[ req_distinguished_name ] + +[ myca_extensions ] +basicConstraints = critical,CA:TRUE''' ) def load(self, script, name=None): diff --git a/test/unit/check/cargo_component.py b/test/unit/check/cargo_component.py new file mode 100644 index 00000000..1c194bfc --- /dev/null +++ b/test/unit/check/cargo_component.py @@ -0,0 +1,4 @@ +from unit.applications.lang.wasm_component import ApplicationWasmComponent + +def check_cargo_component(): + return ApplicationWasmComponent.prepare_env('hello_world') is not None diff --git a/test/unit/check/discover_available.py b/test/unit/check/discover_available.py index 1383a0c3..99e63604 100644 --- a/test/unit/check/discover_available.py +++ b/test/unit/check/discover_available.py @@ -1,6 +1,7 @@ import subprocess import sys +from unit.check.cargo_component import check_cargo_component from unit.check.chroot import check_chroot from unit.check.go import check_go from unit.check.isolation import check_isolation @@ -28,7 +29,7 @@ def discover_available(unit): # discover modules from log file - for module in Log.findall(r'module: ([a-zA-Z]+) (.*) ".*"$'): + for module in Log.findall(r'module: ([a-zA-Z\-]+) (.*) ".*"$'): versions = option.available['modules'].setdefault(module[0], []) if module[1] not in versions: versions.append(module[1]) @@ -44,6 +45,7 @@ def discover_available(unit): # Discover features using check. Features should be discovered after # modules since some features can require modules. 
+ option.available['features']['cargo_component'] = check_cargo_component() option.available['features']['chroot'] = check_chroot() option.available['features']['isolation'] = check_isolation() option.available['features']['unix_abstract'] = check_unix_abstract() diff --git a/test/unit/http.py b/test/unit/http.py index 9401501b..e449c8b5 100644 --- a/test/unit/http.py +++ b/test/unit/http.py @@ -67,7 +67,7 @@ class HTTP1: headers['Content-Type'] = content_type - if 'Content-Length' not in headers: + if 'Content-Length' not in headers and 'Transfer-Encoding' not in headers: headers['Content-Length'] = len(body) for header, value in headers.items(): diff --git a/test/unit/status.py b/test/unit/status.py index 95096a96..679008d0 100644 --- a/test/unit/status.py +++ b/test/unit/status.py @@ -6,16 +6,16 @@ class Status: control = Control() def _check_zeros(): - assert Status.control.conf_get('/status') == { - 'connections': { + status = Status.control.conf_get('/status') + + assert status['connections'] == { 'accepted': 0, 'active': 0, 'idle': 0, 'closed': 0, - }, - 'requests': {'total': 0}, - 'applications': {}, } + assert status['requests'] == {'total': 0} + assert status['applications'] == {} def init(status=None): Status._status = ( @@ -31,6 +31,9 @@ class Status: if k in d2 } + if isinstance(d1, str) or isinstance(d1, list): + return d1 == d2 + return d1 - d2 return find_diffs(Status.control.conf_get('/status'), Status._status) diff --git a/test/wasm_component/hello_world/Cargo.lock b/test/wasm_component/hello_world/Cargo.lock new file mode 100644 index 00000000..2daeb73d --- /dev/null +++ b/test/wasm_component/hello_world/Cargo.lock @@ -0,0 +1,34 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "bitflags" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" + +[[package]] +name = "test-wasi-component" +version = "0.1.0" +dependencies = [ + "bitflags", + "wasi", + "wit-bindgen-rt", +] + +[[package]] +name = "wasi" +version = "0.13.0+wasi-0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "652cd73449d0b957a2743b70c72d79d34a5fa505696488f4ca90b46f6da94118" +dependencies = [ + "bitflags", + "wit-bindgen-rt", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "026d24a27f6712541fa534f2954bd9e0eb66172f033c2157c0f31d106255c497" diff --git a/test/wasm_component/hello_world/Cargo.toml b/test/wasm_component/hello_world/Cargo.toml new file mode 100644 index 00000000..a87fbeb5 --- /dev/null +++ b/test/wasm_component/hello_world/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "test-wasi-component" +version = "0.1.0" +edition = "2021" + +[dependencies] +bitflags = "2.4.2" +wit-bindgen-rt = "0.21.0" +wasi = "0.13.0" + +[lib] +crate-type = ["cdylib"] + +[package.metadata.component] +package = "component:test-wasi-component" +proxy = true + +[package.metadata.component.dependencies] diff --git a/test/wasm_component/hello_world/src/bindings.rs b/test/wasm_component/hello_world/src/bindings.rs new file mode 100644 index 00000000..a0d74c42 --- /dev/null +++ b/test/wasm_component/hello_world/src/bindings.rs @@ -0,0 +1,109 @@ +// Generated by `wit-bindgen` 0.24.0. DO NOT EDIT! 
+// Options used: +#[doc(hidden)] +#[allow(non_snake_case)] +pub unsafe fn _export_hello_world_cabi<T: Guest>() -> *mut u8 { + #[cfg(target_arch = "wasm32")] + _rt::run_ctors_once(); + let result0 = T::hello_world(); + let ptr1 = _RET_AREA.0.as_mut_ptr().cast::<u8>(); + let vec2 = (result0.into_bytes()).into_boxed_slice(); + let ptr2 = vec2.as_ptr().cast::<u8>(); + let len2 = vec2.len(); + ::core::mem::forget(vec2); + *ptr1.add(4).cast::<usize>() = len2; + *ptr1.add(0).cast::<*mut u8>() = ptr2.cast_mut(); + ptr1 +} +#[doc(hidden)] +#[allow(non_snake_case)] +pub unsafe fn __post_return_hello_world<T: Guest>(arg0: *mut u8) { + let l0 = *arg0.add(0).cast::<*mut u8>(); + let l1 = *arg0.add(4).cast::<usize>(); + _rt::cabi_dealloc(l0, l1, 1); +} +pub trait Guest { + fn hello_world() -> _rt::String; +} +#[doc(hidden)] + +macro_rules! __export_world_example_cabi{ + ($ty:ident with_types_in $($path_to_types:tt)*) => (const _: () = { + + #[export_name = "hello-world"] + unsafe extern "C" fn export_hello_world() -> *mut u8 { + $($path_to_types)*::_export_hello_world_cabi::<$ty>() + } + #[export_name = "cabi_post_hello-world"] + unsafe extern "C" fn _post_return_hello_world(arg0: *mut u8,) { + $($path_to_types)*::__post_return_hello_world::<$ty>(arg0) + } + };); +} +#[doc(hidden)] +pub(crate) use __export_world_example_cabi; +#[repr(align(4))] +struct _RetArea([::core::mem::MaybeUninit<u8>; 8]); +static mut _RET_AREA: _RetArea = + _RetArea([::core::mem::MaybeUninit::uninit(); 8]); +mod _rt { + + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr as *mut u8, layout); + } + pub use alloc_crate::alloc; + pub use alloc_crate::string::String; + extern crate alloc as alloc_crate; +} + +/// Generates `#[no_mangle]` functions to export the specified type as the +/// root implementation of all generated traits. +/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... +/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] + +macro_rules! 
__export_example_impl { + ($ty:ident) => (self::export!($ty with_types_in self);); + ($ty:ident with_types_in $($path_to_types_root:tt)*) => ( + $($path_to_types_root)*::__export_world_example_cabi!($ty with_types_in $($path_to_types_root)*); + ) +} +#[doc(inline)] +pub(crate) use __export_example_impl as export; + +#[cfg(target_arch = "wasm32")] +#[link_section = "component-type:wit-bindgen:0.24.0:example:encoded world"] +#[doc(hidden)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 194] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07E\x01A\x02\x01A\x02\x01\ +@\0\0s\x04\0\x0bhello-world\x01\0\x04\x01%component:test-wasi-component/example\x04\ +\0\x0b\x0d\x01\0\x07example\x03\0\0\0G\x09producers\x01\x0cprocessed-by\x02\x0dw\ +it-component\x070.202.0\x10wit-bindgen-rust\x060.24.0"; + +#[inline(never)] +#[doc(hidden)] +#[cfg(target_arch = "wasm32")] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/test/wasm_component/hello_world/src/lib.rs b/test/wasm_component/hello_world/src/lib.rs new file mode 100644 index 00000000..a1e40ef6 --- /dev/null +++ b/test/wasm_component/hello_world/src/lib.rs @@ -0,0 +1,31 @@ +use wasi::http::types::{ + Fields, IncomingRequest, OutgoingBody, OutgoingResponse, ResponseOutparam, +}; + +wasi::http::proxy::export!(Component); + +struct Component; + +impl wasi::exports::http::incoming_handler::Guest for Component { + fn handle(_request: IncomingRequest, response_out: ResponseOutparam) { + + let hdrs = Fields::new(); + let mesg = String::from("Hello"); + let _try = hdrs.set(&"Content-Type".to_string(), &[b"plain/text".to_vec()]); + let _try = hdrs.set(&"Content-Length".to_string(), &[mesg.len().to_string().as_bytes().to_vec()]); + + let resp = OutgoingResponse::new(hdrs); + + // Add the HTTP Response Status Code + resp.set_status_code(200).unwrap(); + + let body = resp.body().unwrap(); + ResponseOutparam::set(response_out, Ok(resp)); + + let out = body.write().unwrap(); + out.blocking_write_and_flush(mesg.as_bytes()).unwrap(); + drop(out); + + OutgoingBody::finish(body, None).unwrap(); + } +} diff --git a/test/wasm_component/hello_world/wit/world.wit b/test/wasm_component/hello_world/wit/world.wit new file mode 100644 index 00000000..82c810ef --- /dev/null +++ b/test/wasm_component/hello_world/wit/world.wit @@ -0,0 +1,6 @@ +package component:test-wasi-component; + +/// An example world for the component to target. +world example { + export hello-world: func() -> string; +} diff --git a/tools/README.md b/tools/README.md index e7caae34..daa5a9f3 100644 --- a/tools/README.md +++ b/tools/README.md @@ -6,6 +6,7 @@ should be considered experimental. * [`setup-unit`](#setup-unit) * [`unitc`](#unitc) +* [`unitctl`](#unitctl) --- @@ -104,3 +105,14 @@ UNIT_CTRL=docker://4d0431488982 unitc /status/requests/total ``` --- + +## unitctl + +### NGINX Unit Rust SDK and unitctl CLI + +This project provides a Rust SDK interface to the +[NGINX UNIT](https://unit.nginx.org/) +[control API](https://unit.nginx.org/howto/source/#source-startup) +and a CLI (`unitctl`) that exposes the functionality provided by the SDK. + +--- diff --git a/tools/unitc b/tools/unitc index 9fba4c6d..e1cc77c2 100755 --- a/tools/unitc +++ b/tools/unitc @@ -249,41 +249,58 @@ else NOLOG=1 fi +# Set the base curl command after testing for newer features +# +$RPC_CMD curl --fail-with-body --version > /dev/null 2>&1 +if [ $? 
-eq 0 ]; then + CURL_CMD="$RPC_CMD curl --silent --fail-with-body" +else + CURL_CMD="$RPC_CMD curl --silent --fail" +fi + # Adjust HTTP method and curl params based on presence of stdin payload # if [ -t 0 ] && [ ${#CONF_FILES[@]} -eq 0 ]; then if [ "$METHOD" = "DELETE" ]; then - $RPC_CMD curl -X $METHOD $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT + $CURL_CMD -X $METHOD $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT elif [ "$METHOD" = "EDIT" ]; then EDITOR=$(test "$EDITOR" && printf '%s' "$EDITOR" || command -v editor || command -v vim || echo vi) EDIT_FILENAME=/tmp/${0##*/}.$$${URI//\//_} - $RPC_CMD curl -fsS $UNIT_CTRL$URI > $EDIT_FILENAME || exit 2 + $CURL_CMD -S $UNIT_CTRL$URI > $EDIT_FILENAME || exit 2 if [ "${URI:0:12}" = "/js_modules/" ]; then if ! hash jq 2> /dev/null; then echo "${0##*/}: ERROR: jq(1) is required to edit JavaScript modules; install at <https://stedolan.github.io/jq/>" exit 1 fi jq -r < $EDIT_FILENAME > $EDIT_FILENAME.js # Unescape linebreaks for a better editing experience + cp $EDIT_FILENAME.js /tmp/${0##*/}.$$bak EDIT_FILE=$EDIT_FILENAME.js $EDITOR $EDIT_FILENAME.js || exit 2 - # Remove the references, delete old config, push new config+reference - $RPC_CMD curl -fsS $UNIT_CTRL/config/settings/js_module > /tmp/${0##*/}.$$_js_module && \ - $RPC_CMD curl -X DELETE $UNIT_CTRL/config/settings/js_module && \ - $RPC_CMD curl -fsSX DELETE $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ && \ - printf "%s" "$(< $EDIT_FILENAME.js)" | $RPC_CMD curl -fX PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ && \ - cat /tmp/${0##*/}.$$_js_module | $RPC_CMD curl -X PUT --data-binary @- $UNIT_CTRL/config/settings/js_module 2> /tmp/${0##*/}.$$ + # Test if this module is enabled + $CURL_CMD $UNIT_CTRL/config/settings/js_module > /tmp/${0##*/}.$$_js_module + if [ $? -eq 0 ]; then + # Remove the references, delete old module, push new module+reference + $CURL_CMD -X DELETE $UNIT_CTRL/config/settings/js_module && \ + $CURL_CMD -X DELETE $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ && \ + printf "%s" "$(< $EDIT_FILENAME.js)" | $CURL_CMD -X PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ && \ + cat /tmp/${0##*/}.$$_js_module | $CURL_CMD -X PUT --data-binary @- $UNIT_CTRL/config/settings/js_module 2> /tmp/${0##*/}.$$ + else + # Delete then re-apply the module + $CURL_CMD -X DELETE $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ && \ + printf "%s" "$(< $EDIT_FILENAME.js)" | $CURL_CMD -X PUT --data-binary @- $UNIT_CTRL$URI 2>&1 2> /tmp/${0##*/}.$$ + fi elif [ $CONVERT -eq 1 ]; then $CONVERT_FROM_JSON < $EDIT_FILENAME > $EDIT_FILENAME.yaml $EDITOR $EDIT_FILENAME.yaml || exit 2 - $CONVERT_TO_JSON < $EDIT_FILENAME.yaml | $RPC_CMD curl -X PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT + $CONVERT_TO_JSON < $EDIT_FILENAME.yaml | $CURL_CMD -X PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT else tr -d '\r' < $EDIT_FILENAME > $EDIT_FILENAME.json # Remove carriage-return from newlines $EDITOR $EDIT_FILENAME.json || exit 2 - cat $EDIT_FILENAME.json | $RPC_CMD curl -X PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT + cat $EDIT_FILENAME.json | $CURL_CMD -X PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT fi else SHOW_LOG=$(echo $URI | grep -c ^/control/) - $RPC_CMD curl $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT + $CURL_CMD $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT fi else if [ "$METHOD" = "INSERT" ]; then @@ -293,9 +310,9 @@ else fi NEW_ELEMENT=$(cat ${CONF_FILES[@]}) echo $NEW_ELEMENT | jq > /dev/null || exit $? 
# Test the input is valid JSON before proceeding - OLD_ARRAY=$($RPC_CMD curl -s $UNIT_CTRL$URI) + OLD_ARRAY=$($CURL_CMD -s $UNIT_CTRL$URI) if [ "$(echo $OLD_ARRAY | jq -r type)" = "array" ]; then - echo $OLD_ARRAY | jq ". |= [$NEW_ELEMENT] + ." | $RPC_CMD curl -X PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT + echo $OLD_ARRAY | jq ". |= [$NEW_ELEMENT] + ." | $CURL_CMD -X PUT --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT else echo "${0##*/}: ERROR: the INSERT method expects an array" exit 3 @@ -305,12 +322,31 @@ else cat ${CONF_FILES[@]} | $CONVERT_TO_JSON > /tmp/${0##*/}.$$_json CONF_FILES=(/tmp/${0##*/}.$$_json) fi - cat ${CONF_FILES[@]} | $RPC_CMD curl -X $METHOD --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT + cat ${CONF_FILES[@]} | $CURL_CMD -X $METHOD --data-binary @- $UNIT_CTRL$URI 2> /tmp/${0##*/}.$$ | $OUTPUT fi fi -CURL_STATUS=${PIPESTATUS[0]} -if [ $CURL_STATUS -ne 0 ]; then +CURL_STATUS=${PIPESTATUS[1]} +if [ $CURL_STATUS -eq 0 ]; then + rm -f /tmp/${0##*/}.$$* 2> /dev/null + if [ $SHOW_LOG -gt 0 ] && [ $NOLOG -eq 0 ] && [ $QUIET -eq 0 ]; then + echo -n "${0##*/}: Waiting for log..." + sleep $SHOW_LOG + echo "" + sed -n $((LOG_LEN+1)),\$p $ERROR_LOG + fi +elif [ $CURL_STATUS -eq 22 ]; then + echo "${0##*/}: ERROR: configuration not applied" + if [ "$METHOD" = "EDIT" ]; then + if [ -f /tmp/${0##*/}.$$_js_module ]; then + echo "${0##*/}: NOTICE: restoring previous configuration" + printf "%s" "$(< /tmp/${0##*/}.$$bak)" | $CURL_CMD -X PUT --data-binary @- $UNIT_CTRL$URI && \ + cat /tmp/${0##*/}.$$_js_module | $CURL_CMD -X PUT --data-binary @- $UNIT_CTRL/config/settings/js_module 2> /tmp/${0##*/}.$$ + fi + echo "${0##*/}: NOTICE: $(ls $EDIT_FILENAME.*) contains unapplied edits" + rm /tmp/${0##*/}.$$ $EDIT_FILENAME + fi +else echo "${0##*/}: ERROR: curl(1) exited with an error ($CURL_STATUS)" if [ $CURL_STATUS -eq 7 ] && [ $REMOTE -eq 0 ]; then echo "${0##*/}: Check that you have permission to access the Unit control socket, or try again with sudo(8)" @@ -320,11 +356,3 @@ if [ $CURL_STATUS -ne 0 ]; then fi exit 4 fi -rm -f /tmp/${0##*/}.$$* 2> /dev/null - -if [ $SHOW_LOG -gt 0 ] && [ $NOLOG -eq 0 ] && [ $QUIET -eq 0 ]; then - echo -n "${0##*/}: Waiting for log..." - sleep $SHOW_LOG - echo "" - sed -n $((LOG_LEN+1)),\$p $ERROR_LOG -fi diff --git a/tools/unitctl/.cargo/config.toml b/tools/unitctl/.cargo/config.toml new file mode 100644 index 00000000..ff7f7580 --- /dev/null +++ b/tools/unitctl/.cargo/config.toml @@ -0,0 +1,2 @@ +[target.aarch64-unknown-linux-gnu] +linker = "aarch64-linux-gnu-gcc"
\ No newline at end of file diff --git a/tools/unitctl/.gitignore b/tools/unitctl/.gitignore new file mode 100644 index 00000000..2319f815 --- /dev/null +++ b/tools/unitctl/.gitignore @@ -0,0 +1,16 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# These are backup files generated by rustfmt +**/*.rs.bk + +# Ignore OpenAPI cache files +.openapi_cache +# Ignore generated OpenAPI documentation +unit-openapi/docs +# Ignore autogenerated OpenAPI code +unit-openapi/src + +config
\ No newline at end of file diff --git a/tools/unitctl/Cargo.lock b/tools/unitctl/Cargo.lock new file mode 100644 index 00000000..58f07b8b --- /dev/null +++ b/tools/unitctl/Cargo.lock @@ -0,0 +1,2476 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d" + +[[package]] +name = "anstyle-parse" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e765fd216e48e067936442276d1d57399e37bce53c264d6fefbe298080cb57ee" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "aws-lc-rs" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5509d663b2c00ee421bda8d6a24d6c42e15970957de1701b8df9f6fbe5707df1" +dependencies = [ + "aws-lc-sys", + "mirai-annotations", + "paste", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d5d317212c2a78d86ba6622e969413c38847b62f48111f8b763af3dac2f9840" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", + "libc", + "paste", +] + +[[package]] +name = "backtrace" +version = "0.3.69" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" + +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + +[[package]] +name = "bindgen" +version = "0.69.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" +dependencies = [ + "bitflags 2.4.1", + "cexpr", + "clang-sys", + "itertools", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.60", + "which 4.4.2", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + +[[package]] +name = "block-buffer" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bollard" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0aed08d3adb6ebe0eff737115056652670ae290f177759aac19c30456135f94c" +dependencies = [ + "base64 0.22.0", + "bollard-stubs", + "bytes", + "futures-core", + "futures-util", + "hex", + "http 1.1.0", + "http-body-util", + "hyper 1.3.1", + "hyper-named-pipe", + "hyper-util", + "hyperlocal-next", + "log", + "pin-project-lite", + "serde", + "serde_derive", + "serde_json", + "serde_repr", + "serde_urlencoded", + "thiserror", + "tokio", + "tokio-util", + "tower-service", + "url", + "winapi", +] + +[[package]] +name = "bollard-stubs" +version = "1.44.0-rc.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "709d9aa1c37abb89d40f19f5d0ad6f0d88cb1581264e571c9350fc5bb89cf1c5" +dependencies = [ + "serde", + "serde_repr", + "serde_with", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytes" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" + +[[package]] +name = "cc" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d32a725bc159af97c3e629873bb9f88fb8cf8a4867175f76dc987815ea07c83b" +dependencies = [ + "jobserver", + "libc", + "once_cell", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets 0.52.0", +] + +[[package]] +name = "clang-sys" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67523a3b4be3ce1989d607a828d036249522dd9c1c8de7f4dd2dae43a37369d1" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "4.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52bdc885e4cacc7f7c9eedc1ef6da641603180c783c41a15c264944deeaab642" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb7fb5e4e979aec3be7791562fcba452f94ad85e954da024396433e0e25a79e9" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.60", +] + +[[package]] +name = "clap_lex" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" + +[[package]] +name = "cmake" +version = "0.1.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" +dependencies = [ + "cc", +] + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "colored_json" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79cff32df5cfea75e6484eeff0b4e48ad3977fb6582676a7862b3590dddc7a87" +dependencies = [ + "serde", + "serde_json", + "yansi", +] + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "cpufeatures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +dependencies = [ + "libc", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" 
+dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "custom_error" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f8a51dd197fa6ba5b4dc98a990a43cc13693c23eb0089ebb0fcc1f04152bca6" + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "digest" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "dunce" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" + +[[package]] +name = "either" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "filetime" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "windows-sys 0.52.0", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = 
"form_urlencoded" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.60", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" + +[[package]] +name = "glob" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] +name = "heck" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "home" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "http" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes", + "http 0.2.8", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +dependencies = [ + "bytes", + "futures-core", + "http 1.1.0", + "http-body 1.0.0", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "hyper" +version = "0.14.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 0.2.8", + "http-body 0.4.5", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.4.7", + "tokio", + "tower-service", + "tracing", + 
"want", +] + +[[package]] +name = "hyper" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-named-pipe" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b7d8abf35697b81a825e386fc151e0d503e8cb5fcb93cc8669c376dfd6f278" +dependencies = [ + "hex", + "hyper 1.3.1", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", + "winapi", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper 0.14.27", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "hyper-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "hyper 1.3.1", + "pin-project-lite", + "socket2 0.5.5", + "tokio", + "tower", + "tower-service", + "tracing", +] + +[[package]] +name = "hyperlocal" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fafdf7b2b2de7c9784f76e02c0935e65a8117ec3b768644379983ab333ac98c" +dependencies = [ + "futures-util", + "hex", + "hyper 0.14.27", + "pin-project", + "tokio", +] + +[[package]] +name = "hyperlocal-next" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acf569d43fa9848e510358c07b80f4adf34084ddc28c6a4a651ee8474c070dcc" +dependencies = [ + "hex", + "http-body-util", + "hyper 1.3.1", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown 0.14.3", + "serde", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" + +[[package]] +name = "jobserver" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "json5" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" +dependencies = [ + "pest", + "pest_derive", + "serde", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "libloading" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +dependencies = [ + "cfg-if", + "windows-targets 0.52.0", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" +dependencies = [ + "serde", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "memchr" +version = "2.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" + +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "mirai-annotations" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "ntapi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc51db7b362b205941f71232e56c625156eb9a929f8cf74a428fd5bc094a4afc" +dependencies = [ + "winapi", +] + +[[package]] +name = "nu-json" +version = "0.89.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "563eff2fa513ceee37a147701a75e259b4514b31b0bac3496f16297851946caf" +dependencies = [ + "linked-hash-map", + "num-traits", + "serde", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl" +version = "0.10.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +dependencies = [ + "bitflags 2.4.1", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.60", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.103" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + +[[package]] +name = "pbr" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed5827dfa0d69b6c92493d6c38e633bbaa5937c153d0d7c28bf12313f8c6d514" +dependencies = [ + "crossbeam-channel", + "libc", + "winapi", +] + +[[package]] +name = "percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "pest" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f6e86fb9e7026527a0d46bc308b841d73170ef8f443e1807f6ef88526a816d4" +dependencies = [ + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96504449aa860c8dcde14f9fba5c58dc6658688ca1fe363589d6327b8662c603" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "798e0220d1111ae63d66cb66a5dcb3fc2d986d520b98e49e1852bfdb11d7c5e7" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 1.0.103", +] + +[[package]] +name = "pest_meta" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "984298b75898e30a843e278a9f2452c31e349a073a0ce6fd950a12a74464e065" +dependencies = [ + "once_cell", + "pest", + "sha1", +] + +[[package]] +name = "pin-project" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.103", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "prettyplease" +version = "0.2.19" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ac2cf0f2e4f42b49f5ffd07dae8d746508ef7526c13940e5f524012ae6c6550" +dependencies = [ + "proc-macro2", + "syn 2.0.60", +] + +[[package]] +name = "proc-macro2" +version = "1.0.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rayon" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustix" +version = "0.38.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +dependencies = [ + "bitflags 2.4.1", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.23.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afabcee0551bd1aa3e18e5adbf2c0544722014b899adb31bd186ec638d3da97e" +dependencies = [ + "aws-lc-rs", + "log", + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35e4980fa29e4c4b212ffb3db068a564cbf560e51d3944b7c88bd8bf5bec64f4" +dependencies = [ + "base64 0.21.5", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "beb461507cee2c2ff151784c52762cf4d9ff6a61f3e80968600ed24fa837fa54" + +[[package]] +name = "rustls-webpki" +version = "0.102.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3bce581c0dd41bce533ce695a1437fa16a7ab5ac3ccfa99fe1a620a7885eabf" +dependencies = [ + "aws-lc-rs", + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "ryu" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" +dependencies = [ + "lazy_static", + "windows-sys 0.36.1", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "security-framework" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.198" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9846a40c979031340571da2545a4e5b7c4163bdae79b301d5f86d03979451fcc" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.198" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88edab869b01783ba905e7d0153f9fc1a6505a96e4ad3018011eedb838566d9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.60", +] + +[[package]] +name = "serde_json" 
+version = "1.0.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45" +dependencies = [ + "indexmap 1.9.1", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_repr" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.60", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c85f8e96d1d6857f13768fcbd895fcb06225510022a2774ed8b5150581847b0" +dependencies = [ + "base64 0.22.0", + "chrono", + "hex", + "indexmap 1.9.1", + "indexmap 2.2.6", + "serde", + "serde_derive", + "serde_json", + "time", +] + +[[package]] +name = "serde_yaml" +version = "0.9.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d232d893b10de3eb7258ff01974d6ee20663d8e833263c99409d4b13a0209da" +dependencies = [ + "indexmap 1.9.1", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "slab" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "1.0.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sysinfo" +version = "0.30.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb4f3438c8f6389c864e61221cbc97e9bca98b4daf39a5beb7bea660f528bb2" +dependencies = [ + "cfg-if", + "core-foundation-sys", + "libc", + "ntapi", + "once_cell", + "rayon", + "windows", +] + +[[package]] +name = "tar" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909" +dependencies = [ + "filetime", + "libc", + "xattr", +] + +[[package]] +name = "tempfile" +version = "3.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall", + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "thiserror" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.103", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "tokio" +version = "1.35.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite", + "socket2 0.5.5", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies 
= [ + "proc-macro2", + "quote", + "syn 2.0.60", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "ucd-trie" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" + +[[package]] +name = "unicode-bidi" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" + +[[package]] +name = "unicode-ident" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unit-client-rs" +version = "1.33.0" +dependencies = [ + "bollard", + "custom_error", + "futures", + "hex", + "hyper 0.14.27", + "hyper-tls", + "hyperlocal", + "pbr", + "rand", + "regex", + "rustls", + "serde", + "serde_json", + "sysinfo", + "tokio", + "unit-openapi", + "which 5.0.0", +] + +[[package]] +name = "unit-openapi" +version = "1.33.0" +dependencies = [ + "base64 0.21.5", + 
"futures", + "http 0.2.8", + "hyper 0.14.27", + "serde", + "serde_derive", + "serde_json", + "url", +] + +[[package]] +name = "unitctl" +version = "1.33.0" +dependencies = [ + "clap", + "colored_json", + "custom_error", + "futures", + "hyper 0.14.27", + "hyper-tls", + "hyperlocal", + "json5", + "nu-json", + "rustls-pemfile", + "serde", + "serde_json", + "serde_yaml", + "tar", + "tempfile", + "tokio", + "unit-client-rs", + "walkdir", + "which 5.0.0", +] + +[[package]] +name = "unsafe-libyaml" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab4c90930b95a82d00dc9e9ac071b4991924390d46cbd0dfe566148667605e4b" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "walkdir" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.60", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", 
+ "quote", + "syn 2.0.60", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + +[[package]] +name = "which" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bf3ea8596f3a0dd5980b46430f2058dfe2c36a27ccfbb1845d6fbfcd9ba6e14" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" +dependencies = [ + "windows-core", + "windows-targets 0.52.0", +] + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.0", +] + +[[package]] +name = "windows-sys" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" +dependencies = [ + "windows_aarch64_msvc 0.36.1", + "windows_i686_gnu 0.36.1", + "windows_i686_msvc 0.36.1", + "windows_x86_64_gnu 0.36.1", + "windows_x86_64_msvc 0.36.1", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + +[[package]] +name = "windows-targets" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + 
"windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", +] + +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + +[[package]] +name = "windows_i686_gnu" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + +[[package]] +name = "windows_i686_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + +[[package]] +name = "xattr" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" +dependencies = [ + "libc", + "linux-raw-sys", + "rustix", +] + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" diff --git a/tools/unitctl/Cargo.toml b/tools/unitctl/Cargo.toml new file mode 100644 index 00000000..c9c6a272 --- /dev/null +++ b/tools/unitctl/Cargo.toml @@ -0,0 +1,8 @@ +[workspace] +resolver = "2" + +members = [ + "unit-openapi", + "unit-client-rs", + "unitctl" +]
\ No newline at end of file diff --git a/tools/unitctl/Dockerfile b/tools/unitctl/Dockerfile new file mode 100644 index 00000000..812ce28c --- /dev/null +++ b/tools/unitctl/Dockerfile @@ -0,0 +1,37 @@ +FROM rust:slim-bullseye + +ADD https://unit.nginx.org/keys/nginx-keyring.gpg \ + /usr/share/keyrings/nginx-keyring.gpg + +RUN set -eux \ + export DEBIAN_FRONTEND=noninteractive; \ + echo 'fc27fd284cceb4bf6c8ac2118dbb5e834590836f8d6ba3944da0e0451cbadeca /usr/share/keyrings/nginx-keyring.gpg' |\ + sha256sum --check -; \ + chmod 0644 /usr/share/keyrings/nginx-keyring.gpg; \ + echo "deb [signed-by=/usr/share/keyrings/nginx-keyring.gpg] https://packages.nginx.org/unit/debian/ bullseye unit" > /etc/apt/sources.list.d/unit.list; \ + apt-get -qq update; \ + apt-get -qq upgrade --yes; \ + apt-get -qq install --yes --no-install-recommends --no-install-suggests \ + bsdmainutils \ + ca-certificates \ + git \ + gzip \ + grep \ + gawk \ + sed \ + make \ + rpm \ + pkg-config \ + libssl-dev \ + dpkg-dev \ + musl-dev \ + musl-tools \ + unit \ + gcc-aarch64-linux-gnu \ + libc6-dev-arm64-cross \ + gcc-x86-64-linux-gnu \ + libc6-dev-amd64-cross; \ + rustup target install x86_64-unknown-linux-gnu aarch64-unknown-linux-gnu x86_64-unknown-linux-musl; \ + cargo install --quiet cargo-deb cargo-generate-rpm; \ + rm -rf /var/lib/apt/lists/* /var/tmp/* /tmp/*; \ + git config --global --add safe.directory /project diff --git a/tools/unitctl/GNUmakefile b/tools/unitctl/GNUmakefile new file mode 100644 index 00000000..3ae8e34c --- /dev/null +++ b/tools/unitctl/GNUmakefile @@ -0,0 +1,145 @@ +MAKE_MAJOR_VER := $(shell echo $(MAKE_VERSION) | cut -d'.' -f1) + +ifneq ($(shell test $(MAKE_MAJOR_VER) -gt 3; echo $$?),0) +$(error Make version $(MAKE_VERSION) not supported, please install GNU Make 4.x) +endif + +GREP ?= $(shell command -v ggrep 2> /dev/null || command -v grep 2> /dev/null) +SED ?= $(shell command -v gsed 2> /dev/null || command -v sed 2> /dev/null) +AWK ?= $(shell command -v gawk 2> /dev/null || command -v awk 2> /dev/null) +RUSTUP ?= $(shell command -v rustup 2> /dev/null) +ifeq ($(RUSTUP),) +$(error Please install Rustup) +endif + +RPM_ARCH := $(shell uname -m) +VERSION ?= $(shell $(GREP) -Po '^version\s+=\s+"\K.*?(?=")' $(CURDIR)/unitctl/Cargo.toml) +SRC_REPO := https://github.com/nginxinc/unit-rust-sdk +DEFAULT_TARGET ?= $(shell $(RUSTUP) toolchain list | $(GREP) '(default)' | cut -d' ' -f1 | cut -d- -f2-) +SHELL := /bin/bash +OUTPUT_BINARY ?= unitctl +PACKAGE_NAME ?= unitctl +CARGO ?= cargo +DOCKER ?= docker +DOCKER_BUILD_FLAGS ?= --load +CHECKSUM ?= sha256sum +OPENAPI_GENERATOR_VERSION ?= 7.6.0 + +# Define platform targets based off of the current host OS +# If running MacOS, then build for MacOS platform targets installed in rustup +# If running Linux, then build for Linux platform targets installed in rustup +ifeq ($(shell uname -s),Darwin) + TARGETS := $(sort $(shell $(RUSTUP) target list | \ + $(GREP) '(installed)' | \ + $(GREP) 'apple' | \ + cut -d' ' -f1)) +else ifeq ($(shell uname -s),Linux) + TARGETS := $(sort $(shell $(RUSTUP) target list | \ + $(GREP) '(installed)' | \ + $(GREP) 'linux' | \ + cut -d' ' -f1)) +else + TARGETS := $(DEFAULT_TARGET) +endif + +RELEASE_BUILD_FLAGS ?= --quiet --release --bin $(OUTPUT_BINARY) + +Q = $(if $(filter 1,$V),,@) +M = $(shell printf "\033[34;1m▶\033[0m") + +.PHONY: help +help: + @$(GREP) --no-filename -E '^[ a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \ + $(AWK) 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-28s\033[0m %s\n", $$1, $$2}' | \ + sort + +.PHONY: clean 
+clean: ; $(info $(M) cleaning...)@ ## Cleanup everything + $Q rm -rf $(CURDIR)/target + +.PHONY: list-targets +list-targets: ## List all available platform targets + $Q echo $(TARGETS) | $(SED) -e 's/ /\n/g' + +.PHONY: all +all: $(TARGETS) ## Build all available platform targets [see: list-targets] + +.PHONY: $(TARGETS) +.ONESHELL: $(TARGETS) +$(TARGETS): openapi-generate + $Q if [ ! -f "$(CURDIR)/target/$(@)/release/$(OUTPUT_BINARY)" ]; then + echo "$(M) building $(OUTPUT_BINARY) with flags [$(RELEASE_BUILD_FLAGS) --target $(@)]" + $(CARGO) build $(RELEASE_BUILD_FLAGS) --target $@ + fi + +target target/debug: + $Q mkdir -p $@ + +.PHONY: debug +debug: target/debug/$(OUTPUT_BINARY) + +target/debug/$(OUTPUT_BINARY): openapi-generate + $Q echo "$(M) building $(OUTPUT_BINARY) in debug mode for the current platform" + $Q $(CARGO) build --bin $(OUTPUT_BINARY) + +.PHONY: release +release: target/release/$(OUTPUT_BINARY) + +target/release/$(OUTPUT_BINARY): openapi-generate + $Q echo "$(M) building $(OUTPUT_BINARY) in release mode for the current platform" + $Q $(CARGO) build $(RELEASE_BUILD_FLAGS) + +.PHONY: test +test: ## Run tests + $Q $(CARGO) test + +.ONESHELL: target/man/$(OUTPUT_BINARY).1.gz +target/man/$(OUTPUT_BINARY).1.gz: + $Q $(info $(M) building distributable manpage) + mkdir -p target/man + $(SED) 's/%%VERSION%%/$(VERSION)/' \ + man/$(OUTPUT_BINARY).1 > $(CURDIR)/target/man/$(OUTPUT_BINARY).1 + gzip $(CURDIR)/target/man/$(OUTPUT_BINARY).1 + +target/gz: + $Q mkdir -p target/gz + +.PHONY: manpage +manpage: target/man/$(OUTPUT_BINARY).1.gz ## Builds man page + +.openapi_cache: + $Q mkdir -p $@ + +## Generate (or regenerate) Unit API access code via a OpenAPI spec +.PHONY: openapi-generate +openapi-generate: .openapi_cache + $Q if [ ! -f "$(CURDIR)/unit-openapi/src/models/mod.rs" ]; then + echo "$(M) generating Unit API access code via a OpenAPI spec" + OPENAPI_GENERATOR_VERSION="$(OPENAPI_GENERATOR_VERSION)" \ + OPENAPI_GENERATOR_DOWNLOAD_CACHE_DIR="$(CURDIR)/.openapi_cache" \ + $(CURDIR)/build/openapi-generator-cli.sh \ + generate \ + --input-spec "$(CURDIR)/../../docs/unit-openapi.yaml" \ + --config "$(CURDIR)/openapi-config.json" \ + --template-dir "$(CURDIR)/unit-openapi/openapi-templates" \ + --output "$(CURDIR)/unit-openapi" \ + --generator-name rust + echo "mod error;" >> "$(CURDIR)/unit-openapi/src/apis/mod.rs" + $(SED) -i '1i #![allow(clippy::all)]' "$(CURDIR)/unit-openapi/src/lib.rs" + $(CARGO) fmt + fi + +.PHONY: openapi-clean +openapi-clean: ## Clean up generated OpenAPI files + $Q $(info $(M) cleaning up generated OpenAPI documentation) + $Q rm -rf "$(CURDIR)/unit-openapi/docs/*" + $Q $(info $(M) cleaning up generated OpenAPI api code) + $Q find "$(CURDIR)/unit-openapi/src/apis" \ + ! -name 'error.rs' -type f -exec rm -f {} + + $Q $(info $(M) cleaning up generated OpenAPI models code) + $Q rm -rf "$(CURDIR)/unit-openapi/src/models" + +include $(CURDIR)/build/package.mk +include $(CURDIR)/build/container.mk +include $(CURDIR)/build/release.mk +include $(CURDIR)/build/github.mk diff --git a/tools/unitctl/HomebrewFormula b/tools/unitctl/HomebrewFormula new file mode 120000 index 00000000..1ffaf042 --- /dev/null +++ b/tools/unitctl/HomebrewFormula @@ -0,0 +1 @@ +pkg/brew
\ No newline at end of file diff --git a/tools/unitctl/README.md b/tools/unitctl/README.md new file mode 100644 index 00000000..bcd31006 --- /dev/null +++ b/tools/unitctl/README.md @@ -0,0 +1,317 @@ +# NGINX Unit Rust SDK and CLI + +This project provides a Rust SDK interface to the +[NGINX Unit](https://unit.nginx.org/) +[control API](https://unit.nginx.org/howto/source/#source-startup) +and a CLI (`unitctl`) that exposes the functionality provided by the SDK. + +## Installation and Use +In order to build and use `unitctl` one needs a working installation of +Cargo. It is recommended to procure Cargo with Rustup. Rustup is packaged +for use in many systems, but you can also find it at its +[Official Site](https://rustup.rs/). Additionally, Macintosh users will +need to install GNU core utilities using brew (see the following command) + +``` +$ brew install make gnu-sed grep gawk maven +``` + +After installing a modern distribution of Make, Macintosh users can invoke +the makefile commands using `gmake`. For example: `gmake clean` or `gmake all`. + +Finally, in order to run the OpenAPI code generation tooling, Users will +need a working +[Java runtime](https://www.java.com/en/) +as well as Maven. Macintosh users can install Maven from Brew. + +With a working installation of Cargo it is advised to build unitctl with the +provided makefile. The `list-targets` target will inform the user of what +platforms are available to be built. One or more of these can then be run as +their own makefile targets. Alternatively, all available binary targets can be +built with `make all`. See the below example for illustration: + +``` +$ make list-targets +x86_64-unknown-linux-gnu + +$ make x86_64-unknown-linux-gnu +▶ building unitctl with flags [--quiet --release --bin unitctl --target x86_64-unknown-linux-gnu] + +$ file ./target/x86_64-unknown-linux-gnu/release/unitctl +./target/x86_64-unknown-linux-gnu/release/unitctl: ELF 64-bit LSB pie executable, +x86-64, version 1 (SYSV), dynamically linked, interpreter /lib64/ld-linux-x86-64.so.2, +BuildID[sha1]=ef4b094ffd549b39a8cb27a7ba2cc0dbad87a3bc, for GNU/Linux 4.4.0, +with debug_info, not stripped +``` + +As demonstrated in the example above, compiled binaries may be found in the +targets folder, under the subdirectory corresponding to the build target +desired. + + +## Features (Current) + +``` +CLI interface to the NGINX Unit Control API + +Usage: unitctl [OPTIONS] <COMMAND> + +Commands: + instances List all running Unit processes + edit Open current Unit configuration in editor + import Import configuration from a directory + execute Sends raw JSON payload to Unit + status Get the current status of Unit + listeners List active listeners + apps List all configured Unit applications + export Export the current configuration of Unit + help Print this message or the help of the given subcommand(s) + +Options: + -s, --control-socket-address <CONTROL_SOCKET_ADDRESS> + Path (unix:/var/run/unit/control.sock), tcp address with port (127.0.0.1:80), or URL. This flag can be specified multiple times. 
+ -w, --wait-timeout-seconds <WAIT_TIME_SECONDS>
+ Number of seconds to wait for control socket to become available
+ -t, --wait-max-tries <WAIT_MAX_TRIES>
+ Number of times to try to access control socket when waiting [default: 3]
+ -h, --help
+ Print help
+ -V, --version
+ Print version
+```
+
+- Consumes alternative configuration formats like YAML and converts them
+- Can convert output to multiple different formats (YAML, plain JSON, highlighted JSON)
+- Syntax highlighting of JSON output
+- Interpretation of Unit errors with (arguably more) useful error messages
+
+### Lists all running Unit processes and provides details about each process
+Unitctl will detect and connect to running processes of Unit on the host.
+It will pull information about the running Unit configuration
+(including how to access its control API) from the process information of
+each detected Unit process.
+
+```
+$ unitctl instances
+No socket path provided - attempting to detect from running instance
+unitd instance [pid: 79489, version: 1.32.0]:
+ Executable: /opt/unit/sbin/unitd
+ API control unix socket: unix:/opt/unit/control.unit.sock
+ Child processes ids: 79489, 79489
+ Runtime flags: --no-daemon
+ Configure options: --prefix=/opt/unit --user=elijah --group=elijah --openssl
+```
+
+### Start a new Unit process via docker
+Unitctl can launch new containers of Unit.
+These can be official Unit images or custom Unit images.
+Any container that calls `unitd` in a CMD declaration will suffice.
+
+The new containers will then be shown in a call to
+`unitctl instances`.
+
+```
+$ unitctl instances new /tmp/2 $(pwd) 'unit:wasm'
+Pulling and starting a container from unit:wasm
+Will mount /tmp/2 to /var/run for socket access
+Will mount /home/user/repositories/nginx/unit/tools/unitctl to /www for application access
+Note: Container will be on host network
+
+```
+
+The user must provide three arguments to the `unitctl instances new` subcommand:
+1. **A means of exposing the control API:**
+ There are two possibilities for this argument:
+ a filepath on which to open a unix socket,
+ or a TCP address.
+ - If a directory is specified, the Unit container
+ will mount this to `/var/run` internally.
+ Thus, the control socket and pid file will be
+ accessible from the host. For example: `/tmp/2`.
+ - If a TCP endpoint is specified, Unit will be configured
+ to offer its control API on the given port and address.
+ For example: `127.0.0.1:7171`.
+2. **A path to an application:**
+ In the example, `$(pwd)` is provided. The Unit container will mount
+ this to `/www/`. This will allow the user to configure their
+ Unit container to expose an application stored on the host.
+3. **An image tag:**
+ In the example, `unit:wasm` is used. This will be the image that unitctl
+ will deploy. Custom repos and images can be deployed in this manner.
+
+In addition to the above arguments, the user may add the `-r` flag. This flag will
+set the Docker volume mount for the application directory to be read only. Note
+that this flag will break compatibility with WordPress and other applications
+that store state on the file system.
+
+After deployment, the user will have one Unit container running on the host network.
+
+### Lists active applications and provides means to restart them
+Unitctl can list running applications by accessing the specified control API.
+Unitctl can also request from the API that an application be restarted.
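+
+For reference, the restart operation corresponds to a plain GET request against
+Unit's documented `/control/applications/<name>/restart` endpoint. A rough
+equivalent with `curl` (assuming the control socket path shown in the
+`instances` example above) would be:
+
+```
+$ curl --unix-socket /opt/unit/control.unit.sock \
+      http://localhost/control/applications/wasm/restart
+```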
+ +Listing applications: +``` +$ unitctl apps list +{ + "wasm": { + "type": "wasm-wasi-component", + "component": "/www/wasmapp-proxy-component.wasm" + } +} +``` + +Restarting an application: +``` +$ unitctl apps restart wasm +{ + "success": "Ok" +} +``` + +*Note:* Both of the above commands support operating on multiple instances +of Unit at once. To do this, pass multiple values for the `-s` flag as +shown below: + +``` +$ unitctl -s '127.0.0.1:8001' -s /run/nginx-unit.control.sock app list +``` + +### Lists active listeners from running Unit processes +Unitctl can query a given control API to fetch all configured +listeners. + +``` +unitctl listeners +No socket path provided - attempting to detect from running instance +{ + "127.0.0.1:8080": { + "pass": "routes" + } +} +``` + +*Note:* This command supports operating on multiple instances of Unit at once. +To do this, pass multiple values for the `-s` flag as shown below: + +``` +$ unitctl -s '127.0.0.1:8001' -s /run/nginx-unit.control.sock listeners +``` + +### Get the current status of NGINX Unit processes +Unitctl can query the control API to provide the status of the running +Unit daemon. + +``` +$ unitctl status -t yaml +No socket path provided - attempting to detect from running instance +connections: + accepted: 0 + active: 0 + idle: 0 + closed: 0 +requests: + total: 0 +applications: {} +``` + +*Note:* This command supports operating on multiple instances of Unit at once. +To do this, pass multiple values for the `-s` flag as shown below: + +``` +$ unitctl -s '127.0.0.1:8001' -s /run/nginx-unit.control.sock status +``` + +### Send arbitrary configuration payloads to Unit +Unitctl can accept custom request payloads and query given API endpoints with them. +The request payload must be passed in using the `-f` flag either as a filename or +using the `-` filename to denote the use of stdin as shown in the example below. + +``` +$ echo '{ + "listeners": { + "127.0.0.1:8080": { + "pass": "routes" + } + }, + + "routes": [ + { + "action": { + "share": "/www/data$uri" + } + } + ] +}' | unitctl execute --http-method PUT --path /config -f - +{ + "success": "Reconfiguration done." +} +``` + +*Note:* This command supports operating on multiple instances of Unit at once. +To do this, pass multiple values for the `-s` flag as shown below: + +``` +$ unitctl -s '127.0.0.1:8001' -s /run/nginx-unit.control.sock execute ... +``` + +### Edit current configuration in your favorite editor +Unitctl can fetch the configuration from a running instance of Unit and +load it in any number of preconfigured editors on your command line. + +Unitctl will try to use whatever editor is configured with the `EDITOR` +environment variable, but will default to vim, emacs, nano, vi, or pico. + +``` +$ unitctl edit +[[EDITOR LOADS SHOWING CURRENT CONFIGURATION - USER EDITS AND SAVES]] + +{ + "success": "Reconfiguration done." +} +``` + +*Note:* This command does not support operating on multiple instances of Unit at once. + +### Import configuration, certificates, and NJS modules from directory +Unitctl will parse existing configuration, certificates, and NJS modules +stored in a directory and convert them into a payload to reconfigure a +given Unit daemon. 
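+
+For illustration, a directory layout consistent with the import example below
+might look like the following (file names taken from that example), with each
+entry mapped onto the corresponding section of the control API:
+
+```
+/opt/unit/config
+├── certificates/
+│   └── snake.pem    # uploaded to /certificates/snake.pem
+├── hello.js         # uploaded to /js_modules/hello.js
+└── put.json         # applied to /config
+```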
+
+```
+$ unitctl import /opt/unit/config
+Imported /opt/unit/config/certificates/snake.pem -> /certificates/snake.pem
+Imported /opt/unit/config/hello.js -> /js_modules/hello.js
+Imported /opt/unit/config/put.json -> /config
+Imported 3 files
+```
+
+### Export configuration from a running Unit instance
+Unitctl will query a control API to fetch running configuration
+and NJS modules from a Unit process. Due to a technical limitation,
+this output will not contain currently stored certificate bundles.
+The output is saved as a tarball at the filename given with the `-f`
+argument. Standard out may be used with `-f -` as shown in the
+following examples.
+
+```
+$ unitctl export -f config.tar
+$ unitctl export -f -
+$ unitctl export -f - | tar xf - config.json
+$ unitctl export -f - > config.tar
+```
+
+*Note:* The exported configuration omits certificates.
+
+*Note:* This command does not support operating on multiple instances of Unit at once.
+
+### Wait for socket to become available
+All commands support waiting for unix sockets to become available.
+
+```
+$ unitctl --wait-timeout-seconds=3 --wait-max-tries=4 import /opt/unit/config
+Waiting for 3s control socket to be available try 2/4...
+Waiting for 3s control socket to be available try 3/4...
+Waiting for 3s control socket to be available try 4/4...
+Timeout waiting for unit to start has been exceeded
+```
diff --git a/tools/unitctl/build/container.mk b/tools/unitctl/build/container.mk
new file mode 100644
index 00000000..c892db2e
--- /dev/null
+++ b/tools/unitctl/build/container.mk
@@ -0,0 +1,67 @@
+ ## Builds a container image for building on Debian Linux
+.PHONY: container-debian-build-image
+.ONESHELL: container-debian-build-image
+container-debian-build-image:
+container-debian-build-image:
+ $Q echo "$(M) building debian linux docker build image: $(@)"
+ $(DOCKER) buildx build $(DOCKER_BUILD_FLAGS)\
+ -t debian_builder -f Dockerfile $(CURDIR);
+
+ ## Builds deb packages using a container image
+.PHONY: container-deb-packages
+container-deb-packages: container-debian-build-image
+ $Q $(DOCKER) run --rm --volume "$(CURDIR):/project" \
+ --workdir /project debian_builder make deb-packages
+ # Reset permissions on the target directory to the current user
+ if command -v id > /dev/null; then \
+ $(DOCKER) run --rm --volume "$(CURDIR):/project" \
+ --workdir /project debian_builder \
+ chown --recursive "$(shell id -u):$(shell id -g)" /project/target
+ fi
+
+ ## Builds rpm packages using a container image
+.PHONY: container-rpm-packages
+container-rpm-packages: container-debian-build-image
+ $Q $(DOCKER) run --rm --volume "$(CURDIR):/project" \
+ --workdir /project debian_builder make rpm-packages
+ # Reset permissions on the target directory to the current user
+ if command -v id > /dev/null; then \
+ $(DOCKER) run --rm --volume "$(CURDIR):/project" \
+ --workdir /project debian_builder chown --recursive \
+ "$(shell id -u):$(shell id -g)" /project/target
+ fi
+
+## Builds all packages using a container image
+.PHONY: container-all-packages
+container-all-packages: container-debian-build-image
+ $Q $(DOCKER) run --rm --volume "$(CURDIR):/project" \
+ --workdir /project debian_builder make all-packages
+ # Reset permissions on the target directory to the current user
+ if command -v id > /dev/null; then \
+ $(DOCKER) run --rm --volume "$(CURDIR):/project" \
+ --workdir /project debian_builder \
+ chown --recursive "$(shell id -u):$(shell id -g)" /project/target
+ fi
+
+## Run tests inside container
+.PHONY: container-test
+container-test: container-debian-build-image + $Q $(DOCKER) run --rm --volume "$(CURDIR):/project" \ + --workdir /project debian_builder make test + # Reset permissions on the target directory to the current user + if command -v id > /dev/null; then \ + $(DOCKER) run --rm --volume "$(CURDIR):/project" \ + --workdir /project debian_builder \ + chown --recursive "$(shell id -u):$(shell id -g)" /project/target + fi + +.PHONY: container-shell +container-shell: container-debian-build-image ## Run tests inside container + $Q $(DOCKER) run -it --rm --volume "$(CURDIR):/project" \ + --workdir /project debian_builder bash + # Reset permissions on the target directory to the current user + if command -v id > /dev/null; then \ + $(DOCKER) run --rm --volume "$(CURDIR):/project" \ + --workdir /project debian_builder \ + chown --recursive "$(shell id -u):$(shell id -g)" /project/target + fi diff --git a/tools/unitctl/build/github.mk b/tools/unitctl/build/github.mk new file mode 100644 index 00000000..4d31546f --- /dev/null +++ b/tools/unitctl/build/github.mk @@ -0,0 +1,22 @@ +.PHONY: gh-make-release +.ONESHELL: gh-make-release +gh-make-release: +ifndef CI + $(error must be running in CI) +endif +ifneq ($(shell git rev-parse --abbrev-ref HEAD),release-v$(VERSION)) + $(error must be running on release-v$(VERSION) branch) +endif + $(info $(M) updating files with release version [$(GIT_BRANCH)]) @ + git commit -m "ci: update files to version $(VERSION)" \ + Cargo.toml pkg/brew/$(PACKAGE_NAME).rb + git push origin "release-v$(VERSION)" + git tag -a "v$(VERSION)" -m "ci: tagging v$(VERSION)" + git push origin --tags + gh release create "v$(VERSION)" \ + --title "v$(VERSION)" \ + --notes-file $(CURDIR)/target/dist/release_notes.md \ + $(CURDIR)/target/dist/*.gz \ + $(CURDIR)/target/dist/*.deb \ + $(CURDIR)/target/dist/*.rpm \ + $(CURDIR)/target/dist/SHA256SUMS diff --git a/tools/unitctl/build/openapi-generator-cli.sh b/tools/unitctl/build/openapi-generator-cli.sh new file mode 100755 index 00000000..3a65e5ce --- /dev/null +++ b/tools/unitctl/build/openapi-generator-cli.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash + +# Source: https://github.com/OpenAPITools/openapi-generator/blob/master/bin/utils/openapi-generator-cli.sh +# License: Apache 2.0 + +#### +# Save as openapi-generator-cli on your PATH. chmod u+x. Enjoy. +# +# This script will query github on every invocation to pull the latest released +# version of openapi-generator. +# +# If you want repeatable executions, you can explicitly set a version via +# OPENAPI_GENERATOR_VERSION +# e.g. (in Bash) +# export OPENAPI_GENERATOR_VERSION=3.1.0 +# openapi-generator-cli.sh +# or +# OPENAPI_GENERATOR_VERSION=3.1.0 openapi-generator-cli.sh +# +# This is also helpful, for example, if you want to evaluate a SNAPSHOT version. +# +# NOTE: Jars are downloaded on demand from maven into the same directory as this +# script for every 'latest' version pulled from github. Consider putting this +# under its own directory. +#### +set -o pipefail + +for cmd in {mvn,jq,curl}; do + if ! command -v ${cmd} > /dev/null; then + >&2 echo "This script requires '${cmd}' to be installed." 
+ exit 1 + fi +done + +function latest.tag { + local uri="https://api.github.com/repos/${1}/releases" + local ver=$(curl -s ${uri} | jq -r 'first(.[]|select(.prerelease==false)).tag_name') + if [[ $ver == v* ]]; then + ver=${ver:1} + fi + echo $ver +} + +ghrepo=openapitools/openapi-generator +groupid=org.openapitools +artifactid=openapi-generator-cli +ver=${OPENAPI_GENERATOR_VERSION:-$(latest.tag $ghrepo)} + +echo "Using OpenAPI Generator version: ${ver}" + +jar=${artifactid}-${ver}.jar +cachedir=${OPENAPI_GENERATOR_DOWNLOAD_CACHE_DIR} + +DIR=${cachedir:-"$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"} + +if [ ! -d "${DIR}" ]; then + mkdir -p "${DIR}" +fi + +if [ ! -f ${DIR}/${jar} ]; then + repo="central::default::https://repo1.maven.org/maven2/" + if [[ ${ver} =~ ^.*-SNAPSHOT$ ]]; then + repo="central::default::https://oss.sonatype.org/content/repositories/snapshots" + fi + mvn org.apache.maven.plugins:maven-dependency-plugin:2.9:get \ + -DremoteRepositories=${repo} \ + -Dartifact=${groupid}:${artifactid}:${ver} \ + -Dtransitive=false \ + -Ddest=${DIR}/${jar} +fi + +java -ea \ + ${JAVA_OPTS} \ + -Xms512M \ + -Xmx1024M \ + -server \ + -jar ${DIR}/${jar} "$@"
\ No newline at end of file diff --git a/tools/unitctl/build/package.mk b/tools/unitctl/build/package.mk new file mode 100644 index 00000000..7009e2b1 --- /dev/null +++ b/tools/unitctl/build/package.mk @@ -0,0 +1,139 @@ +.PHONY: install-packaging-deb +install-packaging-deb: + $Q if ! command -v cargo-deb > /dev/null; then \ + $(CARGO) install --quiet cargo-deb; \ + fi + +.PHONY: install-packaging-rpm +install-packaging-rpm: + $Q if ! command -v cargo-generate-rpm > /dev/null; then \ + $(CARGO) install --quiet cargo-generate-rpm; \ + fi + +## Installs tools needed for building distributable packages +.PHONY: install-packaging-tools +install-packaging-tools: + $Q $(CARGO) install --quiet cargo-deb cargo-generate-rpm + +target/dist: + $Q mkdir -p $@ + +## Builds all packages for all targets +.PHONY: all-packages +all-packages: deb-packages rpm-packages gz-packages + +target/dist/SHA256SUMS: target/dist + $Q cd target/dist && $(CHECKSUM) * > SHA256SUMS + +.PHONY: checksums +checksums: target/dist/SHA256SUMS ## Generates checksums for all packages + +################################################################################ +### Debian Packages +################################################################################ + +to_debian_arch = $(shell echo $(1) | \ + $(SED) -e 's/x86_64/amd64/' -e 's/aarch64/arm64/' -e 's/armv7/armhf/') +DEBIAN_PACKAGE_TARGETS := \ + $(foreach t, $(TARGETS), target/$(t)/debian/$(PACKAGE_NAME)_$(VERSION)_$(call to_debian_arch, $(firstword $(subst -, , $(t)))).deb) + +.ONESHELL: $(DEBIAN_PACKAGE_TARGETS) +.NOTPARALLEL: $(DEBIAN_PACKAGE_TARGETS) +$(DEBIAN_PACKAGE_TARGETS): $(TARGETS) target/man/$(OUTPUT_BINARY).1.gz target/dist + $Q TARGET="$(word 2, $(subst /, , $(dir $@)))" + # Skip building debs for musl targets + if echo "$(@)" | $(GREP) -q 'musl\|apple'; then \ + exit 0 + fi + if [ ! -f "$(CURDIR)/$(@)" ]; then + if [ -d "$(CURDIR)/target/release" ]; then \ + echo "$(M) removing existing release directory: $(CURDIR)/target/release" + rm -rf "$(CURDIR)/target/release" + fi + echo "$(M) copying target architecture [$${TARGET}] build to target/release directory" + cp -r "$(CURDIR)/target/$${TARGET}/release" "$(CURDIR)/target/release" + echo "$(M) building debian package for target [$${TARGET}]: $(@)" + $(CARGO) deb --package unitctl --no-build --target "$${TARGET}" --output "$(CURDIR)/$(@)" + ln -f "$(CURDIR)/$(@)" "$(CURDIR)/target/dist/" + fi + +## Creates a debian package for the current platform +.PHONY: deb-packages +deb-packages: install-packaging-deb $(TARGETS) manpage $(DEBIAN_PACKAGE_TARGETS) + +################################################################################ +### RPM Packages +################################################################################ + +RPM_PACKAGE_TARGETS := $(foreach t, $(TARGETS), target/$(t)/generate-rpm/$(PACKAGE_NAME)_$(VERSION)_$(firstword $(subst -, , $(t))).rpm) + +.ONESHELL: $(RPM_PACKAGE_TARGETS) +.NOTPARALLEL: $(RPM_PACKAGE_TARGETS) +$(RPM_PACKAGE_TARGETS): $(TARGETS) target/man/$(OUTPUT_BINARY).1.gz target/dist + $Q TARGET="$(word 2, $(subst /, , $(dir $@)))" + ARCH="$(firstword $(subst -, , $(word 2, $(subst /, , $(dir $@)))))" + # Skip building rpms for musl targets + if echo "$(@)" | $(GREP) -q 'musl\|apple'; then \ + exit 0 + fi + if [ ! 
-f "$(CURDIR)/$(@)" ]; then + if [ -d "$(CURDIR)/target/release" ]; then \ + echo "$(M) removing existing release directory: $(CURDIR)/target/release" + rm -rf "$(CURDIR)/target/release" + fi + echo "$(M) copying target architecture [$${ARCH}] build to target/release directory" + cp -r "$(CURDIR)/target/$${TARGET}/release" "$(CURDIR)/target/release" + echo "$(M) building rpm package: $(@)" + $(CARGO) generate-rpm --package unitctl --arch "$${ARCH}" --target "$${TARGET}" --output "$(CURDIR)/$(@)" + rm -rf "$(CURDIR)/target/release" + ln -f "$(CURDIR)/$(@)" "$(CURDIR)/target/dist/" + fi + +## Creates a rpm package for the current platform +.PHONY: rpm-packages +rpm-packages: install-packaging-rpm $(TARGETS) manpage $(RPM_PACKAGE_TARGETS) + +################################################################################ +### Homebrew Packages +################################################################################ + +## Modifies the homebrew formula to point to the latest release +.PHONY: homebrew-packages +.ONESHELL: homebrew-packages +homebrew-packages: target/dist/SHA256SUMS +ifdef NEW_VERSION + VERSION=$(NEW_VERSION) +endif + $Q \ + VERSION="$(VERSION)" \ + PACKAGE_NAME="$(PACKAGE_NAME)" \ + SRC_REPO="$(SRC_REPO)" \ + AARCH64_UNKNOWN_LINUX_GNU_SHA256="$$($(GREP) $(PACKAGE_NAME)_v$(VERSION)_aarch64-unknown-linux-gnu.tar.gz $(CURDIR)/target/dist/SHA256SUMS | cut -d ' ' -f 1)" \ + X86_64_UNKNOWN_LINUX_GNU_SHA256="$$($(GREP) $(PACKAGE_NAME)_v$(VERSION)_x86_64-unknown-linux-gnu.tar.gz $(CURDIR)/target/dist/SHA256SUMS | cut -d ' ' -f 1)" \ + X86_64_APPLE_DARWIN_SHA256="$$($(GREP) $(PACKAGE_NAME)_v$(VERSION)_x86_64-apple-darwin.tar.gz $(CURDIR)/target/dist/SHA256SUMS | cut -d ' ' -f 1)" \ + AARCH64_APPLE_DARWIN_SHA256="$$($(GREP) $(PACKAGE_NAME)_v$(VERSION)_aarch64-apple-darwin.tar.gz $(CURDIR)/target/dist/SHA256SUMS | cut -d ' ' -f 1)" \ + envsubst < $(CURDIR)/pkg/brew/$(PACKAGE_NAME).rb.template > $(CURDIR)/pkg/brew/$(PACKAGE_NAME).rb + + +################################################################################ +### Tarball Packages +################################################################################ + +GZ_PACKAGE_TARGETS = $(foreach t, $(TARGETS), target/gz/$(t)/$(PACKAGE_NAME)_$(VERSION)_$(firstword $(subst -, , $(t))).tar.gz) + +.ONESHELL: $(GZ_PACKAGE_TARGETS) +$(GZ_PACKAGE_TARGETS): $(TARGETS) target/man/$(PACKAGE_NAME).1.gz target/dist + $Q mkdir -p "$(CURDIR)/target/gz" + TARGET="$(word 3, $(subst /, , $(dir $@)))" + PACKAGE="$(CURDIR)/target/gz/$(PACKAGE_NAME)_v$(VERSION)_$${TARGET}.tar.gz" + if [ ! 
-f "$${PACKAGE}}" ]; then + tar -cz -f $${PACKAGE} \ + -C $(CURDIR)/target/man $(PACKAGE_NAME).1.gz \ + -C $(CURDIR)/target/$${TARGET}/release $(PACKAGE_NAME) \ + -C $(CURDIR) LICENSE.txt + ln -f "$${PACKAGE}" "$(CURDIR)/target/dist/" + fi + +## Creates a gzipped tarball all target platforms +.PHONE: gz-packages +gz-packages: $(GZ_PACKAGE_TARGETS) diff --git a/tools/unitctl/build/release.mk b/tools/unitctl/build/release.mk new file mode 100644 index 00000000..949e9301 --- /dev/null +++ b/tools/unitctl/build/release.mk @@ -0,0 +1,57 @@ +.ONESHELL: target/dist/release_notes.md +target/dist/release_notes.md: target/dist target/dist/SHA256SUMS + $(info $(M) building release notes) @ + $Q echo "# Release Notes" > target/dist/release_notes.md + echo '## SHA256 Checksums' >> target/dist/release_notes.md + echo '```' >> target/dist/release_notes.md + cat target/dist/SHA256SUMS >> target/dist/release_notes.md + echo '```' >> target/dist/release_notes.md + +.PHONY: release-notes +release-notes: target/dist/release_notes.md ## Build release notes + +.PHONY: version +version: ## Outputs the current version + $Q echo "Version: $(VERSION)" + +.PHONY: version-update +.ONESHELL: version-update +version-update: ## Prompts for a new version + $(info $(M) updating repository to new version) @ + $Q echo " last committed version: $(LAST_VERSION)" + $Q echo " Cargo.toml file version : $(VERSION)" + read -p " Enter new version in the format (MAJOR.MINOR.PATCH): " version + $Q echo "$$version" | $(GREP) -qE '^[0-9]+\.[0-9]+\.[0-9]+-?.*$$' || \ + (echo "invalid version identifier: $$version" && exit 1) && \ + $(SED) -i "s/^version\s*=.*$$/version = \"$$version\"/" \ + $(CURDIR)/unit-client-rs/Cargo.toml + $(SED) -i "s/^version\s*=.*$$/version = \"$$version\"/" \ + $(CURDIR)/unitctl/Cargo.toml + $(SED) -i "s/^version\s*=.*$$/version = \"$$version\"/" \ + $(CURDIR)/unit-openapi/Cargo.toml + $(SED) -i "s/^\s*\"packageVersion\":\s*.*$$/ \"packageVersion\": \"$$version\",/" \ + $(CURDIR)/openapi-config.json + @ VERSION=$(shell $(GREP) -Po '^version\s+=\s+"\K.*?(?=")' \ + $(CURDIR)/unitctl/Cargo.toml) + +.PHONY: version-release +.ONESHELL: version-release +version-release: ## Change from a pre-release to full release version + $Q echo "$(VERSION)" | $(GREP) -qE '^[0-9]+\.[0-9]+\.[0-9]+-beta$$' || \ + (echo "invalid version identifier - must contain suffix -beta: $(VERSION)" && exit 1) + export NEW_VERSION="$(shell echo $(VERSION) | $(SED) -e 's/-beta$$//')" + $(SED) -i "s/^version\s*=.*$$/version = \"$$NEW_VERSION\"/" \ + $(CURDIR)/unit-client-rs/Cargo.toml + $(SED) -i "s/^version\s*=.*$$/version = \"$$NEW_VERSION\"/" \ + $(CURDIR)/unitctl/Cargo.toml + $(SED) -i "s/^version\s*=.*$$/version = \"$$NEW_VERSION\"/" \ + $(CURDIR)/unit-openapi/Cargo.toml + $(SED) -i "s/^\s*\"packageVersion\":\s*.*$$/ \"packageVersion\": \"$$NEW_VERSION\",/" \ + $(CURDIR)/openapi-config.json + @ VERSION=$(shell $(GREP) -Po '^version\s+=\s+"\K.*?(?=")' \ + $(CURDIR)/unitctl/Cargo.toml) + +.PHONY: cargo-release +cargo-release: ## Releases a new version to crates.io + $(info $(M) releasing version $(VERSION) to crates.io) @ + $Q $(CARGO) publish diff --git a/tools/unitctl/man/unitctl.1 b/tools/unitctl/man/unitctl.1 new file mode 100644 index 00000000..0d775b6f --- /dev/null +++ b/tools/unitctl/man/unitctl.1 @@ -0,0 +1,27 @@ +.\" Manpage for unitctl +.\" +.TH UNITCTL "1" "2022-12-29" "%%VERSION%%" "unitctl" +.SH NAME +unitctl \- NGINX Unit Control Utility +.SH SYNOPSIS +unitctl [\fI\,FLAGS\/\fR] [\fI\,OPTIONS\/\fR] [\fI\,FILE\/\fR]... 
+.SH DESCRIPTION +WRITE ME +. +.SH "REPORTING BUGS" +Report any issues on the project issue tracker at: +.br +\fB<https://github.com/nginx/unit>\fR +. +.SH ACKNOWLEDGEMENTS +WRITE ME +. +.SH AUTHOR +Elijah Zupancic \fB<e.zupancic@f5.com>\fR +. +.SH COPYRIGHT +Copyright \(co 2022 F5. All Rights Reserved. +.br +License: Apache License 2.0 (Apache-2.0) +.br +Full License Text: <https://www.apache.org/licenses/LICENSE-2.0> diff --git a/tools/unitctl/openapi-config.json b/tools/unitctl/openapi-config.json new file mode 100644 index 00000000..c47caadb --- /dev/null +++ b/tools/unitctl/openapi-config.json @@ -0,0 +1,6 @@ +{ + "packageName": "unit-openapi", + "packageVersion": "1.33.0", + "library": "hyper", + "preferUnsignedInt": true +} diff --git a/tools/unitctl/pkg/brew/unitctl.rb b/tools/unitctl/pkg/brew/unitctl.rb new file mode 100644 index 00000000..05d17d3f --- /dev/null +++ b/tools/unitctl/pkg/brew/unitctl.rb @@ -0,0 +1,29 @@ +class Unitctl < Formula + desc "CLI interface to the NGINX Unit Control API" + homepage "https://github.com/nginxinc/unit-rust-sdk" + version "0.3.0" + package_name = "unitctl" + src_repo = "https://github.com/nginxinc/unit-rust-sdk" + + if OS.mac? and Hardware::CPU.intel? + url "#{src_repo}/releases/download/v#{version}/#{package_name}_v#{version}_x86_64-apple-darwin.tar.gz" + sha256 "3e476850d1fc08aabc3cb25d19d42d171f52d55cea887aec754d47d1142c3638" + elsif OS.mac? and Hardware::CPU.arm? + url "#{src_repo}/releases/download/v#{version}/#{package_name}_#{version}_aarch64-apple-darwin.tar.gz" + sha256 "c1ec83ae67c08640f1712fba1c8aa305c063570fb7f96203228bf75413468bab" + elsif OS.linux? and Hardware::CPU.intel? + url "#{src_repo}/releases/download/v#{version}/#{package_name}_#{version}_x86_64-unknown-linux-gnu.tar.gz" + sha256 "9616687a7e4319c8399c0071059e6c1bb80b7e5b616714edc81a92717264a70f" + elsif OS.linux? and Hardware::CPU.arm? and Hardware::CPU.is_64_bit? + url "#{src_repo}/releases/download/v#{version}/#{package_name}_#{version}_aarch64-unknown-linux-gnu.tar.gz" + sha256 "88c2c7a8bc3d1930080c2b9a397a33e156ae4f876903b6565775270584055534" + else + odie "Unsupported architecture" + end + + + def install + bin.install "unitctl" + man1.install "unitctl.1.gz" + end +end diff --git a/tools/unitctl/pkg/brew/unitctl.rb.template b/tools/unitctl/pkg/brew/unitctl.rb.template new file mode 100644 index 00000000..f690abe2 --- /dev/null +++ b/tools/unitctl/pkg/brew/unitctl.rb.template @@ -0,0 +1,29 @@ +class Unitctl < Formula + desc "CLI interface to the NGINX Unit Control API" + homepage "https://github.com/nginxinc/unit-rust-sdk" + version "$VERSION" + package_name = "$PACKAGE_NAME" + src_repo = "$SRC_REPO" + + if OS.mac? and Hardware::CPU.intel? + url "#{src_repo}/releases/download/v#{version}/#{package_name}_v#{version}_x86_64-apple-darwin.tar.gz" + sha256 "$X86_64_APPLE_DARWIN_SHA256" + elsif OS.mac? and Hardware::CPU.arm? + url "#{src_repo}/releases/download/v#{version}/#{package_name}_#{version}_aarch64-apple-darwin.tar.gz" + sha256 "$AARCH64_APPLE_DARWIN_SHA256" + elsif OS.linux? and Hardware::CPU.intel? + url "#{src_repo}/releases/download/v#{version}/#{package_name}_#{version}_x86_64-unknown-linux-gnu.tar.gz" + sha256 "$X86_64_UNKNOWN_LINUX_GNU_SHA256" + elsif OS.linux? and Hardware::CPU.arm? and Hardware::CPU.is_64_bit? 
+ url "#{src_repo}/releases/download/v#{version}/#{package_name}_#{version}_aarch64-unknown-linux-gnu.tar.gz" + sha256 "$AARCH64_UNKNOWN_LINUX_GNU_SHA256" + else + odie "Unsupported architecture" + end + + + def install + bin.install "unitctl" + man1.install "unitctl.1.gz" + end +end diff --git a/tools/unitctl/rustfmt.toml b/tools/unitctl/rustfmt.toml new file mode 100644 index 00000000..866c7561 --- /dev/null +++ b/tools/unitctl/rustfmt.toml @@ -0,0 +1 @@ +max_width = 120
\ No newline at end of file diff --git a/tools/unitctl/unit-client-rs/Cargo.toml b/tools/unitctl/unit-client-rs/Cargo.toml new file mode 100644 index 00000000..6d873417 --- /dev/null +++ b/tools/unitctl/unit-client-rs/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "unit-client-rs" +version = "1.33.0" +authors = ["Elijah Zupancic"] +edition = "2021" +license = "Apache-2.0" + +[lib] +name = "unit_client_rs" + +[features] +# this preserves the ordering of json +default = ["serde_json/preserve_order"] + +[dependencies] +custom_error = "1.9" +hyper = { version = "0.14", features = ["stream"] } +hyper-tls = "0.5" +hyperlocal = "0.8" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +sysinfo = "0.30.5" +tokio = { version = "1.34", features = ["macros"] } +futures = "0.3" +hex = "0.4" +which = "5.0" + +unit-openapi = { path = "../unit-openapi" } +rustls = "0.23.5" +bollard = "0.16.1" +regex = "1.10.4" +pbr = "1.1.1" + +[dev-dependencies] +rand = "0.8.5" diff --git a/tools/unitctl/unit-client-rs/src/control_socket_address.rs b/tools/unitctl/unit-client-rs/src/control_socket_address.rs new file mode 100644 index 00000000..438ab0ad --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/control_socket_address.rs @@ -0,0 +1,569 @@ +use crate::control_socket_address::ControlSocket::{TcpSocket, UnixLocalAbstractSocket, UnixLocalSocket}; +use crate::control_socket_address::ControlSocketScheme::{HTTP, HTTPS}; +use crate::unit_client::UnitClientError; +use hyper::http::uri::{Authority, PathAndQuery}; +use hyper::Uri; +use std::fmt::{Display, Formatter}; +use std::fs; +use std::os::unix::fs::FileTypeExt; +use std::path::{PathBuf, MAIN_SEPARATOR}; + +type AbstractSocketName = String; +type UnixSocketPath = PathBuf; +type Port = u16; + +#[derive(Debug, Clone)] +pub enum ControlSocket { + UnixLocalAbstractSocket(AbstractSocketName), + UnixLocalSocket(UnixSocketPath), + TcpSocket(Uri), +} + +#[derive(Debug)] +pub enum ControlSocketScheme { + HTTP, + HTTPS, +} + +impl ControlSocketScheme { + fn port(&self) -> Port { + match self { + HTTP => 80, + HTTPS => 443, + } + } +} + +impl Display for ControlSocket { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + UnixLocalAbstractSocket(name) => f.write_fmt(format_args!("unix:@{}", name)), + UnixLocalSocket(path) => f.write_fmt(format_args!("unix:{}", path.to_string_lossy())), + TcpSocket(uri) => uri.fmt(f), + } + } +} + +impl From<ControlSocket> for String { + fn from(val: ControlSocket) -> Self { + val.to_string() + } +} + +impl From<ControlSocket> for PathBuf { + fn from(val: ControlSocket) -> Self { + match val { + UnixLocalAbstractSocket(socket_name) => PathBuf::from(format!("@{}", socket_name)), + UnixLocalSocket(socket_path) => socket_path, + TcpSocket(_) => PathBuf::default(), + } + } +} + +impl From<ControlSocket> for Uri { + fn from(val: ControlSocket) -> Self { + val.create_uri_with_path("") + } +} + +impl TryFrom<String> for ControlSocket { + type Error = UnitClientError; + + fn try_from(socket_address: String) -> Result<Self, Self::Error> { + ControlSocket::parse_address(socket_address.as_str()) + } +} + +impl TryFrom<&str> for ControlSocket { + type Error = UnitClientError; + + fn try_from(socket_address: &str) -> Result<Self, Self::Error> { + ControlSocket::parse_address(socket_address) + } +} + +impl TryFrom<Uri> for ControlSocket { + type Error = UnitClientError; + + fn try_from(socket_uri: Uri) -> Result<Self, Self::Error> { + match socket_uri.scheme_str() { + // URIs with the unix scheme will have a 
hostname that is a hex encoded string + // representing the path to the socket + Some("unix") => { + let host = match socket_uri.host() { + Some(host) => host, + None => { + return Err(UnitClientError::TcpSocketAddressParseError { + message: "No host found in socket address".to_string(), + control_socket_address: socket_uri.to_string(), + }) + } + }; + let bytes = hex::decode(host).map_err(|error| UnitClientError::TcpSocketAddressParseError { + message: error.to_string(), + control_socket_address: socket_uri.to_string(), + })?; + let path = String::from_utf8_lossy(&bytes); + ControlSocket::parse_address(path) + } + Some("http") | Some("https") => Ok(TcpSocket(socket_uri)), + Some(unknown) => Err(UnitClientError::TcpSocketAddressParseError { + message: format!("Unsupported scheme found in socket address: {}", unknown).to_string(), + control_socket_address: socket_uri.to_string(), + }), + None => Err(UnitClientError::TcpSocketAddressParseError { + message: "No scheme found in socket address".to_string(), + control_socket_address: socket_uri.to_string(), + }), + } + } +} + +impl ControlSocket { + pub fn socket_scheme(&self) -> ControlSocketScheme { + match self { + UnixLocalAbstractSocket(_) => ControlSocketScheme::HTTP, + UnixLocalSocket(_) => ControlSocketScheme::HTTP, + TcpSocket(uri) => match uri.scheme_str().expect("Scheme should not be None") { + "http" => ControlSocketScheme::HTTP, + "https" => ControlSocketScheme::HTTPS, + _ => unreachable!("Scheme should be http or https"), + }, + } + } + + pub fn create_uri_with_path(&self, str_path: &str) -> Uri { + match self { + UnixLocalAbstractSocket(name) => { + let socket_path = PathBuf::from(format!("@{}", name)); + hyperlocal::Uri::new(socket_path, str_path).into() + } + UnixLocalSocket(socket_path) => hyperlocal::Uri::new(socket_path, str_path).into(), + TcpSocket(uri) => { + if str_path.is_empty() { + uri.clone() + } else { + let authority = uri.authority().expect("Authority should not be None"); + Uri::builder() + .scheme(uri.scheme_str().expect("Scheme should not be None")) + .authority(authority.clone()) + .path_and_query(str_path) + .build() + .expect("URI should be valid") + } + } + } + } + + pub fn validate_http_address(uri: Uri) -> Result<(), UnitClientError> { + let http_address = uri.to_string(); + if uri.authority().is_none() { + return Err(UnitClientError::TcpSocketAddressParseError { + message: "No authority found in socket address".to_string(), + control_socket_address: http_address, + }); + } + if uri.port_u16().is_none() { + return Err(UnitClientError::TcpSocketAddressNoPortError { + control_socket_address: http_address, + }); + } + if !(uri.path().is_empty() || uri.path().eq("/")) { + return Err(UnitClientError::TcpSocketAddressParseError { + message: format!("Path is not empty or is not / [path={}]", uri.path()), + control_socket_address: http_address, + }); + } + + Ok(()) + } + + pub fn validate_unix_address(socket: PathBuf) -> Result<(), UnitClientError> { + if !socket.exists() { + return Err(UnitClientError::UnixSocketNotFound { + control_socket_address: socket.to_string_lossy().to_string(), + }); + } + let metadata = fs::metadata(&socket).map_err(|error| UnitClientError::UnixSocketAddressError { + source: error, + control_socket_address: socket.to_string_lossy().to_string(), + })?; + let file_type = metadata.file_type(); + if !file_type.is_socket() { + return Err(UnitClientError::UnixSocketAddressError { + source: std::io::Error::new(std::io::ErrorKind::Other, "Control socket path is not a socket"), + 
control_socket_address: socket.to_string_lossy().to_string(), + }); + } + + Ok(()) + } + + pub fn validate(&self) -> Result<Self, UnitClientError> { + match self { + UnixLocalAbstractSocket(socket_name) => { + let socket_path = PathBuf::from(format!("@{}", socket_name)); + Self::validate_unix_address(socket_path.clone()) + } + UnixLocalSocket(socket_path) => Self::validate_unix_address(socket_path.clone()), + TcpSocket(socket_uri) => Self::validate_http_address(socket_uri.clone()), + } + .map(|_| self.to_owned()) + } + + fn normalize_and_parse_http_address(http_address: String) -> Result<Uri, UnitClientError> { + // Convert *:1 style network addresses to URI format + let address = if http_address.starts_with("*:") { + http_address.replacen("*:", "http://127.0.0.1:", 1) + // Add scheme if not present + } else if !(http_address.starts_with("http://") || http_address.starts_with("https://")) { + format!("http://{}", http_address) + } else { + http_address.to_owned() + }; + + let is_https = address.starts_with("https://"); + + let parsed_uri = + Uri::try_from(address.as_str()).map_err(|error| UnitClientError::TcpSocketAddressUriError { + source: error, + control_socket_address: address, + })?; + let authority = parsed_uri.authority().expect("Authority should not be None"); + let expected_port = if is_https { HTTPS.port() } else { HTTP.port() }; + let normalized_authority = match authority.port_u16() { + Some(_) => authority.to_owned(), + None => { + let host = format!("{}:{}", authority.host(), expected_port); + Authority::try_from(host.as_str()).expect("Authority should be valid") + } + }; + + let normalized_uri = Uri::builder() + .scheme(parsed_uri.scheme_str().expect("Scheme should not be None")) + .authority(normalized_authority) + .path_and_query(PathAndQuery::from_static("")) + .build() + .map_err(|error| UnitClientError::TcpSocketAddressParseError { + message: error.to_string(), + control_socket_address: http_address.clone(), + })?; + + Ok(normalized_uri) + } + + /// Flexibly parse a textual representation of a socket address + pub fn parse_address<S: Into<String>>(socket_address: S) -> Result<Self, UnitClientError> { + let full_socket_address: String = socket_address.into(); + let socket_prefix = "unix:"; + let socket_uri_prefix = "unix://"; + let mut buf = String::with_capacity(socket_prefix.len()); + for (i, c) in full_socket_address.char_indices() { + // Abstract unix socket with no prefix + if i == 0 && c == '@' { + return Ok(UnixLocalAbstractSocket(full_socket_address[1..].to_string())); + } + buf.push(c); + // Unix socket with prefix + if i == socket_prefix.len() - 1 && buf.eq(socket_prefix) { + let path_text = full_socket_address[socket_prefix.len()..].to_string(); + // Return here if this URI does not have a scheme followed by double slashes + if !path_text.starts_with("//") { + return match path_text.strip_prefix('@') { + Some(name) => Ok(UnixLocalAbstractSocket(name.to_string())), + None => { + let path = PathBuf::from(path_text); + Ok(UnixLocalSocket(path)) + } + }; + } + } + + // Unix socket with URI prefix + if i == socket_uri_prefix.len() - 1 && buf.eq(socket_uri_prefix) { + let uri = Uri::try_from(full_socket_address.as_str()).map_err(|error| { + UnitClientError::TcpSocketAddressParseError { + message: error.to_string(), + control_socket_address: full_socket_address.clone(), + } + })?; + return ControlSocket::try_from(uri); + } + } + + /* Sockets on Windows are not supported, so there is no need to check + * if the socket address is a valid path, so we can do this 
shortcut + * here to see if a path was specified without a unix: prefix. */ + if buf.starts_with(MAIN_SEPARATOR) { + let path = PathBuf::from(buf); + return Ok(UnixLocalSocket(path)); + } + + let uri = Self::normalize_and_parse_http_address(buf)?; + Ok(TcpSocket(uri)) + } + + pub fn is_local_socket(&self) -> bool { + match self { + UnixLocalAbstractSocket(_) | UnixLocalSocket(_) => true, + TcpSocket(_) => false, + } + } +} + +#[cfg(test)] +mod tests { + use rand::distributions::{Alphanumeric, DistString}; + use std::env::temp_dir; + use std::fmt::Display; + use std::io; + use std::os::unix::net::UnixListener; + + use super::*; + + struct TempSocket { + socket_path: PathBuf, + _listener: UnixListener, + } + + impl TempSocket { + fn shutdown(&mut self) -> io::Result<()> { + fs::remove_file(&self.socket_path) + } + } + + impl Display for TempSocket { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "unix:{}", self.socket_path.to_string_lossy().to_string()) + } + } + + impl Drop for TempSocket { + fn drop(&mut self) { + self.shutdown() + .expect(format!("Unable to shutdown socket {}", self.socket_path.to_string_lossy()).as_str()); + } + } + + #[test] + fn will_error_with_nonexistent_unix_socket() { + let socket_address = "unix:/tmp/some_random_filename_that_doesnt_exist.sock"; + let control_socket = + ControlSocket::try_from(socket_address).expect("No error should be returned until validate() is called"); + assert!(control_socket.is_local_socket(), "Not parsed as a local socket"); + assert!(control_socket.validate().is_err(), "Socket should not be valid"); + } + + #[test] + fn can_parse_socket_with_prefix() { + let temp_socket = create_file_socket().expect("Unable to create socket"); + let control_socket = ControlSocket::try_from(temp_socket.to_string()).expect("Error parsing good socket path"); + assert!(control_socket.is_local_socket(), "Not parsed as a local socket"); + if let Err(e) = control_socket.validate() { + panic!("Socket should be valid: {}", e); + } + } + + #[test] + fn can_parse_socket_from_uri() { + let temp_socket = create_file_socket().expect("Unable to create socket"); + let uri: Uri = hyperlocal::Uri::new(temp_socket.socket_path.clone(), "").into(); + let control_socket = ControlSocket::try_from(uri).expect("Error parsing good socket path"); + assert!(control_socket.is_local_socket(), "Not parsed as a local socket"); + if let Err(e) = control_socket.validate() { + panic!("Socket should be valid: {}", e); + } + } + + #[test] + fn can_parse_socket_from_uri_text() { + let temp_socket = create_file_socket().expect("Unable to create socket"); + let uri: Uri = hyperlocal::Uri::new(temp_socket.socket_path.clone(), "").into(); + let control_socket = ControlSocket::parse_address(uri.to_string()).expect("Error parsing good socket path"); + assert!(control_socket.is_local_socket(), "Not parsed as a local socket"); + if let Err(e) = control_socket.validate() { + panic!("Socket for input text should be valid: {}", e); + } + } + + #[test] + #[cfg(target_os = "linux")] + fn can_parse_abstract_socket_from_uri() { + let temp_socket = create_abstract_socket().expect("Unable to create socket"); + let uri: Uri = hyperlocal::Uri::new(temp_socket.socket_path.clone(), "").into(); + let control_socket = ControlSocket::try_from(uri).expect("Error parsing good socket path"); + assert!(control_socket.is_local_socket(), "Not parsed as a local socket"); + if let Err(e) = control_socket.validate() { + panic!("Socket should be valid: {}", e); + } + } + + #[test] + #[cfg(target_os = 
"linux")] + fn can_parse_abstract_socket_from_uri_text() { + let temp_socket = create_abstract_socket().expect("Unable to create socket"); + let uri: Uri = hyperlocal::Uri::new(temp_socket.socket_path.clone(), "").into(); + let control_socket = ControlSocket::parse_address(uri.to_string()).expect("Error parsing good socket path"); + assert!(control_socket.is_local_socket(), "Not parsed as a local socket"); + if let Err(e) = control_socket.validate() { + panic!("Socket should be valid: {}", e); + } + } + + #[test] + fn can_parse_socket_without_prefix() { + let temp_socket = create_file_socket().expect("Unable to create socket"); + let control_socket = ControlSocket::try_from(temp_socket.socket_path.to_string_lossy().to_string()) + .expect("Error parsing good socket path"); + assert!(control_socket.is_local_socket(), "Not parsed as a local socket"); + if let Err(e) = control_socket.validate() { + panic!("Socket should be valid: {}", e); + } + } + + #[cfg(target_os = "linux")] + #[test] + fn can_parse_abstract_socket() { + let temp_socket = create_abstract_socket().expect("Unable to create socket"); + let control_socket = ControlSocket::try_from(temp_socket.to_string()).expect("Error parsing good socket path"); + assert!(control_socket.is_local_socket(), "Not parsed as a local socket"); + if let Err(e) = control_socket.validate() { + panic!("Socket should be valid: {}", e); + } + } + + #[test] + fn can_normalize_good_http_socket_addresses() { + let valid_socket_addresses = vec![ + "http://127.0.0.1:8080", + "https://127.0.0.1:8080", + "http://127.0.0.1:8080/", + "127.0.0.1:8080", + "http://0.0.0.0:8080", + "https://0.0.0.0:8080", + "http://0.0.0.0:8080/", + "0.0.0.0:8080", + "http://localhost:8080", + "https://localhost:8080", + "http://localhost:8080/", + "localhost:8080", + "http://[::1]:8080", + "https://[::1]:8080", + "http://[::1]:8080/", + "[::1]:8080", + "http://[0000:0000:0000:0000:0000:0000:0000:0000]:8080", + "https://[0000:0000:0000:0000:0000:0000:0000:0000]:8080", + "http://[0000:0000:0000:0000:0000:0000:0000:0000]:8080/", + "[0000:0000:0000:0000:0000:0000:0000:0000]:8080", + ]; + for socket_address in valid_socket_addresses { + let mut expected = if socket_address.starts_with("http") { + socket_address.to_string().trim_end_matches('/').to_string() + } else { + format!("http://{}", socket_address).trim_end_matches('/').to_string() + }; + expected.push('/'); + + let control_socket = ControlSocket::try_from(socket_address).expect("Error parsing good socket path"); + assert!(!control_socket.is_local_socket(), "Not parsed as a local socket"); + if let Err(e) = control_socket.validate() { + panic!("Socket should be valid: {}", e); + } + } + } + + #[test] + fn can_normalize_wildcard_http_socket_address() { + let socket_address = "*:8080"; + let expected = "http://127.0.0.1:8080/"; + let normalized_result = ControlSocket::normalize_and_parse_http_address(socket_address.to_string()); + let normalized = normalized_result + .expect("Unable to normalize socket address") + .to_string(); + assert_eq!(normalized, expected); + } + + #[test] + fn can_normalize_http_socket_address_with_no_port() { + let socket_address = "http://localhost"; + let expected = "http://localhost:80/"; + let normalized_result = ControlSocket::normalize_and_parse_http_address(socket_address.to_string()); + let normalized = normalized_result + .expect("Unable to normalize socket address") + .to_string(); + assert_eq!(normalized, expected); + } + + #[test] + fn can_normalize_https_socket_address_with_no_port() { + let 
socket_address = "https://localhost"; + let expected = "https://localhost:443/"; + let normalized_result = ControlSocket::normalize_and_parse_http_address(socket_address.to_string()); + let normalized = normalized_result + .expect("Unable to normalize socket address") + .to_string(); + assert_eq!(normalized, expected); + } + + #[test] + fn can_parse_http_addresses() { + let valid_socket_addresses = vec![ + "http://127.0.0.1:8080", + "https://127.0.0.1:8080", + "http://127.0.0.1:8080/", + "127.0.0.1:8080", + "http://0.0.0.0:8080", + "https://0.0.0.0:8080", + "http://0.0.0.0:8080/", + "0.0.0.0:8080", + "http://localhost:8080", + "https://localhost:8080", + "http://localhost:8080/", + "localhost:8080", + "http://[::1]:8080", + "https://[::1]:8080", + "http://[::1]:8080/", + "[::1]:8080", + "http://[0000:0000:0000:0000:0000:0000:0000:0000]:8080", + "https://[0000:0000:0000:0000:0000:0000:0000:0000]:8080", + "http://[0000:0000:0000:0000:0000:0000:0000:0000]:8080/", + "[0000:0000:0000:0000:0000:0000:0000:0000]:8080", + ]; + for socket_address in valid_socket_addresses { + let mut expected = if socket_address.starts_with("http") { + socket_address.to_string().trim_end_matches('/').to_string() + } else { + format!("http://{}", socket_address).trim_end_matches('/').to_string() + }; + expected.push('/'); + + let normalized = ControlSocket::normalize_and_parse_http_address(socket_address.to_string()) + .expect("Unable to normalize socket address") + .to_string(); + assert_eq!(normalized, expected); + } + } + + fn create_file_socket() -> Result<TempSocket, io::Error> { + let random = Alphanumeric.sample_string(&mut rand::thread_rng(), 10); + let socket_name = format!("unit-client-socket-test-{}.sock", random); + let socket_path = temp_dir().join(socket_name); + let listener = UnixListener::bind(&socket_path)?; + Ok(TempSocket { + socket_path, + _listener: listener, + }) + } + + #[cfg(target_os = "linux")] + fn create_abstract_socket() -> Result<TempSocket, io::Error> { + let random = Alphanumeric.sample_string(&mut rand::thread_rng(), 10); + let socket_name = format!("@unit-client-socket-test-{}.sock", random); + let socket_path = PathBuf::from(socket_name); + let listener = UnixListener::bind(&socket_path)?; + Ok(TempSocket { + socket_path, + _listener: listener, + }) + } +} diff --git a/tools/unitctl/unit-client-rs/src/lib.rs b/tools/unitctl/unit-client-rs/src/lib.rs new file mode 100644 index 00000000..a0933f42 --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/lib.rs @@ -0,0 +1,16 @@ +extern crate custom_error; +extern crate futures; +extern crate hyper; +extern crate hyper_tls; +extern crate hyperlocal; +extern crate serde; +extern crate serde_json; +pub mod control_socket_address; +mod runtime_flags; +pub mod unit_client; +mod unitd_cmd; +pub mod unitd_configure_options; +pub mod unitd_docker; +pub mod unitd_instance; +pub mod unitd_process; +mod unitd_process_user; diff --git a/tools/unitctl/unit-client-rs/src/runtime_flags.rs b/tools/unitctl/unit-client-rs/src/runtime_flags.rs new file mode 100644 index 00000000..7b31274d --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/runtime_flags.rs @@ -0,0 +1,90 @@ +use std::borrow::Cow; +use std::fmt; +use std::fmt::Display; +use std::path::{Path, PathBuf}; + +#[derive(Debug, Clone)] +pub struct RuntimeFlags { + pub flags: Cow<'static, str>, +} + +impl Display for RuntimeFlags { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.flags) + } +} + +impl RuntimeFlags { + pub fn new<S>(flags: S) -> RuntimeFlags + where + S: 
Into<String>, + { + RuntimeFlags { + flags: Cow::from(flags.into()), + } + } + + pub fn has_flag(&self, flag_name: &str) -> bool { + self.flags.contains(format!("--{}", flag_name).as_str()) + } + + pub fn get_flag_value(&self, flag_name: &str) -> Option<String> { + let flag_parts = self.flags.split_ascii_whitespace().collect::<Vec<&str>>(); + for (i, flag) in flag_parts.iter().enumerate() { + if let Some(name) = flag.strip_prefix("--") { + /* If there is no flag value after the current one, there is by definition no + * flag value for the current flag. */ + let index_lt_len = flag_parts.len() > i + 1; + if index_lt_len { + let next_value_isnt_flag = !flag_parts[i + 1].starts_with("--"); + if name.eq(flag_name) && next_value_isnt_flag { + return Some(flag_parts[i + 1].to_string()); + } + } + } + } + None + } + + pub fn control_api_socket_address(&self) -> Option<String> { + self.get_flag_value("control") + } + + pub fn pid_path(&self) -> Option<Box<Path>> { + self.get_flag_value("pid") + .map(PathBuf::from) + .map(PathBuf::into_boxed_path) + } + + pub fn log_path(&self) -> Option<Box<Path>> { + self.get_flag_value("log") + .map(PathBuf::from) + .map(PathBuf::into_boxed_path) + } + + pub fn modules_directory(&self) -> Option<Box<Path>> { + self.get_flag_value("modules") + .map(PathBuf::from) + .map(PathBuf::into_boxed_path) + } + + pub fn state_directory(&self) -> Option<Box<Path>> { + self.get_flag_value("state") + .map(PathBuf::from) + .map(PathBuf::into_boxed_path) + } + + pub fn tmp_directory(&self) -> Option<Box<Path>> { + self.get_flag_value("tmp") + .map(PathBuf::from) + .map(PathBuf::into_boxed_path) + } + + pub fn user(&self) -> Option<String> { + self.get_flag_value("user").map(String::from) + } + + pub fn group(&self) -> Option<String> { + self.get_flag_value("group").map(String::from) + } +} diff --git a/tools/unitctl/unit-client-rs/src/unit_client.rs b/tools/unitctl/unit-client-rs/src/unit_client.rs new file mode 100644 index 00000000..3d09e67a --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/unit_client.rs @@ -0,0 +1,424 @@ +use std::collections::HashMap; +use std::error::Error as StdError; +use std::fmt::Debug; +use std::rc::Rc; +use std::{fmt, io}; + +use custom_error::custom_error; +use hyper::body::{Buf, HttpBody}; +use hyper::client::{HttpConnector, ResponseFuture}; +use hyper::Error as HyperError; +use hyper::{http, Body, Client, Request}; +use hyper_tls::HttpsConnector; +use hyperlocal::{UnixClientExt, UnixConnector}; +use serde::{Deserialize, Serialize}; + +use crate::control_socket_address::ControlSocket; +use unit_openapi::apis::configuration::Configuration; +use unit_openapi::apis::{ + ApplicationsApi, ApplicationsApiClient, AppsApi, AppsApiClient, Error as OpenAPIError, ListenersApi, + ListenersApiClient, StatusApi, StatusApiClient, +}; +use unit_openapi::models::{ConfigApplication, ConfigListener, Status}; + +const USER_AGENT: &str = concat!("Unit CLI/", env!("CARGO_PKG_VERSION"), "/rust"); + +custom_error! 
{pub UnitClientError + OpenAPIError { source: OpenAPIError } = "OpenAPI error", + JsonError { source: serde_json::Error, + path: String} = "JSON error [path={path}]", + HyperError { source: hyper::Error, + control_socket_address: String, + path: String} = "Communications error [control_socket_address={control_socket_address}, path={path}]: {source}", + HttpRequestError { source: http::Error, + path: String} = "HTTP error [path={path}]", + HttpResponseError { status: http::StatusCode, + path: String, + body: String} = "HTTP response error [path={path}, status={status}]:\n{body}", + HttpResponseJsonBodyError { status: http::StatusCode, + path: String, + error: String, + detail: String} = "HTTP response error [path={path}, status={status}]:\n Error: {error}\n Detail: {detail}", + IoError { source: io::Error, socket: String } = "IO error [socket={socket}]", + UnixSocketAddressError { + source: io::Error, + control_socket_address: String + } = "Invalid unix domain socket address [control_socket_address={control_socket_address}]", + SocketPermissionsError { control_socket_address: String } = + "Insufficient permissions to connect to control socket [control_socket_address={control_socket_address}]", + UnixSocketNotFound { control_socket_address: String } = "Unix socket not found [control_socket_address={control_socket_address}]", + TcpSocketAddressUriError { + source: http::uri::InvalidUri, + control_socket_address: String + } = "Invalid TCP socket address [control_socket_address={control_socket_address}]", + TcpSocketAddressParseError { + message: String, + control_socket_address: String + } = "Invalid TCP socket address [control_socket_address={control_socket_address}]: {message}", + TcpSocketAddressNoPortError { + control_socket_address: String + } = "TCP socket address does not have a port specified [control_socket_address={control_socket_address}]", + UnitdProcessParseError { + message: String, + pid: u64 + } = "{message} for [pid={pid}]", + UnitdProcessExecError { + source: Box<dyn StdError>, + message: String, + executable_path: String, + pid: u64 + } = "{message} for [pid={pid}, executable_path={executable_path}]: {source}", + UnitdDockerError { + message: String + } = "Failed to communicate with docker daemon: {message}", +} + +impl UnitClientError { + fn new(error: HyperError, control_socket_address: String, path: String) -> Self { + if error.is_connect() { + if let Some(source) = error.source() { + if let Some(io_error) = source.downcast_ref::<io::Error>() { + if io_error.kind().eq(&io::ErrorKind::PermissionDenied) { + return UnitClientError::SocketPermissionsError { control_socket_address }; + } + } + } + } + + UnitClientError::HyperError { + source: error, + control_socket_address, + path, + } + } +} + +macro_rules! new_openapi_client_from_hyper_client { + ($unit_client:expr, $hyper_client: ident, $api_client:ident, $api_trait:ident) => {{ + let config = Configuration { + base_path: $unit_client.control_socket.create_uri_with_path("/").to_string(), + user_agent: Some(format!("{}/OpenAPI-Generator", USER_AGENT).to_owned()), + client: $hyper_client.clone(), + basic_auth: None, + oauth_access_token: None, + api_key: None, + }; + let rc_config = Rc::new(config); + Box::new($api_client::new(rc_config)) as Box<dyn $api_trait> + }}; +} + +macro_rules! 
new_openapi_client { + ($unit_client:expr, $api_client:ident, $api_trait:ident) => { + match &*$unit_client.client { + RemoteClient::Tcp { client } => { + new_openapi_client_from_hyper_client!($unit_client, client, $api_client, $api_trait) + } + RemoteClient::Unix { client } => { + new_openapi_client_from_hyper_client!($unit_client, client, $api_client, $api_trait) + } + } + }; +} + +#[derive(Clone)] +pub enum RemoteClient<B> +where + B: HttpBody + Send + 'static, + B::Data: Send, + B::Error: Into<Box<dyn StdError + Send + Sync>>, +{ + Unix { + client: Client<UnixConnector, B>, + }, + Tcp { + client: Client<HttpsConnector<HttpConnector>, B>, + }, +} + +impl<B> RemoteClient<B> +where + B: HttpBody + Send + 'static, + B::Data: Send, + B::Error: Into<Box<dyn StdError + Send + Sync>>, +{ + fn client_name(&self) -> &str { + match self { + RemoteClient::Unix { .. } => "Client<UnixConnector, Body>", + RemoteClient::Tcp { .. } => "Client<HttpsConnector<HttpConnector>, Body>", + } + } + + pub fn request(&self, req: Request<B>) -> ResponseFuture { + match self { + RemoteClient::Unix { client } => client.request(req), + RemoteClient::Tcp { client } => client.request(req), + } + } +} + +impl<B> Debug for RemoteClient<B> +where + B: HttpBody + Send + 'static, + B::Data: Send, + B::Error: Into<Box<dyn StdError + Send + Sync>>, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.client_name()) + } +} + +#[derive(Debug)] +pub struct UnitClient { + pub control_socket: ControlSocket, + /// Client for communicating with the control API over the UNIX domain socket + client: Box<RemoteClient<Body>>, +} + +impl UnitClient { + pub fn new(control_socket: ControlSocket) -> Self { + if control_socket.is_local_socket() { + Self::new_unix(control_socket) + } else { + Self::new_http(control_socket) + } + } + + pub fn new_http(control_socket: ControlSocket) -> Self { + let remote_client = Client::builder().build(HttpsConnector::new()); + Self { + control_socket, + client: Box::from(RemoteClient::Tcp { client: remote_client }), + } + } + + pub fn new_unix(control_socket: ControlSocket) -> UnitClient { + let remote_client = Client::unix(); + + Self { + control_socket, + client: Box::from(RemoteClient::Unix { client: remote_client }), + } + } + + /// Sends a request to Unit and deserializes the JSON response body into the value of type `RESPONSE`. 
+ pub async fn send_request_and_deserialize_response<RESPONSE: for<'de> serde::Deserialize<'de>>( + &self, + mut request: Request<Body>, + ) -> Result<RESPONSE, UnitClientError> { + let uri = request.uri().clone(); + let path: &str = uri.path(); + + request.headers_mut().insert("User-Agent", USER_AGENT.parse().unwrap()); + + let response_future = self.client.request(request); + + let response = response_future + .await + .map_err(|error| UnitClientError::new(error, self.control_socket.to_string(), path.to_string()))?; + + let status = response.status(); + let body = hyper::body::aggregate(response) + .await + .map_err(|error| UnitClientError::new(error, self.control_socket.to_string(), path.to_string()))?; + let reader = &mut body.reader(); + if !status.is_success() { + let error: HashMap<String, String> = + serde_json::from_reader(reader).map_err(|error| UnitClientError::JsonError { + source: error, + path: path.to_string(), + })?; + + return Err(UnitClientError::HttpResponseJsonBodyError { + status, + path: path.to_string(), + error: error.get("error").unwrap_or(&"Unknown error".into()).to_string(), + detail: error.get("detail").unwrap_or(&"".into()).to_string(), + }); + } + serde_json::from_reader(reader).map_err(|error| UnitClientError::JsonError { + source: error, + path: path.to_string(), + }) + } + + pub fn listeners_api(&self) -> Box<dyn ListenersApi + 'static> { + new_openapi_client!(self, ListenersApiClient, ListenersApi) + } + + pub async fn listeners(&self) -> Result<HashMap<String, ConfigListener>, Box<UnitClientError>> { + self.listeners_api().get_listeners().await.or_else(|err| { + if let OpenAPIError::Hyper(hyper_error) = err { + Err(Box::new(UnitClientError::new( + hyper_error, + self.control_socket.to_string(), + "/listeners".to_string(), + ))) + } else { + Err(Box::new(UnitClientError::OpenAPIError { source: err })) + } + }) + } + + pub fn status_api(&self) -> Box<dyn StatusApi + 'static> { + new_openapi_client!(self, StatusApiClient, StatusApi) + } + + pub async fn status(&self) -> Result<Status, Box<UnitClientError>> { + self.status_api().get_status().await.or_else(|err| { + if let OpenAPIError::Hyper(hyper_error) = err { + Err(Box::new(UnitClientError::new( + hyper_error, + self.control_socket.to_string(), + "/status".to_string(), + ))) + } else { + Err(Box::new(UnitClientError::OpenAPIError { source: err })) + } + }) + } + + pub fn applications_api(&self) -> Box<dyn ApplicationsApi + 'static> { + new_openapi_client!(self, ApplicationsApiClient, ApplicationsApi) + } + + pub async fn applications(&self) -> Result<HashMap<String, ConfigApplication>, Box<UnitClientError>> { + self.applications_api().get_applications().await.or_else(|err| { + if let OpenAPIError::Hyper(hyper_error) = err { + Err(Box::new(UnitClientError::new( + hyper_error, + self.control_socket.to_string(), + "/applications".to_string(), + ))) + } else { + Err(Box::new(UnitClientError::OpenAPIError { source: err })) + } + }) + } + + pub async fn per_application_api(&self) -> Box<dyn AppsApi + 'static> { + new_openapi_client!(self, AppsApiClient, AppsApi) + } + + pub async fn restart_application(&self, name: &String) -> Result<HashMap<String, String>, Box<UnitClientError>> { + self.per_application_api() + .await + .get_app_restart(name.as_str()) + .await + .or_else(|err| { + if let OpenAPIError::Hyper(hyper_error) = err { + Err(Box::new(UnitClientError::new( + hyper_error, + self.control_socket.to_string(), + format!("/control/applications/{}/restart", name), + ))) + } else { + 
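+                    // Errors other than transport (Hyper) failures are passed through
+                    // wrapped as OpenAPI errors, preserving the original source error.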
Err(Box::new(UnitClientError::OpenAPIError { source: err })) + } + }) + } + + pub async fn is_running(&self) -> bool { + self.status().await.is_ok() + } +} + +pub type UnitSerializableMap = HashMap<String, serde_json::Value>; + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct UnitStatus { + pub connections: UnitStatusConnections, + pub requests: UnitStatusRequests, + pub applications: HashMap<String, UnitStatusApplication>, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct UnitStatusConnections { + #[serde(default)] + pub closed: usize, + #[serde(default)] + pub idle: usize, + #[serde(default)] + pub active: usize, + #[serde(default)] + pub accepted: usize, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct UnitStatusRequests { + #[serde(default)] + pub active: usize, + #[serde(default)] + pub total: usize, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct UnitStatusApplication { + #[serde(default)] + pub processes: HashMap<String, usize>, + #[serde(default)] + pub requests: HashMap<String, usize>, +} + +#[cfg(test)] +mod tests { + use crate::unitd_instance::UnitdInstance; + + use super::*; + // Integration tests + + #[tokio::test] + async fn can_connect_to_unit_api() { + match UnitdInstance::running_unitd_instances().await.first() { + Some(unit_instance) => { + let control_api_socket_address = unit_instance + .control_api_socket_address() + .expect("No control API socket path found"); + let control_socket = ControlSocket::try_from(control_api_socket_address) + .expect("Unable to parse control socket address"); + let unit_client = UnitClient::new(control_socket); + assert!(unit_client.is_running().await); + } + None => { + eprintln!("No running unitd instances found - skipping test"); + } + } + } + + #[tokio::test] + async fn can_get_unit_status() { + match UnitdInstance::running_unitd_instances().await.first() { + Some(unit_instance) => { + let control_api_socket_address = unit_instance + .control_api_socket_address() + .expect("No control API socket path found"); + let control_socket = ControlSocket::try_from(control_api_socket_address) + .expect("Unable to parse control socket address"); + let unit_client = UnitClient::new(control_socket); + let status = unit_client.status().await.expect("Unable to get unit status"); + println!("Unit status: {:?}", status); + } + None => { + eprintln!("No running unitd instances found - skipping test"); + } + } + } + + #[tokio::test] + async fn can_get_unit_listeners() { + match UnitdInstance::running_unitd_instances().await.first() { + Some(unit_instance) => { + let control_api_socket_address = unit_instance + .control_api_socket_address() + .expect("No control API socket path found"); + let control_socket = ControlSocket::try_from(control_api_socket_address) + .expect("Unable to parse control socket address"); + let unit_client = UnitClient::new(control_socket); + unit_client.listeners().await.expect("Unable to get Unit listeners"); + } + None => { + eprintln!("No running unitd instances found - skipping test"); + } + } + } +} diff --git a/tools/unitctl/unit-client-rs/src/unitd_cmd.rs b/tools/unitctl/unit-client-rs/src/unitd_cmd.rs new file mode 100644 index 00000000..17563cb0 --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/unitd_cmd.rs @@ -0,0 +1,88 @@ +use std::error::Error as StdError; +use std::io::{Error as IoError, ErrorKind}; + +use crate::runtime_flags::RuntimeFlags; +use std::path::{Path, PathBuf}; + +#[derive(Debug, Clone)] +pub struct UnitdCmd { + pub(crate) 
process_executable_path: Option<Box<Path>>, + pub version: Option<String>, + pub flags: Option<RuntimeFlags>, +} + +impl UnitdCmd { + pub(crate) fn new<S>(full_cmd: S, binary_name: &str) -> Result<UnitdCmd, Box<dyn StdError>> + where + S: Into<String>, + { + let process_cmd: String = full_cmd.into(); + let parsable = process_cmd + .strip_prefix("unit: main v") + .and_then(|s| s.strip_suffix(']')); + if parsable.is_none() { + let msg = format!("cmd does not have the expected format: {}", process_cmd); + return Err(IoError::new(ErrorKind::InvalidInput, msg).into()); + } + let parts = parsable + .expect("Unable to parse cmd") + .splitn(2, " [") + .collect::<Vec<&str>>(); + + if parts.len() != 2 { + let msg = format!("cmd does not have the expected format: {}", process_cmd); + return Err(IoError::new(ErrorKind::InvalidInput, msg).into()); + } + + let version = Some(parts[0].to_string()); + let executable_path = UnitdCmd::parse_executable_path_from_cmd(parts[1], binary_name); + let flags = UnitdCmd::parse_runtime_flags_from_cmd(parts[1]); + + Ok(UnitdCmd { + process_executable_path: executable_path, + version, + flags, + }) + } + + fn parse_executable_path_from_cmd<S>(full_cmd: S, binary_name: &str) -> Option<Box<Path>> + where + S: Into<String>, + { + let cmd = full_cmd.into(); + if cmd.is_empty() { + return None; + } + + let split = cmd.splitn(2, binary_name).collect::<Vec<&str>>(); + if split.is_empty() { + return None; + } + + let path = format!("{}{}", split[0], binary_name); + Some(PathBuf::from(path).into_boxed_path()) + } + + fn parse_runtime_flags_from_cmd<S>(full_cmd: S) -> Option<RuntimeFlags> + where + S: Into<String>, + { + let cmd = full_cmd.into(); + if cmd.is_empty() { + return None; + } + + // Split out everything in between the brackets [ and ] + let split = cmd.trim_end_matches(']').splitn(2, '[').collect::<Vec<&str>>(); + if split.is_empty() { + return None; + } + /* Now we need to parse a string like this: + * ./sbin/unitd --no-daemon --tmp /tmp + * and only return what is after the invoking command */ + split[0] + .find("--") + .map(|index| cmd[index..].to_string()) + .map(RuntimeFlags::new) + } +} diff --git a/tools/unitctl/unit-client-rs/src/unitd_configure_options.rs b/tools/unitctl/unit-client-rs/src/unitd_configure_options.rs new file mode 100644 index 00000000..00ee22a3 --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/unitd_configure_options.rs @@ -0,0 +1,236 @@ +use custom_error::custom_error; +use std::borrow::Cow; +use std::error::Error as stdError; +use std::io::{BufRead, BufReader, Lines}; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; + +custom_error! 
{UnitdStderrParseError + VersionNotFound = "Version string output not found", + BuildSettingsNotFound = "Build settings not found" +} + +#[derive(Debug, Clone)] +pub struct UnitdConfigureOptions { + pub version: Cow<'static, str>, + pub all_flags: Cow<'static, str>, +} + +impl UnitdConfigureOptions { + pub fn new(unitd_path: &Path) -> Result<UnitdConfigureOptions, Box<dyn stdError>> { + fn parse_configure_settings_from_unitd_stderr_output<B: BufRead>( + lines: &mut Lines<B>, + ) -> Result<UnitdConfigureOptions, Box<dyn stdError>> { + const VERSION_PREFIX: &str = "unit version: "; + const CONFIGURED_AS_PREFIX: &str = "configured as "; + const CONFIGURE_PREFIX: &str = "configured as ./configure "; + + fn aggregate_parsable_lines( + mut accum: (Option<String>, Option<String>), + line: String, + ) -> (Option<String>, Option<String>) { + if line.starts_with(VERSION_PREFIX) { + accum.0 = line.strip_prefix(VERSION_PREFIX).map(|l| l.to_string()); + } else if line.starts_with(CONFIGURED_AS_PREFIX) { + accum.1 = line.strip_prefix(CONFIGURE_PREFIX).map(|l| l.to_string()); + } + + accum + } + + let options_lines = lines + .filter_map(|line| line.ok()) + .fold((None, None), aggregate_parsable_lines); + + if options_lines.0.is_none() { + return Err(Box::new(UnitdStderrParseError::VersionNotFound) as Box<dyn stdError>); + } else if options_lines.1.is_none() { + return Err(Box::new(UnitdStderrParseError::BuildSettingsNotFound) as Box<dyn stdError>); + } + + Ok(UnitdConfigureOptions { + version: options_lines.0.unwrap().into(), + all_flags: options_lines.1.unwrap().into(), + }) + } + + let program = unitd_path.as_os_str(); + let child = Command::new(program) + .arg("--version") + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn()?; + let output = child.wait_with_output()?; + let err = BufReader::new(&*output.stderr); + parse_configure_settings_from_unitd_stderr_output(&mut err.lines()) + } + + pub fn has_flag(&self, flag_name: &str) -> bool { + self.all_flags + .split_ascii_whitespace() + .any(|flag| flag.starts_with(format!("--{}", flag_name).as_str())) + } + + pub fn get_flag_value(&self, flag_name: &str) -> Option<String> { + self.all_flags + .split_ascii_whitespace() + .find(|flag| flag.starts_with(format!("--{}", flag_name).as_str())) + .and_then(|flag| { + let parts: Vec<&str> = flag.split('=').collect(); + if parts.len() >= 2 { + Some(parts[1].to_owned()) + } else { + None + } + }) + } + + pub fn debug_enabled(&self) -> bool { + self.has_flag("debug") + } + + pub fn openssl_enabled(&self) -> bool { + self.has_flag("openssl") + } + + pub fn prefix_path(&self) -> Option<Box<Path>> { + self.get_flag_value("prefix") + .map(PathBuf::from) + .map(PathBuf::into_boxed_path) + } + + fn join_to_prefix_path<S>(&self, sub_path: S) -> Option<Box<Path>> + where + S: Into<String>, + { + self.prefix_path() + .map(|path| path.join(sub_path.into()).into_boxed_path()) + } + + pub fn default_control_api_socket_address(&self) -> Option<String> { + // If the socket address is specific configured in the configure options, we use + // that. Otherwise, we use the default path as assumed to be unix:$prefix/control.unit.sock. 
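+        // Illustrative examples: with `--prefix=/usr/local` and no `--control` flag
+        // this resolves to `unix:/usr/local/control.unit.sock`; if unitd was built
+        // with `--no-unix-sockets` and no `--control` flag was given, there is no
+        // usable default and None is returned.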
+ match self.get_flag_value("control") { + Some(socket_address) => Some(socket_address), + None => { + // Give up if the unitd is compiled with unix sockets disabled + if self.has_flag("no-unix-sockets") { + return None; + } + let socket_path = self.join_to_prefix_path("control.unit.sock"); + socket_path.map(|path| format!("unix:{}", path.to_string_lossy())) + } + } + } + + pub fn default_pid_path(&self) -> Option<Box<Path>> { + match self.get_flag_value("pid") { + Some(pid_path) => self.join_to_prefix_path(pid_path), + None => self.join_to_prefix_path("unit.pid"), + } + } + + pub fn default_log_path(&self) -> Option<Box<Path>> { + match self.get_flag_value("log") { + Some(pid_path) => self.join_to_prefix_path(pid_path), + None => self.join_to_prefix_path("unit.log"), + } + } + + pub fn default_modules_directory(&self) -> Option<Box<Path>> { + match self.get_flag_value("modules") { + Some(modules_dir_name) => self.join_to_prefix_path(modules_dir_name), + None => self.join_to_prefix_path("modules"), + } + } + + pub fn default_state_directory(&self) -> Option<Box<Path>> { + match self.get_flag_value("state") { + Some(state_dir_name) => self.join_to_prefix_path(state_dir_name), + None => self.join_to_prefix_path("state"), + } + } + + pub fn default_tmp_directory(&self) -> Option<Box<Path>> { + match self.get_flag_value("tmp") { + Some(tmp_dir_name) => self.join_to_prefix_path(tmp_dir_name), + None => self.join_to_prefix_path("tmp"), + } + } + pub fn default_user(&self) -> Option<String> { + self.get_flag_value("user").map(String::from) + } + pub fn default_group(&self) -> Option<String> { + self.get_flag_value("group").map(String::from) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::unitd_instance; + use crate::unitd_instance::UNITD_PATH_ENV_KEY; + + #[test] + fn can_detect_key() { + let options = UnitdConfigureOptions { + version: Default::default(), + all_flags: Cow::from("--debug --openssl --prefix=/opt/unit"), + }; + assert!(options.has_flag("debug")); + assert!(options.has_flag("openssl")); + assert!(options.has_flag("prefix")); + assert!(!options.has_flag("fobar")); + } + + #[test] + fn can_get_flag_value_by_key() { + let expected = "/opt/unit"; + let options = UnitdConfigureOptions { + version: Default::default(), + all_flags: Cow::from("--debug --openssl --prefix=/opt/unit"), + }; + + let actual = options.get_flag_value("prefix"); + assert_eq!(expected, actual.unwrap()) + } + + #[test] + fn can_get_prefix_path() { + let expected: Box<Path> = Path::new("/opt/unit").into(); + let options = UnitdConfigureOptions { + version: Default::default(), + all_flags: Cow::from("--debug --openssl --prefix=/opt/unit"), + }; + + let actual = options.prefix_path(); + assert_eq!(expected, actual.unwrap()) + } + + #[test] + fn can_parse_complicated_configure_options() { + let expected: Box<Path> = Path::new("/usr").into(); + let options = UnitdConfigureOptions { + version: Default::default(), + all_flags: Cow::from("--prefix=/usr --state=/var/lib/unit --control=unix:/var/run/control.unit.sock --pid=/var/run/unit.pid --log=/var/log/unit.log --tmp=/var/tmp --user=unit --group=unit --tests --openssl --modules=/usr/lib/unit/modules --libdir=/usr/lib/x86_64-linux-gnu --cc-opt='-g -O2 -fdebug-prefix-map=/data/builder/debuild/unit-1.28.0/pkg/deb/debuild/unit-1.28.0=. 
-specs=/usr/share/dpkg/no-pie-compile.specs -fstack-protector-strong -Wformat -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -fPIC' --ld-opt='-Wl,-Bsymbolic-functions -specs=/usr/share/dpkg/no-pie-link.specs -Wl,-z,relro -Wl,-z,now -Wl,--as-needed -pie' +"), + }; + + let actual = options.prefix_path(); + assert_eq!(expected, actual.unwrap()) + } + + #[test] + #[ignore] // run this one manually - not in CI + fn can_run_unitd() { + let specific_path = std::env::var(UNITD_PATH_ENV_KEY).map_err(|error| Box::new(error) as Box<dyn stdError>); + let unitd_path = unitd_instance::find_executable_path(specific_path); + let config_options = UnitdConfigureOptions::new(&unitd_path.unwrap()); + match config_options { + Ok(options) => { + println!("{:?}", options) + } + Err(error) => panic!("{}", error), + }; + } +} diff --git a/tools/unitctl/unit-client-rs/src/unitd_docker.rs b/tools/unitctl/unit-client-rs/src/unitd_docker.rs new file mode 100644 index 00000000..2b9e0c7d --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/unitd_docker.rs @@ -0,0 +1,456 @@ +use std::collections::HashMap; +use std::fs::read_to_string; +use std::io::stderr; +use std::path::{PathBuf, MAIN_SEPARATOR}; + +use crate::control_socket_address::ControlSocket; +use crate::futures::StreamExt; +use crate::unit_client::UnitClientError; +use crate::unitd_process::UnitdProcess; + +use bollard::container::{Config, ListContainersOptions, StartContainerOptions}; +use bollard::image::CreateImageOptions; +use bollard::models::{ContainerCreateResponse, ContainerSummary, HostConfig, Mount, MountTypeEnum}; +use bollard::secret::ContainerInspectResponse; +use bollard::Docker; + +use regex::Regex; + +use serde::ser::SerializeMap; +use serde::{Serialize, Serializer}; + +use pbr::ProgressBar; + +#[derive(Clone, Debug)] +pub struct UnitdContainer { + pub container_id: Option<String>, + pub container_image: String, + pub command: Option<String>, + pub mounts: HashMap<PathBuf, PathBuf>, + pub platform: String, + details: Option<ContainerInspectResponse>, +} + +impl From<&ContainerSummary> for UnitdContainer { + fn from(ctr: &ContainerSummary) -> Self { + // we assume paths from the docker api are absolute + // they certainly have to be later... 
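+        // The map is keyed by the path inside the container and valued with the
+        // corresponding host path: e.g. a bind mount of `/home/user/app:/www`
+        // (a made-up example) is stored as { "/www" => "/home/user/app" }, which
+        // host_path() later uses to translate container paths reported for unitd
+        // back into host paths.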
+ let mut mounts = HashMap::new(); + if let Some(mts) = &ctr.mounts { + for i in mts { + if let Some(ref src) = i.source { + if let Some(ref dest) = i.destination { + mounts.insert(PathBuf::from(dest.clone()), PathBuf::from(src.clone())); + } + } + } + } + + UnitdContainer { + container_id: ctr.id.clone(), + container_image: format!( + "{} (docker)", + ctr.image.clone().unwrap_or(String::from("unknown container")), + ), + command: ctr.command.clone(), + mounts: mounts, + platform: String::from("Docker"), + details: None, + } + } +} + +impl From<&UnitdContainer> for UnitdProcess { + fn from(ctr: &UnitdContainer) -> Self { + let version = ctr.details.as_ref().and_then(|details| { + details.config.as_ref().and_then(|conf| { + conf.labels.as_ref().and_then(|labels| { + labels + .get("org.opencontainers.image.version") + .and_then(|version| Some(version.clone())) + }) + }) + }); + let command = ctr.command.clone().and_then(|cmd| { + Some(format!( + "{}{} [{}{}]", + "unit: main v", + version.or(Some(String::from(""))).unwrap(), + ctr.container_image, + ctr.rewrite_socket( + cmd.strip_prefix("/usr/local/bin/docker-entrypoint.sh") + .or_else(|| Some("")) + .unwrap() + .to_string() + ) + )) + }); + let mut cmds = vec![]; + let _ = command.map_or((), |cmd| cmds.push(cmd)); + UnitdProcess { + all_cmds: cmds, + binary_name: ctr.container_image.clone(), + process_id: ctr + .details + .as_ref() + .and_then(|details| { + details + .state + .as_ref() + .and_then(|state| state.pid.and_then(|pid| Some(pid.clone() as u64))) + }) + .or(Some(0 as u64)) + .unwrap(), + executable_path: None, + environ: vec![], + working_dir: ctr.details.as_ref().and_then(|details| { + details.config.as_ref().and_then(|conf| { + Some( + PathBuf::from( + conf.working_dir + .as_ref() + .map_or(String::new(), |dir| ctr.host_path(dir.clone())), + ) + .into_boxed_path(), + ) + }) + }), + child_pids: vec![], + user: None, + effective_user: None, + container: Some(ctr.clone()), + } + } +} + +impl Serialize for UnitdContainer { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + // 5 = fields to serialize + let mut state = serializer.serialize_map(Some(5))?; + state.serialize_entry("container_id", &self.container_id)?; + state.serialize_entry("container_image", &self.container_image)?; + state.serialize_entry("command", &self.command)?; + state.serialize_entry("mounts", &self.mounts)?; + state.serialize_entry("platform", &self.platform)?; + state.end() + } +} + +impl UnitdContainer { + pub async fn find_unitd_containers() -> Vec<UnitdContainer> { + if let Ok(docker) = Docker::connect_with_local_defaults() { + match docker.list_containers::<String>(None).await { + Err(e) => { + eprintln!("{}", e); + vec![] + } + Ok(summary) => { + let unitd_command_re = Regex::new(r"^(.* )?unitd( .*)?$").unwrap(); + + // cant do this functionally because of the async call + let mut mapped = vec![]; + for ctr in summary { + if unitd_command_re.is_match(&ctr.clone().command.or(Some(String::new())).unwrap()) { + let mut c = UnitdContainer::from(&ctr); + if let Some(names) = ctr.names { + if names.len() > 0 { + let name = names[0].strip_prefix("/").or(Some(names[0].as_str())).unwrap(); + if let Ok(cir) = docker.inspect_container(name, None).await { + c.details = Some(cir); + } + } + } + mapped.push(c); + } + } + mapped + } + } + } else { + vec![] + } + } + + pub fn host_path(&self, container_path: String) -> String { + let cp = PathBuf::from(container_path); + // get only possible mount points + // sort to deepest 
mountpoint first + // assumed deepest possible mount point takes precedence + let mut keys = self + .mounts + .clone() + .into_keys() + .filter(|mp| cp.as_path().starts_with(mp)) + .collect::<Vec<_>>(); + keys.sort_by_key(|a| 0 as isize - a.ancestors().count() as isize); + + // either return translated path or original prefixed with "container" + if keys.len() > 0 { + let mut matches = self.mounts[&keys[0]].clone().join( + cp.as_path() + .strip_prefix(keys[0].clone()) + .expect("error checking path prefix"), + ); + /* Observed on M1 Mac that Docker on OSX + * adds a bunch of garbage to the mount path + * converting it into a useless directory + * that doesnt actually exist + */ + if cfg!(target_os = "macos") { + let mut abs = PathBuf::from(String::from(MAIN_SEPARATOR)); + let m = matches + .strip_prefix("/host_mnt/private") + .unwrap_or(matches.strip_prefix("/host_mnt").unwrap_or(matches.as_path())); + // make it absolute again + abs.push(m); + matches = abs; + } + matches.to_string_lossy().to_string() + } else { + format!("<container>:{}", cp.display()) + } + } + + pub fn rewrite_socket(&self, command: String) -> String { + command + .split(" ") + .map(|tok| { + if tok.starts_with("unix:") { + format!( + "unix:{}", + self.host_path(tok.strip_prefix("unix:").unwrap().to_string()) + ) + } else { + tok.to_string() + } + }) + .collect::<Vec<_>>() + .join(" ") + } + + pub fn container_is_running(&self) -> Option<bool> { + self.details + .as_ref() + .and_then(|details| details.state.as_ref().and_then(|state| state.running)) + } +} + +/* deploys a new docker image of tag $image_tag. + * mounts $socket to /var/run in the new container. + * mounts $application read only to /www. + * new container is on host network. + * + * ON SUCCESS returns vector of warnings from Docker API + * ON FAILURE returns wrapped error from Docker API + */ +pub async fn deploy_new_container( + socket: ControlSocket, + application: &String, + application_read_only: bool, + image: &String, +) -> Result<Vec<String>, UnitClientError> { + match Docker::connect_with_local_defaults() { + Ok(docker) => { + let mut mounts = vec![]; + // if a unix socket is specified, mounts its directory + if socket.is_local_socket() { + let mount_path = PathBuf::from(socket.clone()).as_path().to_string_lossy().to_string(); + mounts.push(Mount { + typ: Some(MountTypeEnum::BIND), + source: Some(mount_path), + target: Some("/var/run".to_string()), + ..Default::default() + }); + } + // mount application dir + mounts.push(Mount { + typ: Some(MountTypeEnum::BIND), + source: Some(application.clone()), + target: Some("/www".to_string()), + read_only: Some(application_read_only), + ..Default::default() + }); + + let mut pb = ProgressBar::on(stderr(), 10); + let mut totals = HashMap::new(); + let mut stream = docker.create_image( + Some(CreateImageOptions { + from_image: image.as_str(), + ..Default::default() + }), + None, + None, + ); + while let Some(res) = stream.next().await { + if let Ok(info) = res { + if let Some(id) = info.id { + if let Some(_) = totals.get_mut(&id) { + if let Some(delta) = info.progress_detail.and_then(|detail| detail.current) { + pb.add(delta as u64); + } + } else { + if let Some(total) = info.progress_detail.and_then(|detail| detail.total) { + totals.insert(id, total); + pb.total += total as u64; + } + } + } + } + } + pb.finish(); + + // create the new unit container + let resp: ContainerCreateResponse; + let host_conf = HostConfig { + mounts: Some(mounts), + network_mode: Some("host".to_string()), + ..Default::default() + }; 
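+            // With `network_mode: host` the container shares the host's network
+            // namespace, so a TCP control listener configured below is reachable on
+            // the host at the same address; for unix sockets the /var/run bind mount
+            // above serves the analogous purpose.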
+ let mut container_conf = Config { + image: Some(image.clone()), + ..Default::default() + }; + if let ControlSocket::TcpSocket(ref uri) = socket { + let port = uri.port_u16().or(Some(80)).unwrap(); + // override port + container_conf.cmd = Some(vec![ + "unitd".to_string(), + "--no-daemon".to_string(), + "--control".to_string(), + format!("{}:{}", uri.host().unwrap(), port), + ]); + } + container_conf.host_config = Some(host_conf); + match docker.create_container::<String, String>(None, container_conf).await { + Err(err) => { + return Err(UnitClientError::UnitdDockerError { + message: err.to_string(), + }) + } + Ok(response) => resp = response, + } + + // create container gives us an ID + // but start container requires a name + let mut list_container_filters = HashMap::new(); + list_container_filters.insert("id".to_string(), vec![resp.id]); + match docker + .list_containers::<String>(Some(ListContainersOptions { + all: true, + limit: None, + size: false, + filters: list_container_filters, + })) + .await + { + // somehow our container doesnt exist + Err(e) => Err(UnitClientError::UnitdDockerError { message: e.to_string() }), + // here it is! + Ok(info) => { + if info.len() < 1 { + return Err(UnitClientError::UnitdDockerError { + message: "couldnt find new container".to_string(), + }); + } else if info[0].names.is_none() || info[0].names.clone().unwrap().len() < 1 { + return Err(UnitClientError::UnitdDockerError { + message: "new container has no name".to_string(), + }); + } + + // start our container + match docker + .start_container( + info[0].names.clone().unwrap()[0].strip_prefix(MAIN_SEPARATOR).unwrap(), + None::<StartContainerOptions<String>>, + ) + .await + { + Err(err) => Err(UnitClientError::UnitdDockerError { + message: err.to_string(), + }), + Ok(_) => Ok(resp.warnings), + } + } + } + } + Err(e) => Err(UnitClientError::UnitdDockerError { message: e.to_string() }), + } +} + +/* Returns either 64 char docker container ID or None */ +pub fn pid_is_dockerized(pid: u64) -> bool { + let cg_filepath = format!("/proc/{}/cgroup", pid); + match read_to_string(cg_filepath) { + Err(e) => { + eprintln!("{}", e); + false + } + Ok(contents) => { + let docker_re = Regex::new(r"docker-([a-zA-Z0-9]{64})").unwrap(); + docker_re.is_match(contents.as_str()) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_path_translation() { + let mut mounts = HashMap::new(); + mounts.insert("/1/2/3/4/5/6/7".into(), "/0".into()); + mounts.insert("/root".into(), "/1".into()); + mounts.insert("/root/mid".into(), "/2".into()); + mounts.insert("/root/mid/child".into(), "/3".into()); + mounts.insert("/mid/child".into(), "/4".into()); + mounts.insert("/child".into(), "/5".into()); + mounts.insert("/var".into(), "/host_mnt/private/6".into()); + mounts.insert("/var/var".into(), "/host_mnt/7".into()); + + let ctr = UnitdContainer { + container_id: None, + container_image: String::from(""), + command: None, + platform: "test".to_string(), + details: None, + mounts: mounts, + }; + + assert_eq!( + "/3/c2/test".to_string(), + ctr.host_path("/root/mid/child/c2/test".to_string()) + ); + assert_eq!( + "<container>:/path/to/conf".to_string(), + ctr.host_path("/path/to/conf".to_string()) + ); + if cfg!(target_os = "macos") { + assert_eq!("/6/test".to_string(), ctr.host_path("/var/test".to_string())); + assert_eq!("/7/test".to_string(), ctr.host_path("/var/var/test".to_string())); + } + } + + #[test] + fn test_unix_sock_path_translate() { + let mut mounts = HashMap::new(); + 
mounts.insert("/var/run".into(), "/tmp".into()); + + let ctr = UnitdContainer { + container_id: None, + container_image: String::from(""), + command: None, + platform: "test".to_string(), + details: None, + mounts: mounts, + }; + + assert_eq!( + ctr.rewrite_socket("unitd --no-daemon --control unix:/var/run/control.unit.sock".to_string()), + "unitd --no-daemon --control unix:/tmp/control.unit.sock".to_string() + ); + } +} diff --git a/tools/unitctl/unit-client-rs/src/unitd_instance.rs b/tools/unitctl/unit-client-rs/src/unitd_instance.rs new file mode 100644 index 00000000..ace8e858 --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/unitd_instance.rs @@ -0,0 +1,403 @@ +use crate::unit_client::UnitClientError; +use crate::unitd_docker::UnitdContainer; +use serde::ser::SerializeMap; +use serde::{Serialize, Serializer}; +use std::error::Error as StdError; +use std::path::{Path, PathBuf}; +use std::{fmt, io}; +use which::which; + +use crate::runtime_flags::RuntimeFlags; +use crate::unitd_configure_options::UnitdConfigureOptions; +use crate::unitd_process::UnitdProcess; + +pub const UNITD_PATH_ENV_KEY: &str = "UNITD_PATH"; +pub const UNITD_BINARY_NAMES: [&str; 2] = ["unitd", "unitd-debug"]; + +#[derive(Debug)] +pub struct UnitdInstance { + pub process: UnitdProcess, + pub configure_options: Option<UnitdConfigureOptions>, + pub errors: Vec<UnitClientError>, +} + +impl Serialize for UnitdInstance { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + // 11 = fields to serialize + let mut state = serializer.serialize_map(Some(11))?; + let runtime_flags = self + .process + .cmd() + .and_then(|cmd| cmd.flags) + .map(|flags| flags.to_string()); + + let configure_flags = self.configure_options.as_ref().map(|opts| opts.all_flags.clone()); + + state.serialize_entry("process", &self.process)?; + state.serialize_entry("version", &self.version())?; + state.serialize_entry("control_socket", &self.control_api_socket_address())?; + state.serialize_entry("log_path", &self.log_path())?; + state.serialize_entry("pid_path", &self.pid_path())?; + state.serialize_entry("modules_directory", &self.modules_directory())?; + state.serialize_entry("state_directory", &self.state_directory())?; + state.serialize_entry("tmp_directory", &self.tmp_directory())?; + state.serialize_entry("runtime_flags", &runtime_flags)?; + state.serialize_entry("configure_flags", &configure_flags)?; + let string_errors = &self.errors.iter().map(|e| e.to_string()).collect::<Vec<String>>(); + state.serialize_entry("errors", string_errors)?; + + state.end() + } +} + +impl UnitdInstance { + pub async fn running_unitd_instances() -> Vec<UnitdInstance> { + Self::collect_unitd_processes( + UnitdProcess::find_unitd_processes() + .into_iter() + .chain( + UnitdContainer::find_unitd_containers() + .await + .into_iter() + .map(|x| UnitdProcess::from(&x)) + .collect::<Vec<_>>(), + ) + .collect(), + ) + } + + /// Find all running unitd processes and convert them into UnitdInstances and filter + /// out all errors by printing them to stderr and leaving errored instances out of + /// the returned vector. 
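+    ///
+    /// Containers discovered through the Docker API are converted into synthetic
+    /// `UnitdProcess` values by `running_unitd_instances()` above, so host processes
+    /// and containerized instances flow through the same mapping here.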
+ fn collect_unitd_processes(processes: Vec<UnitdProcess>) -> Vec<UnitdInstance> { + Self::map_processes_to_instances(processes).into_iter().collect() + } + + fn map_processes_to_instances(processes: Vec<UnitdProcess>) -> Vec<UnitdInstance> { + fn unitd_path_from_process(process: &UnitdProcess) -> Result<Box<Path>, UnitClientError> { + match process.executable_path() { + Some(executable_path) => { + let is_absolute_working_dir = process + .working_dir + .as_ref() + .map(|p| p.is_absolute()) + .unwrap_or_default(); + if executable_path.is_absolute() { + Ok(executable_path.to_owned()) + } else if executable_path.is_relative() && is_absolute_working_dir { + let new_path = process + .working_dir + .as_ref() + .unwrap() + .join(executable_path) + .canonicalize() + .map(|path| path.into_boxed_path()) + .map_err(|error| UnitClientError::UnitdProcessParseError { + message: format!("Error canonicalizing unitd executable path: {}", error), + pid: process.process_id, + })?; + Ok(new_path) + } else if process.container.is_none() { + Err(UnitClientError::UnitdProcessParseError { + message: "Unable to get absolute unitd executable path from process".to_string(), + pid: process.process_id, + }) + } else { + // container case + Ok(PathBuf::from("/").into_boxed_path()) + } + } + None => Err(UnitClientError::UnitdProcessParseError { + message: "Unable to get unitd executable path from process".to_string(), + pid: process.process_id, + }), + } + } + + fn map_process_to_unitd_instance(process: &UnitdProcess) -> UnitdInstance { + match unitd_path_from_process(process) { + Ok(_) if process.container.is_some() => { + let mut err = vec![]; + // double check that it is running + let running = process.container.as_ref().unwrap().container_is_running(); + + if running.is_none() || !running.unwrap() { + err.push(UnitClientError::UnitdProcessParseError { + message: "process container is not running".to_string(), + pid: process.process_id, + }); + } + + UnitdInstance { + process: process.to_owned(), + configure_options: None, + errors: err, + } + } + Ok(unitd_path) => match UnitdConfigureOptions::new(&unitd_path.clone().into_path_buf()) { + Ok(configure_options) => UnitdInstance { + process: process.to_owned(), + configure_options: Some(configure_options), + errors: vec![], + }, + Err(error) => { + let error = UnitClientError::UnitdProcessExecError { + source: error, + executable_path: unitd_path.to_string_lossy().parse().unwrap_or_default(), + message: "Error running unitd binary to get configure options".to_string(), + pid: process.process_id, + }; + UnitdInstance { + process: process.to_owned(), + configure_options: None, + errors: vec![error], + } + } + }, + Err(err) => UnitdInstance { + process: process.to_owned(), + configure_options: None, + errors: vec![err], + }, + } + } + + processes + .iter() + // This converts processes into a UnitdInstance + .map(map_process_to_unitd_instance) + .collect() + } + + fn version(&self) -> Option<String> { + match self.process.cmd()?.version { + Some(version) => Some(version), + None => self.configure_options.as_ref().map(|opts| opts.version.to_string()), + } + } + + fn flag_or_default_option<R>( + &self, + read_flag: fn(RuntimeFlags) -> Option<R>, + read_opts: fn(UnitdConfigureOptions) -> Option<R>, + ) -> Option<R> { + self.process + .cmd()? 
+ .flags + .and_then(read_flag) + .or_else(|| self.configure_options.to_owned().and_then(read_opts)) + } + + pub fn control_api_socket_address(&self) -> Option<String> { + self.flag_or_default_option( + |flags| flags.control_api_socket_address(), + |opts| opts.default_control_api_socket_address(), + ) + } + + pub fn pid_path(&self) -> Option<Box<Path>> { + self.flag_or_default_option(|flags| flags.pid_path(), |opts| opts.default_pid_path()) + } + + pub fn log_path(&self) -> Option<Box<Path>> { + self.flag_or_default_option(|flags| flags.log_path(), |opts| opts.default_log_path()) + } + + pub fn modules_directory(&self) -> Option<Box<Path>> { + self.flag_or_default_option( + |flags| flags.modules_directory(), + |opts| opts.default_modules_directory(), + ) + } + + pub fn state_directory(&self) -> Option<Box<Path>> { + self.flag_or_default_option(|flags| flags.state_directory(), |opts| opts.default_state_directory()) + } + + pub fn tmp_directory(&self) -> Option<Box<Path>> { + self.flag_or_default_option(|flags| flags.tmp_directory(), |opts| opts.default_tmp_directory()) + } +} + +impl fmt::Display for UnitdInstance { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + const UNKNOWN: &str = "[unknown]"; + let version = self.version().unwrap_or_else(|| String::from("[unknown]")); + let runtime_flags = self + .process + .cmd() + .and_then(|cmd| cmd.flags) + .map(|flags| flags.to_string()) + .unwrap_or_else(|| UNKNOWN.into()); + let configure_flags = self + .configure_options + .as_ref() + .map(|opts| opts.all_flags.clone()) + .unwrap_or_else(|| UNKNOWN.into()); + let unitd_path: String = self + .process + .executable_path() + .map(|p| p.to_string_lossy().into()) + .unwrap_or_else(|| UNKNOWN.into()); + let working_dir: String = self + .process + .working_dir + .as_ref() + .map(|p| p.to_string_lossy().into()) + .unwrap_or_else(|| UNKNOWN.into()); + let socket_address = self.control_api_socket_address().unwrap_or_else(|| UNKNOWN.to_string()); + let child_pids = self + .process + .child_pids + .iter() + .map(u64::to_string) + .collect::<Vec<String>>() + .join(", "); + + writeln!( + f, + "{} instance [pid: {}, version: {}]:", + self.process.binary_name, self.process.process_id, version + )?; + writeln!(f, " Executable: {}", unitd_path)?; + writeln!(f, " Process working directory: {}", working_dir)?; + write!(f, " Process ownership: ")?; + if let Some(user) = &self.process.user { + writeln!(f, "name: {}, uid: {}, gid: {}", user.name, user.uid, user.gid)?; + } else { + writeln!(f, "{}", UNKNOWN)?; + } + write!(f, " Process effective ownership: ")?; + if let Some(user) = &self.process.effective_user { + writeln!(f, "name: {}, uid: {}, gid: {}", user.name, user.uid, user.gid)?; + } else { + writeln!(f, "{}", UNKNOWN)?; + } + + writeln!(f, " API control unix socket: {}", socket_address)?; + writeln!(f, " Child processes ids: {}", child_pids)?; + writeln!(f, " Runtime flags: {}", runtime_flags)?; + writeln!(f, " Configure options: {}", configure_flags)?; + + if let Some(ctr) = &self.process.container { + writeln!(f, " Container:")?; + writeln!(f, " Platform: {}", ctr.platform)?; + if let Some(id) = ctr.container_id.clone() { + writeln!(f, " Container ID: {}", id)?; + } + writeln!(f, " Mounts:")?; + for (k, v) in &ctr.mounts { + writeln!(f, " {} => {}", k.to_string_lossy(), v.to_string_lossy())?; + } + } + + if !self.errors.is_empty() { + write!(f, " Errors:")?; + for error in &self.errors { + write!(f, "\n {}", error)?; + } + } + + Ok(()) + } +} + +pub fn find_executable_path(specific_path: 
Result<String, Box<dyn StdError>>) -> Result<PathBuf, Box<dyn StdError>> { + fn find_unitd_in_system_path() -> Vec<PathBuf> { + UNITD_BINARY_NAMES + .iter() + .map(which) + .filter_map(Result::ok) + .collect::<Vec<PathBuf>>() + } + + match specific_path { + Ok(path) => Ok(PathBuf::from(path)), + Err(_) => { + let unitd_paths = find_unitd_in_system_path(); + if unitd_paths.is_empty() { + let err_msg = format!( + "Could not find unitd in system path or in UNITD_PATH environment variable. Searched for: {:?}", + UNITD_BINARY_NAMES + ); + let err = io::Error::new(io::ErrorKind::NotFound, err_msg); + Err(Box::from(err)) + } else { + Ok(unitd_paths[0].clone()) + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use rand::rngs::StdRng; + use rand::{RngCore, SeedableRng}; + + // We don't need a secure seed for testing, in fact it is better that we have a + // predictable value + const SEED: [u8; 32] = [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, + 30, 31, + ]; + #[tokio::test] + async fn can_find_unitd_instances() { + UnitdInstance::running_unitd_instances().await.iter().for_each(|p| { + println!("{:?}", p); + println!("Runtime Flags: {:?}", p.process.cmd().map(|c| c.flags)); + println!("Temp directory: {:?}", p.tmp_directory()); + }) + } + + fn mock_process<S: Into<String>>( + rng: &mut StdRng, + binary_name: S, + executable_path: Option<String>, + ) -> UnitdProcess { + UnitdProcess { + process_id: rng.next_u32() as u64, + binary_name: binary_name.into(), + executable_path: executable_path.map(|p| Box::from(Path::new(&p))), + environ: vec![], + all_cmds: vec![], + working_dir: Some(Box::from(Path::new("/opt/unit"))), + child_pids: vec![], + user: None, + effective_user: None, + container: None, + } + } + + #[test] + fn will_list_without_errors_valid_processes() { + let specific_path = std::env::var(UNITD_PATH_ENV_KEY).map_err(|error| Box::new(error) as Box<dyn StdError>); + let binding = match find_executable_path(specific_path) { + Ok(path) => path, + Err(error) => { + eprintln!("Could not find unitd executable path: {} - skipping test", error); + return; + } + }; + let binary_name = binding + .file_name() + .expect("Could not get binary name") + .to_string_lossy() + .to_string(); + let unitd_path = binding.to_string_lossy(); + let mut rng: StdRng = SeedableRng::from_seed(SEED); + + let processes = vec![ + mock_process(&mut rng, &binary_name, Some(unitd_path.to_string())), + mock_process(&mut rng, &binary_name, Some(unitd_path.to_string())), + ]; + let instances = UnitdInstance::collect_unitd_processes(processes); + // assert_eq!(instances.len(), 3); + instances.iter().for_each(|p| { + assert_eq!(p.errors.len(), 0, "Expected no errors, got: {:?}", p.errors); + }) + } +} diff --git a/tools/unitctl/unit-client-rs/src/unitd_process.rs b/tools/unitctl/unit-client-rs/src/unitd_process.rs new file mode 100644 index 00000000..3dc0c3af --- /dev/null +++ b/tools/unitctl/unit-client-rs/src/unitd_process.rs @@ -0,0 +1,196 @@ +use crate::unitd_cmd::UnitdCmd; +use crate::unitd_docker::{pid_is_dockerized, UnitdContainer}; +use crate::unitd_instance::UNITD_BINARY_NAMES; +use crate::unitd_process_user::UnitdProcessUser; +use serde::ser::SerializeMap; +use serde::{Serialize, Serializer}; +use std::collections::HashMap; +use std::path::Path; +use sysinfo::{Pid, Process, ProcessRefreshKind, System, UpdateKind, Users}; + +#[derive(Debug, Clone)] +pub struct UnitdProcess { + pub binary_name: String, + pub process_id: u64, + pub 
executable_path: Option<Box<Path>>, + pub environ: Vec<String>, + pub all_cmds: Vec<String>, + pub working_dir: Option<Box<Path>>, + pub child_pids: Vec<u64>, + pub user: Option<UnitdProcessUser>, + pub effective_user: Option<UnitdProcessUser>, + pub container: Option<UnitdContainer>, +} + +impl Serialize for UnitdProcess { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + // 6 = fields to serialize + let mut state = serializer.serialize_map(Some(6))?; + state.serialize_entry("pid", &self.process_id)?; + state.serialize_entry("user", &self.user)?; + state.serialize_entry("effective_user", &self.effective_user)?; + state.serialize_entry("executable", &self.executable_path())?; + state.serialize_entry("child_pids", &self.child_pids)?; + state.serialize_entry("container", &self.container)?; + state.end() + } +} + +impl UnitdProcess { + pub fn find_unitd_processes() -> Vec<UnitdProcess> { + let process_refresh_kind = ProcessRefreshKind::new() + .with_cmd(UpdateKind::Always) + .with_cwd(UpdateKind::Always) + .with_exe(UpdateKind::Always) + .with_user(UpdateKind::Always); + let refresh_kind = sysinfo::RefreshKind::new().with_processes(process_refresh_kind); + let sys = System::new_with_specifics(refresh_kind); + let unitd_processes: HashMap<&Pid, &Process> = sys + .processes() + .iter() + .filter(|p| { + let process_name = p.1.name(); + UNITD_BINARY_NAMES.contains(&process_name) + }) + .collect::<HashMap<&Pid, &Process>>(); + let users = Users::new_with_refreshed_list(); + + unitd_processes + .iter() + // Filter out child processes + .filter(|p| { + #[cfg(target_os = "linux")] + if pid_is_dockerized(p.0.as_u32().into()) { + return false; + } + let parent_pid = p.1.parent(); + match parent_pid { + Some(pid) => !unitd_processes.contains_key(&pid), + None => false, + } + }) + .map(|p| { + let tuple = p.to_owned(); + /* The sysinfo library only supports 32-bit pids, yet larger values are possible + * if the OS is configured to support it, thus we use 64-bit integers internally + * because it is just a matter of time until the library changes to larger values. 
*/ + let pid = *tuple.0; + let process = *tuple.1; + let process_id: u64 = pid.as_u32().into(); + let executable_path: Option<Box<Path>> = process.exe().map(|p| p.to_path_buf().into_boxed_path()); + let environ: Vec<String> = process.environ().into(); + let cmd: Vec<String> = process.cmd().into(); + let working_dir: Option<Box<Path>> = process.cwd().map(|p| p.to_path_buf().into_boxed_path()); + let child_pids = unitd_processes + .iter() + .filter_map(|p| p.to_owned().1.parent()) + .filter(|parent_pid| parent_pid == pid) + .map(|p| p.as_u32() as u64) + .collect::<Vec<u64>>(); + + let user = process + .user_id() + .and_then(|uid| users.get_user_by_id(uid)) + .map(UnitdProcessUser::from); + let effective_user = process + .effective_user_id() + .and_then(|uid| users.get_user_by_id(uid)) + .map(UnitdProcessUser::from); + + UnitdProcess { + binary_name: process.name().to_string(), + process_id, + executable_path, + environ, + all_cmds: cmd, + working_dir, + child_pids, + user, + effective_user, + container: None, + } + }) + .collect::<Vec<UnitdProcess>>() + } + + pub fn cmd(&self) -> Option<UnitdCmd> { + if self.all_cmds.is_empty() { + return None; + } + + match UnitdCmd::new(self.all_cmds[0].clone(), self.binary_name.as_ref()) { + Ok(cmd) => Some(cmd), + Err(error) => { + eprintln!("Failed to parse process cmd: {}", error); + None + } + } + } + + pub fn executable_path(&self) -> Option<Box<Path>> { + if self.executable_path.is_some() { + return self.executable_path.clone(); + } + self.cmd().and_then(|cmd| cmd.process_executable_path) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn can_parse_runtime_cmd_absolute_path(binary_name: &str) { + let cmd = format!( + "unit: main v1.28.0 [/usr/sbin/{} --log /var/log/unit.log --pid /var/run/unit.pid]", + binary_name + ); + let unitd_cmd = UnitdCmd::new(cmd, binary_name).expect("Failed to parse unitd cmd"); + assert_eq!(unitd_cmd.version.unwrap(), "1.28.0"); + assert_eq!( + unitd_cmd.process_executable_path.unwrap().to_string_lossy(), + format!("/usr/sbin/{}", binary_name) + ); + let flags = unitd_cmd.flags.unwrap(); + assert_eq!(flags.get_flag_value("log").unwrap(), "/var/log/unit.log"); + assert_eq!(flags.get_flag_value("pid").unwrap(), "/var/run/unit.pid"); + } + + fn can_parse_runtime_cmd_relative_path(binary_name: &str) { + let cmd = format!( + "unit: main v1.29.0 [./sbin/{} --no-daemon --tmp /tmp --something]", + binary_name + ); + let unitd_cmd = UnitdCmd::new(cmd, binary_name).expect("Failed to parse unitd cmd"); + assert_eq!(unitd_cmd.version.unwrap(), "1.29.0"); + assert_eq!( + unitd_cmd.process_executable_path.unwrap().to_string_lossy(), + format!("./sbin/{}", binary_name) + ); + let flags = unitd_cmd.flags.unwrap(); + assert_eq!(flags.get_flag_value("tmp").unwrap(), "/tmp"); + assert!(flags.has_flag("something")); + } + + #[test] + fn can_parse_runtime_cmd_unitd_absolute_path() { + can_parse_runtime_cmd_absolute_path("unitd"); + } + + #[test] + fn can_parse_runtime_cmd_unitd_debug_absolute_path() { + can_parse_runtime_cmd_absolute_path("unitd-debug"); + } + + #[test] + fn can_parse_runtime_cmd_unitd_relative_path() { + can_parse_runtime_cmd_relative_path("unitd"); + } + + #[test] + fn can_parse_runtime_cmd_unitd_debug_relative_path() { + can_parse_runtime_cmd_relative_path("unitd-debug"); + } +} diff --git a/tools/unitctl/unit-client-rs/src/unitd_process_user.rs b/tools/unitctl/unit-client-rs/src/unitd_process_user.rs new file mode 100644 index 00000000..c4f9be22 --- /dev/null +++ 
b/tools/unitctl/unit-client-rs/src/unitd_process_user.rs @@ -0,0 +1,36 @@ +use serde::Serialize; +use std::fmt; +use std::fmt::Display; +use sysinfo::User; + +#[derive(Debug, Clone, Serialize)] +pub struct UnitdProcessUser { + pub name: String, + pub uid: u32, + pub gid: u32, + pub groups: Vec<String>, +} + +impl Display for UnitdProcessUser { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "name: {}, uid: {}, gid: {}, groups: {}", + self.name, + self.uid, + self.gid, + self.groups.join(", ") + ) + } +} + +impl From<&User> for UnitdProcessUser { + fn from(user: &User) -> Self { + UnitdProcessUser { + name: user.name().into(), + uid: *user.id().clone(), + gid: *user.group_id(), + groups: user.groups().iter().map(|g| g.name().into()).collect(), + } + } +} diff --git a/tools/unitctl/unit-openapi/.gitattributes b/tools/unitctl/unit-openapi/.gitattributes new file mode 100644 index 00000000..b4361577 --- /dev/null +++ b/tools/unitctl/unit-openapi/.gitattributes @@ -0,0 +1 @@ +README.md whitespace=-blank-at-eof diff --git a/tools/unitctl/unit-openapi/.gitignore b/tools/unitctl/unit-openapi/.gitignore new file mode 100644 index 00000000..830fc6b7 --- /dev/null +++ b/tools/unitctl/unit-openapi/.gitignore @@ -0,0 +1,4 @@ +.openapi-generator/ +/target/ +**/*.rs.bk +Cargo.lock diff --git a/tools/unitctl/unit-openapi/.openapi-generator-ignore b/tools/unitctl/unit-openapi/.openapi-generator-ignore new file mode 100644 index 00000000..aa9e0e40 --- /dev/null +++ b/tools/unitctl/unit-openapi/.openapi-generator-ignore @@ -0,0 +1,27 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. +# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). +# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md + +src/apis/error.rs +.travis.yml +git_push.sh
\ No newline at end of file diff --git a/tools/unitctl/unit-openapi/Cargo.toml b/tools/unitctl/unit-openapi/Cargo.toml new file mode 100644 index 00000000..c7a177f9 --- /dev/null +++ b/tools/unitctl/unit-openapi/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "unit-openapi" +version = "1.33.0" +authors = ["unit-owner@nginx.org"] +description = "NGINX Unit is a lightweight and versatile application runtime that provides the essential components for your web application as a single open-source server: running application code, serving static assets, handling TLS and request routing. **Important**: Unit's API is designed to expose any part of its configuration as an addressable endpoint. Suppose a JSON object is stored at `/config/listeners/`: ```json { \"*:8080\": { \"pass\": \"applications/wp_emea_dev\" } } ``` Here, `/config/listeners/_*:8080` and `/config/listeners/_*:8080/pass` are also endpoints. Generally, object options are addressable by their names, array items—by their indexes (`/array/0/`). **Note**: By default, Unit is configured through a UNIX domain socket. To use this specification with OpenAPI tools interactively, [start](https://unit.nginx.org/howto/source/#source-startup) Unit with a TCP port as the control socket." +license = "Apache 2.0" +edition = "2018" + +[dependencies] +serde = "1.0" +serde_derive = "1.0" +serde_json = "1.0" +url = "2.2" +hyper = { version = "0.14" } +http = "0.2" +base64 = "0.21" +futures = "0.3" diff --git a/tools/unitctl/unit-openapi/README.md b/tools/unitctl/unit-openapi/README.md new file mode 100644 index 00000000..3a792b6e --- /dev/null +++ b/tools/unitctl/unit-openapi/README.md @@ -0,0 +1,410 @@ +# Rust API client for unit-openapi + +NGINX Unit is a lightweight and versatile application runtime that provides the essential components for your web application as a single open-source server: running application code, serving static assets, handling TLS and request routing. + + +**Important**: Unit's API is designed to expose any part of its configuration as an addressable endpoint. Suppose a JSON object is stored at `/config/listeners/`: + + +```json { "*:8080": { "pass": "applications/wp_emea_dev" } } ``` + +Here, `/config/listeners/_*:8080` and `/config/listeners/_*:8080/pass` are also endpoints. Generally, object options are addressable by their names, array items—by their indexes (`/array/0/`). + + + +**Note**: By default, Unit is configured through a UNIX domain socket. To use this specification with OpenAPI tools interactively, [start](https://unit.nginx.org/howto/source/#source-startup) Unit with a TCP port as the control socket. + +For more information, please visit [https://unit.nginx.org/](https://unit.nginx.org/) + +## Overview + +This API client was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the [openapi-spec](https://openapis.org) from a remote server, you can easily generate an API client.
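+
+As a quick illustration of the addressable-endpoint model described above, the sketch below fetches a listener object and its `pass` option as two separate endpoints. It is a standalone example that calls the control API with `hyper` directly rather than through the generated client, and it assumes Unit's control API is reachable over TCP at `localhost:8080`, a `tokio` runtime, and `hyper` 0.14 compiled with its `client`, `http1`, and `tcp` features; none of these are implied by the dependency list above.
+
+```rust
+// Standalone sketch, not part of the generated client. Assumes:
+//   tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
+//   hyper 0.14 with the "client", "http1" and "tcp" features enabled.
+use hyper::{body, Client, Uri};
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let client = Client::new();
+
+    // The listener object and one of its options are both endpoints.
+    let endpoints = vec![
+        "http://localhost:8080/config/listeners/*:8080".parse::<Uri>()?,
+        "http://localhost:8080/config/listeners/*:8080/pass".parse::<Uri>()?,
+    ];
+
+    for uri in endpoints {
+        let response = client.get(uri).await?;
+        let bytes = body::to_bytes(response.into_body()).await?;
+        println!("{}", String::from_utf8_lossy(&bytes));
+    }
+    Ok(())
+}
+```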
+ +- API version: 0.2.0 +- Package version: 1.33.0 +- Generator version: 7.6.0 +- Build package: `org.openapitools.codegen.languages.RustClientCodegen` + +## Installation + +Put the package under your project folder in a directory named `unit-openapi` and add the following to `Cargo.toml` under `[dependencies]`: + +``` +unit-openapi = { path = "./unit-openapi" } +``` + +## Documentation for API Endpoints + +All URIs are relative to *http://localhost:8080* + +Class | Method | HTTP request | Description +------------ | ------------- | ------------- | ------------- +*AccessLogApi* | [**delete_access_log**](docs/AccessLogApi.md#delete_access_log) | **Delete** /config/access_log | Delete the access log +*AccessLogApi* | [**delete_access_log_format**](docs/AccessLogApi.md#delete_access_log_format) | **Delete** /config/access_log/format | Delete the access log format +*AccessLogApi* | [**delete_access_log_path**](docs/AccessLogApi.md#delete_access_log_path) | **Delete** /config/access_log/path | Delete the access log path +*AccessLogApi* | [**get_access_log**](docs/AccessLogApi.md#get_access_log) | **Get** /config/access_log | Retrieve the access log +*AccessLogApi* | [**get_access_log_format**](docs/AccessLogApi.md#get_access_log_format) | **Get** /config/access_log/format | Retrieve the access log format option +*AccessLogApi* | [**get_access_log_path**](docs/AccessLogApi.md#get_access_log_path) | **Get** /config/access_log/path | Retrieve the access log path option +*AccessLogApi* | [**update_access_log**](docs/AccessLogApi.md#update_access_log) | **Put** /config/access_log | Create or overwrite the access log +*AccessLogApi* | [**update_access_log_format**](docs/AccessLogApi.md#update_access_log_format) | **Put** /config/access_log/format | Create or overwrite the access log format +*AccessLogApi* | [**update_access_log_path**](docs/AccessLogApi.md#update_access_log_path) | **Put** /config/access_log/path | Create or overwrite the access log path +*ApplicationsApi* | [**delete_application**](docs/ApplicationsApi.md#delete_application) | **Delete** /config/applications/{appName} | Delete the application object +*ApplicationsApi* | [**delete_applications**](docs/ApplicationsApi.md#delete_applications) | **Delete** /config/applications | Delete the applications object +*ApplicationsApi* | [**get_application**](docs/ApplicationsApi.md#get_application) | **Get** /config/applications/{appName} | Retrieve an application object +*ApplicationsApi* | [**get_applications**](docs/ApplicationsApi.md#get_applications) | **Get** /config/applications | Retrieve the applications object +*ApplicationsApi* | [**update_application**](docs/ApplicationsApi.md#update_application) | **Put** /config/applications/{appName} | Create or overwrite the application object +*ApplicationsApi* | [**update_applications**](docs/ApplicationsApi.md#update_applications) | **Put** /config/applications | Overwrite the applications object +*AppsApi* | [**get_app_restart**](docs/AppsApi.md#get_app_restart) | **Get** /control/applications/{appName}/restart | Restart the {appName} application +*CertificatesApi* | [**get_cert_bundle**](docs/CertificatesApi.md#get_cert_bundle) | **Get** /certificates/{bundleName} | Retrieve the certificate bundle object +*CertificatesApi* | [**get_cert_bundle_chain**](docs/CertificatesApi.md#get_cert_bundle_chain) | **Get** /certificates/{bundleName}/chain | Retrieve the certificate bundle chain +*CertificatesApi* | [**get_cert_bundle_chain_cert**](docs/CertificatesApi.md#get_cert_bundle_chain_cert) | 
**Get** /certificates/{bundleName}/chain/{arrayIndex} | Retrieve certificate object from the chain array +*CertificatesApi* | [**get_cert_bundle_chain_cert_issuer**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_issuer) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/issuer | Retrieve the issuer object from the certificate object +*CertificatesApi* | [**get_cert_bundle_chain_cert_issuer_cn**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_issuer_cn) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/issuer/common_name | Retrieve the common name from the certificate issuer +*CertificatesApi* | [**get_cert_bundle_chain_cert_issuer_org**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_issuer_org) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/issuer/organization | Retrieve the organization name from the certificate issuer +*CertificatesApi* | [**get_cert_bundle_chain_cert_issuer_state**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_issuer_state) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/issuer/state_or_province | Retrieve the state or province code from the certificate issuer +*CertificatesApi* | [**get_cert_bundle_chain_cert_subj**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_subj) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/subject | Retrieve the subject from the certificate object +*CertificatesApi* | [**get_cert_bundle_chain_cert_subj_alt**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_subj_alt) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/subject/alt_names/{arrayIndex2} | Retrieve an alternative name from the certificate subject +*CertificatesApi* | [**get_cert_bundle_chain_cert_subj_alt_array**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_subj_alt_array) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/subject/alt_names | Retrieve the alternative names array from the certificate subject +*CertificatesApi* | [**get_cert_bundle_chain_cert_subj_cn**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_subj_cn) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/subject/common_name | Retrieve the common name from the certificate subject +*CertificatesApi* | [**get_cert_bundle_chain_cert_subj_country**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_subj_country) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/subject/country | Retrieve the country code from the certificate subject +*CertificatesApi* | [**get_cert_bundle_chain_cert_subj_org**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_subj_org) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/subject/organization | Retrieve the organization name from the certificate subject +*CertificatesApi* | [**get_cert_bundle_chain_cert_subj_state**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_subj_state) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/subject/state_or_province | Retrieve the state or province code from the certificate subject +*CertificatesApi* | [**get_cert_bundle_chain_cert_valid**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_valid) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/validity | Retrieve the validity object from the certificate object +*CertificatesApi* | [**get_cert_bundle_chain_cert_valid_since**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_valid_since) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/validity/since | Retrieve the starting time of certificate validity +*CertificatesApi* | 
[**get_cert_bundle_chain_cert_valid_until**](docs/CertificatesApi.md#get_cert_bundle_chain_cert_valid_until) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/validity/until | Retrieve the ending time of certificate validity +*CertificatesApi* | [**get_cert_bundle_chain_certissuer_country**](docs/CertificatesApi.md#get_cert_bundle_chain_certissuer_country) | **Get** /certificates/{bundleName}/chain/{arrayIndex}/issuer/country | Retrieve the country code from the certificate issuer +*CertificatesApi* | [**get_cert_bundle_key**](docs/CertificatesApi.md#get_cert_bundle_key) | **Get** /certificates/{bundleName}/key | Retrieve the certificate bundle key type +*CertificatesApi* | [**get_certs**](docs/CertificatesApi.md#get_certs) | **Get** /certificates | Retrieve the certificates object +*CertificatesApi* | [**put_cert_bundle**](docs/CertificatesApi.md#put_cert_bundle) | **Put** /certificates/{bundleName} | Create or overwrite the actual certificate bundle +*ConfigApi* | [**delete_access_log**](docs/ConfigApi.md#delete_access_log) | **Delete** /config/access_log | Delete the access log +*ConfigApi* | [**delete_access_log_format**](docs/ConfigApi.md#delete_access_log_format) | **Delete** /config/access_log/format | Delete the access log format +*ConfigApi* | [**delete_access_log_path**](docs/ConfigApi.md#delete_access_log_path) | **Delete** /config/access_log/path | Delete the access log path +*ConfigApi* | [**delete_application**](docs/ConfigApi.md#delete_application) | **Delete** /config/applications/{appName} | Delete the application object +*ConfigApi* | [**delete_applications**](docs/ConfigApi.md#delete_applications) | **Delete** /config/applications | Delete the applications object +*ConfigApi* | [**delete_config**](docs/ConfigApi.md#delete_config) | **Delete** /config | Delete the config object +*ConfigApi* | [**delete_listener**](docs/ConfigApi.md#delete_listener) | **Delete** /config/listeners/{listenerName} | Delete a listener object +*ConfigApi* | [**delete_listener_forwarded_recursive**](docs/ConfigApi.md#delete_listener_forwarded_recursive) | **Delete** /config/listeners/{listenerName}/forwarded/recursive | Delete the recursive object in a listener +*ConfigApi* | [**delete_listener_forwarded_source**](docs/ConfigApi.md#delete_listener_forwarded_source) | **Delete** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Delete a source array item in a listener +*ConfigApi* | [**delete_listener_forwarded_sources**](docs/ConfigApi.md#delete_listener_forwarded_sources) | **Delete** /config/listeners/{listenerName}/forwarded/source | Delete the source option in a listener +*ConfigApi* | [**delete_listener_forwared**](docs/ConfigApi.md#delete_listener_forwared) | **Delete** /config/listeners/{listenerName}/forwarded | Delete the forwarded object in a listener +*ConfigApi* | [**delete_listener_tls**](docs/ConfigApi.md#delete_listener_tls) | **Delete** /config/listeners/{listenerName}/tls | Delete the tls object in a listener +*ConfigApi* | [**delete_listener_tls_certificate**](docs/ConfigApi.md#delete_listener_tls_certificate) | **Delete** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Delete a certificate array item in a listener +*ConfigApi* | [**delete_listener_tls_certificates**](docs/ConfigApi.md#delete_listener_tls_certificates) | **Delete** /config/listeners/{listenerName}/tls/certificate | Delete the certificate option in a listener +*ConfigApi* | [**delete_listener_tls_conf_commands**](docs/ConfigApi.md#delete_listener_tls_conf_commands) | 
**Delete** /config/listeners/{listenerName}/tls/conf_commands | Delete the conf_commands object in a listener +*ConfigApi* | [**delete_listener_tls_session**](docs/ConfigApi.md#delete_listener_tls_session) | **Delete** /config/listeners/{listenerName}/tls/session | Delete the session object in a listener +*ConfigApi* | [**delete_listener_tls_session_ticket**](docs/ConfigApi.md#delete_listener_tls_session_ticket) | **Delete** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Delete a ticket array item in a listener +*ConfigApi* | [**delete_listener_tls_session_tickets**](docs/ConfigApi.md#delete_listener_tls_session_tickets) | **Delete** /config/listeners/{listenerName}/tls/session/tickets | Delete the tickets option in a listener +*ConfigApi* | [**delete_listeners**](docs/ConfigApi.md#delete_listeners) | **Delete** /config/listeners | Delete all the listeners +*ConfigApi* | [**delete_routes**](docs/ConfigApi.md#delete_routes) | **Delete** /config/routes | Delete the routes entity +*ConfigApi* | [**delete_settings**](docs/ConfigApi.md#delete_settings) | **Delete** /config/settings | Delete the settings object +*ConfigApi* | [**delete_settings_discard_unsafe_fields**](docs/ConfigApi.md#delete_settings_discard_unsafe_fields) | **Delete** /config/settings/http/discard_unsafe_fields | Delete the discard_unsafe_fields option +*ConfigApi* | [**delete_settings_http**](docs/ConfigApi.md#delete_settings_http) | **Delete** /config/settings/http | Delete the http object +*ConfigApi* | [**delete_settings_http_body_read_timeout**](docs/ConfigApi.md#delete_settings_http_body_read_timeout) | **Delete** /config/settings/http/body_read_timeout | Delete the body_read_timeout option +*ConfigApi* | [**delete_settings_http_header_read_timeout**](docs/ConfigApi.md#delete_settings_http_header_read_timeout) | **Delete** /config/settings/http/header_read_timeout | Delete the header_read_timeout option +*ConfigApi* | [**delete_settings_http_idle_timeout**](docs/ConfigApi.md#delete_settings_http_idle_timeout) | **Delete** /config/settings/http/idle_timeout | Delete the idle_timeout option +*ConfigApi* | [**delete_settings_http_max_body_size**](docs/ConfigApi.md#delete_settings_http_max_body_size) | **Delete** /config/settings/http/max_body_size | Delete the max_body_size option +*ConfigApi* | [**delete_settings_http_send_timeout**](docs/ConfigApi.md#delete_settings_http_send_timeout) | **Delete** /config/settings/http/send_timeout | Delete the send_timeout option +*ConfigApi* | [**delete_settings_http_static**](docs/ConfigApi.md#delete_settings_http_static) | **Delete** /config/settings/http/static | Delete the static object +*ConfigApi* | [**delete_settings_http_static_mime_type**](docs/ConfigApi.md#delete_settings_http_static_mime_type) | **Delete** /config/settings/http/static/mime_types/{mimeType} | Delete the MIME type option +*ConfigApi* | [**delete_settings_http_static_mime_types**](docs/ConfigApi.md#delete_settings_http_static_mime_types) | **Delete** /config/settings/http/static/mime_types | Delete the mime_types object +*ConfigApi* | [**delete_settings_log_route**](docs/ConfigApi.md#delete_settings_log_route) | **Delete** /config/settings/http/log_route | Delete the log_route option +*ConfigApi* | [**delete_settings_server_version**](docs/ConfigApi.md#delete_settings_server_version) | **Delete** /config/settings/http/server_version | Delete the server_version option +*ConfigApi* | [**get_access_log**](docs/ConfigApi.md#get_access_log) | **Get** /config/access_log | Retrieve the access log 
+*ConfigApi* | [**get_access_log_format**](docs/ConfigApi.md#get_access_log_format) | **Get** /config/access_log/format | Retrieve the access log format option +*ConfigApi* | [**get_access_log_path**](docs/ConfigApi.md#get_access_log_path) | **Get** /config/access_log/path | Retrieve the access log path option +*ConfigApi* | [**get_application**](docs/ConfigApi.md#get_application) | **Get** /config/applications/{appName} | Retrieve an application object +*ConfigApi* | [**get_applications**](docs/ConfigApi.md#get_applications) | **Get** /config/applications | Retrieve the applications object +*ConfigApi* | [**get_config**](docs/ConfigApi.md#get_config) | **Get** /config | Retrieve the config +*ConfigApi* | [**get_listener**](docs/ConfigApi.md#get_listener) | **Get** /config/listeners/{listenerName} | Retrieve a listener object +*ConfigApi* | [**get_listener_forwarded**](docs/ConfigApi.md#get_listener_forwarded) | **Get** /config/listeners/{listenerName}/forwarded | Retrieve the forwarded object in a listener +*ConfigApi* | [**get_listener_forwarded_client_ip**](docs/ConfigApi.md#get_listener_forwarded_client_ip) | **Get** /config/listeners/{listenerName}/forwarded/client_ip | Retrieve the client_ip option in a listener +*ConfigApi* | [**get_listener_forwarded_protocol**](docs/ConfigApi.md#get_listener_forwarded_protocol) | **Get** /config/listeners/{listenerName}/forwarded/protocol | Retrieve the protocol option in a listener +*ConfigApi* | [**get_listener_forwarded_recursive**](docs/ConfigApi.md#get_listener_forwarded_recursive) | **Get** /config/listeners/{listenerName}/forwarded/recursive | Retrieve the recursive option in a listener +*ConfigApi* | [**get_listener_forwarded_source**](docs/ConfigApi.md#get_listener_forwarded_source) | **Get** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Retrieve a source array item in a listener +*ConfigApi* | [**get_listener_pass**](docs/ConfigApi.md#get_listener_pass) | **Get** /config/listeners/{listenerName}/pass | Retrieve the pass option in a listener +*ConfigApi* | [**get_listener_tls**](docs/ConfigApi.md#get_listener_tls) | **Get** /config/listeners/{listenerName}/tls | Retrieve the tls object in a listener +*ConfigApi* | [**get_listener_tls_certificate**](docs/ConfigApi.md#get_listener_tls_certificate) | **Get** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Retrieve a certificate array item in a listener +*ConfigApi* | [**get_listener_tls_session**](docs/ConfigApi.md#get_listener_tls_session) | **Get** /config/listeners/{listenerName}/tls/session | Retrieve the session object in a listener +*ConfigApi* | [**get_listener_tls_session_ticket**](docs/ConfigApi.md#get_listener_tls_session_ticket) | **Get** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Retrieve a ticket array item in a listener +*ConfigApi* | [**get_listeners**](docs/ConfigApi.md#get_listeners) | **Get** /config/listeners | Retrieve all the listeners +*ConfigApi* | [**get_routes**](docs/ConfigApi.md#get_routes) | **Get** /config/routes | Retrieve the routes entity +*ConfigApi* | [**get_settings**](docs/ConfigApi.md#get_settings) | **Get** /config/settings | Retrieve the settings object +*ConfigApi* | [**get_settings_discard_unsafe_fields**](docs/ConfigApi.md#get_settings_discard_unsafe_fields) | **Get** /config/settings/http/discard_unsafe_fields | Retrieve the discard_unsafe_fields option from http settings +*ConfigApi* | [**get_settings_http**](docs/ConfigApi.md#get_settings_http) | **Get** /config/settings/http | Retrieve 
the http object from settings +*ConfigApi* | [**get_settings_http_body_read_timeout**](docs/ConfigApi.md#get_settings_http_body_read_timeout) | **Get** /config/settings/http/body_read_timeout | Retrieve the body_read_timeout option from http settings +*ConfigApi* | [**get_settings_http_header_read_timeout**](docs/ConfigApi.md#get_settings_http_header_read_timeout) | **Get** /config/settings/http/header_read_timeout | Retrieve the header_read_timeout option from http settings +*ConfigApi* | [**get_settings_http_idle_timeout**](docs/ConfigApi.md#get_settings_http_idle_timeout) | **Get** /config/settings/http/idle_timeout | Retrieve the idle_timeout option from http settings +*ConfigApi* | [**get_settings_http_max_body_size**](docs/ConfigApi.md#get_settings_http_max_body_size) | **Get** /config/settings/http/max_body_size | Retrieve the max_body_size option from http settings +*ConfigApi* | [**get_settings_http_send_timeout**](docs/ConfigApi.md#get_settings_http_send_timeout) | **Get** /config/settings/http/send_timeout | Retrieve the send_timeout option from http settings +*ConfigApi* | [**get_settings_http_static**](docs/ConfigApi.md#get_settings_http_static) | **Get** /config/settings/http/static | Retrieve the static object from http settings +*ConfigApi* | [**get_settings_http_static_mime_type**](docs/ConfigApi.md#get_settings_http_static_mime_type) | **Get** /config/settings/http/static/mime_types/{mimeType} | Retrieve the MIME type option from MIME type settings +*ConfigApi* | [**get_settings_http_static_mime_types**](docs/ConfigApi.md#get_settings_http_static_mime_types) | **Get** /config/settings/http/static/mime_types | Retrieve the mime_types object from static settings +*ConfigApi* | [**get_settings_log_route**](docs/ConfigApi.md#get_settings_log_route) | **Get** /config/settings/http/log_route | Retrieve the log_route option from http settings +*ConfigApi* | [**get_settings_server_version**](docs/ConfigApi.md#get_settings_server_version) | **Get** /config/settings/http/server_version | Retrieve the server_version option from http settings +*ConfigApi* | [**insert_listener_forwarded_source**](docs/ConfigApi.md#insert_listener_forwarded_source) | **Post** /config/listeners/{listenerName}/forwarded/source | Add a new source array item in a listener +*ConfigApi* | [**insert_listener_tls_certificate**](docs/ConfigApi.md#insert_listener_tls_certificate) | **Post** /config/listeners/{listenerName}/tls/certificate | Add a new certificate array item in a listener +*ConfigApi* | [**insert_listener_tls_session_ticket**](docs/ConfigApi.md#insert_listener_tls_session_ticket) | **Post** /config/listeners/{listenerName}/tls/session/tickets | Add a new tickets array item in a listener +*ConfigApi* | [**list_listener_forwarded_sources**](docs/ConfigApi.md#list_listener_forwarded_sources) | **Get** /config/listeners/{listenerName}/forwarded/source | Retrieve the source option in a listener +*ConfigApi* | [**list_listener_tls_certificates**](docs/ConfigApi.md#list_listener_tls_certificates) | **Get** /config/listeners/{listenerName}/tls/certificate | Retrieve the certificate option in a listener +*ConfigApi* | [**list_listener_tls_conf_commands**](docs/ConfigApi.md#list_listener_tls_conf_commands) | **Get** /config/listeners/{listenerName}/tls/conf_commands | Retrieve the conf_commands object in a listener +*ConfigApi* | [**list_listener_tls_session_tickets**](docs/ConfigApi.md#list_listener_tls_session_tickets) | **Get** /config/listeners/{listenerName}/tls/session/tickets | Retrieve the tickets 
option in a listener +*ConfigApi* | [**update_access_log**](docs/ConfigApi.md#update_access_log) | **Put** /config/access_log | Create or overwrite the access log +*ConfigApi* | [**update_access_log_format**](docs/ConfigApi.md#update_access_log_format) | **Put** /config/access_log/format | Create or overwrite the access log format +*ConfigApi* | [**update_access_log_path**](docs/ConfigApi.md#update_access_log_path) | **Put** /config/access_log/path | Create or overwrite the access log path +*ConfigApi* | [**update_application**](docs/ConfigApi.md#update_application) | **Put** /config/applications/{appName} | Create or overwrite the application object +*ConfigApi* | [**update_applications**](docs/ConfigApi.md#update_applications) | **Put** /config/applications | Overwrite the applications object +*ConfigApi* | [**update_config**](docs/ConfigApi.md#update_config) | **Put** /config | Create or overwrite the config +*ConfigApi* | [**update_listener**](docs/ConfigApi.md#update_listener) | **Put** /config/listeners/{listenerName} | Create or overwrite a listener object +*ConfigApi* | [**update_listener_forwarded**](docs/ConfigApi.md#update_listener_forwarded) | **Put** /config/listeners/{listenerName}/forwarded | Create or overwrite the forwarded object in a listener +*ConfigApi* | [**update_listener_forwarded_client_ip**](docs/ConfigApi.md#update_listener_forwarded_client_ip) | **Put** /config/listeners/{listenerName}/forwarded/client_ip | Create or overwrite the client_ip option in a listener +*ConfigApi* | [**update_listener_forwarded_protocol**](docs/ConfigApi.md#update_listener_forwarded_protocol) | **Put** /config/listeners/{listenerName}/forwarded/protocol | Create or overwrite the protocol option in a listener +*ConfigApi* | [**update_listener_forwarded_recursive**](docs/ConfigApi.md#update_listener_forwarded_recursive) | **Put** /config/listeners/{listenerName}/forwarded/recursive | Create or overwrite the recursive option in a listener +*ConfigApi* | [**update_listener_forwarded_source**](docs/ConfigApi.md#update_listener_forwarded_source) | **Put** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Update a source array item in a listener +*ConfigApi* | [**update_listener_forwarded_sources**](docs/ConfigApi.md#update_listener_forwarded_sources) | **Put** /config/listeners/{listenerName}/forwarded/source | Create or overwrite the source option in a listener +*ConfigApi* | [**update_listener_pass**](docs/ConfigApi.md#update_listener_pass) | **Put** /config/listeners/{listenerName}/pass | Update the pass option in a listener +*ConfigApi* | [**update_listener_tls**](docs/ConfigApi.md#update_listener_tls) | **Put** /config/listeners/{listenerName}/tls | Create or overwrite the tls object in a listener +*ConfigApi* | [**update_listener_tls_certificate**](docs/ConfigApi.md#update_listener_tls_certificate) | **Put** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Update a certificate array item in a listener +*ConfigApi* | [**update_listener_tls_certificates**](docs/ConfigApi.md#update_listener_tls_certificates) | **Put** /config/listeners/{listenerName}/tls/certificate | Create or overwrite the certificate option in a listener +*ConfigApi* | [**update_listener_tls_conf_commands**](docs/ConfigApi.md#update_listener_tls_conf_commands) | **Put** /config/listeners/{listenerName}/tls/conf_commands | Create or overwrite the conf_commands object in a listener +*ConfigApi* | [**update_listener_tls_session**](docs/ConfigApi.md#update_listener_tls_session) | **Put** 
/config/listeners/{listenerName}/tls/session | Create or overwrite the session object in a listener +*ConfigApi* | [**update_listener_tls_session_ticket**](docs/ConfigApi.md#update_listener_tls_session_ticket) | **Put** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Create or overwrite a ticket array item in a listener +*ConfigApi* | [**update_listener_tls_session_tickets**](docs/ConfigApi.md#update_listener_tls_session_tickets) | **Put** /config/listeners/{listenerName}/tls/session/tickets | Create or overwrite the tickets option in a listener +*ConfigApi* | [**update_listeners**](docs/ConfigApi.md#update_listeners) | **Put** /config/listeners | Create or overwrite all the listeners +*ConfigApi* | [**update_routes**](docs/ConfigApi.md#update_routes) | **Put** /config/routes | Overwrite the routes entity +*ConfigApi* | [**update_settings**](docs/ConfigApi.md#update_settings) | **Put** /config/settings | Create or overwrite the settings object +*ConfigApi* | [**update_settings_discard_unsafe_fields**](docs/ConfigApi.md#update_settings_discard_unsafe_fields) | **Put** /config/settings/http/discard_unsafe_fields | Create or overwrite the discard_unsafe_fields option +*ConfigApi* | [**update_settings_http**](docs/ConfigApi.md#update_settings_http) | **Put** /config/settings/http | Create or overwrite the http object +*ConfigApi* | [**update_settings_http_body_read_timeout**](docs/ConfigApi.md#update_settings_http_body_read_timeout) | **Put** /config/settings/http/body_read_timeout | Create or overwrite the body_read_timeout option +*ConfigApi* | [**update_settings_http_header_read_timeout**](docs/ConfigApi.md#update_settings_http_header_read_timeout) | **Put** /config/settings/http/header_read_timeout | Create or overwrite the header_read_timeout option +*ConfigApi* | [**update_settings_http_idle_timeout**](docs/ConfigApi.md#update_settings_http_idle_timeout) | **Put** /config/settings/http/idle_timeout | Create or overwrite the idle_timeout option +*ConfigApi* | [**update_settings_http_max_body_size**](docs/ConfigApi.md#update_settings_http_max_body_size) | **Put** /config/settings/http/max_body_size | Create or overwrite the max_body_size option +*ConfigApi* | [**update_settings_http_send_timeout**](docs/ConfigApi.md#update_settings_http_send_timeout) | **Put** /config/settings/http/send_timeout | Create or overwrite the send_timeout option +*ConfigApi* | [**update_settings_http_static**](docs/ConfigApi.md#update_settings_http_static) | **Put** /config/settings/http/static | Create or overwrite the static object +*ConfigApi* | [**update_settings_http_static_mime_type**](docs/ConfigApi.md#update_settings_http_static_mime_type) | **Put** /config/settings/http/static/mime_types/{mimeType} | Create or overwrite the MIME type option +*ConfigApi* | [**update_settings_http_static_mime_types**](docs/ConfigApi.md#update_settings_http_static_mime_types) | **Put** /config/settings/http/static/mime_types | Create or overwrite the mime_types object +*ConfigApi* | [**update_settings_log_route**](docs/ConfigApi.md#update_settings_log_route) | **Put** /config/settings/http/log_route | Create or overwrite the log_route option +*ConfigApi* | [**update_settings_server_version**](docs/ConfigApi.md#update_settings_server_version) | **Put** /config/settings/http/server_version | Create or overwrite the server_version option +*ControlApi* | [**get_app_restart**](docs/ControlApi.md#get_app_restart) | **Get** /control/applications/{appName}/restart | Restart the {appName} application 
+*ListenersApi* | [**delete_listener**](docs/ListenersApi.md#delete_listener) | **Delete** /config/listeners/{listenerName} | Delete a listener object +*ListenersApi* | [**delete_listener_forwarded_recursive**](docs/ListenersApi.md#delete_listener_forwarded_recursive) | **Delete** /config/listeners/{listenerName}/forwarded/recursive | Delete the recursive object in a listener +*ListenersApi* | [**delete_listener_forwarded_source**](docs/ListenersApi.md#delete_listener_forwarded_source) | **Delete** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Delete a source array item in a listener +*ListenersApi* | [**delete_listener_forwarded_sources**](docs/ListenersApi.md#delete_listener_forwarded_sources) | **Delete** /config/listeners/{listenerName}/forwarded/source | Delete the source option in a listener +*ListenersApi* | [**delete_listener_forwared**](docs/ListenersApi.md#delete_listener_forwared) | **Delete** /config/listeners/{listenerName}/forwarded | Delete the forwarded object in a listener +*ListenersApi* | [**delete_listener_tls**](docs/ListenersApi.md#delete_listener_tls) | **Delete** /config/listeners/{listenerName}/tls | Delete the tls object in a listener +*ListenersApi* | [**delete_listener_tls_certificate**](docs/ListenersApi.md#delete_listener_tls_certificate) | **Delete** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Delete a certificate array item in a listener +*ListenersApi* | [**delete_listener_tls_certificates**](docs/ListenersApi.md#delete_listener_tls_certificates) | **Delete** /config/listeners/{listenerName}/tls/certificate | Delete the certificate option in a listener +*ListenersApi* | [**delete_listener_tls_conf_commands**](docs/ListenersApi.md#delete_listener_tls_conf_commands) | **Delete** /config/listeners/{listenerName}/tls/conf_commands | Delete the conf_commands object in a listener +*ListenersApi* | [**delete_listener_tls_session**](docs/ListenersApi.md#delete_listener_tls_session) | **Delete** /config/listeners/{listenerName}/tls/session | Delete the session object in a listener +*ListenersApi* | [**delete_listener_tls_session_ticket**](docs/ListenersApi.md#delete_listener_tls_session_ticket) | **Delete** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Delete a ticket array item in a listener +*ListenersApi* | [**delete_listener_tls_session_tickets**](docs/ListenersApi.md#delete_listener_tls_session_tickets) | **Delete** /config/listeners/{listenerName}/tls/session/tickets | Delete the tickets option in a listener +*ListenersApi* | [**delete_listeners**](docs/ListenersApi.md#delete_listeners) | **Delete** /config/listeners | Delete all the listeners +*ListenersApi* | [**get_listener**](docs/ListenersApi.md#get_listener) | **Get** /config/listeners/{listenerName} | Retrieve a listener object +*ListenersApi* | [**get_listener_forwarded**](docs/ListenersApi.md#get_listener_forwarded) | **Get** /config/listeners/{listenerName}/forwarded | Retrieve the forwarded object in a listener +*ListenersApi* | [**get_listener_forwarded_client_ip**](docs/ListenersApi.md#get_listener_forwarded_client_ip) | **Get** /config/listeners/{listenerName}/forwarded/client_ip | Retrieve the client_ip option in a listener +*ListenersApi* | [**get_listener_forwarded_protocol**](docs/ListenersApi.md#get_listener_forwarded_protocol) | **Get** /config/listeners/{listenerName}/forwarded/protocol | Retrieve the protocol option in a listener +*ListenersApi* | 
[**get_listener_forwarded_recursive**](docs/ListenersApi.md#get_listener_forwarded_recursive) | **Get** /config/listeners/{listenerName}/forwarded/recursive | Retrieve the recursive option in a listener +*ListenersApi* | [**get_listener_forwarded_source**](docs/ListenersApi.md#get_listener_forwarded_source) | **Get** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Retrieve a source array item in a listener +*ListenersApi* | [**get_listener_pass**](docs/ListenersApi.md#get_listener_pass) | **Get** /config/listeners/{listenerName}/pass | Retrieve the pass option in a listener +*ListenersApi* | [**get_listener_tls**](docs/ListenersApi.md#get_listener_tls) | **Get** /config/listeners/{listenerName}/tls | Retrieve the tls object in a listener +*ListenersApi* | [**get_listener_tls_certificate**](docs/ListenersApi.md#get_listener_tls_certificate) | **Get** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Retrieve a certificate array item in a listener +*ListenersApi* | [**get_listener_tls_session**](docs/ListenersApi.md#get_listener_tls_session) | **Get** /config/listeners/{listenerName}/tls/session | Retrieve the session object in a listener +*ListenersApi* | [**get_listener_tls_session_ticket**](docs/ListenersApi.md#get_listener_tls_session_ticket) | **Get** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Retrieve a ticket array item in a listener +*ListenersApi* | [**get_listeners**](docs/ListenersApi.md#get_listeners) | **Get** /config/listeners | Retrieve all the listeners +*ListenersApi* | [**insert_listener_forwarded_source**](docs/ListenersApi.md#insert_listener_forwarded_source) | **Post** /config/listeners/{listenerName}/forwarded/source | Add a new source array item in a listener +*ListenersApi* | [**insert_listener_tls_certificate**](docs/ListenersApi.md#insert_listener_tls_certificate) | **Post** /config/listeners/{listenerName}/tls/certificate | Add a new certificate array item in a listener +*ListenersApi* | [**insert_listener_tls_session_ticket**](docs/ListenersApi.md#insert_listener_tls_session_ticket) | **Post** /config/listeners/{listenerName}/tls/session/tickets | Add a new tickets array item in a listener +*ListenersApi* | [**list_listener_forwarded_sources**](docs/ListenersApi.md#list_listener_forwarded_sources) | **Get** /config/listeners/{listenerName}/forwarded/source | Retrieve the source option in a listener +*ListenersApi* | [**list_listener_tls_certificates**](docs/ListenersApi.md#list_listener_tls_certificates) | **Get** /config/listeners/{listenerName}/tls/certificate | Retrieve the certificate option in a listener +*ListenersApi* | [**list_listener_tls_conf_commands**](docs/ListenersApi.md#list_listener_tls_conf_commands) | **Get** /config/listeners/{listenerName}/tls/conf_commands | Retrieve the conf_commands object in a listener +*ListenersApi* | [**list_listener_tls_session_tickets**](docs/ListenersApi.md#list_listener_tls_session_tickets) | **Get** /config/listeners/{listenerName}/tls/session/tickets | Retrieve the tickets option in a listener +*ListenersApi* | [**update_listener**](docs/ListenersApi.md#update_listener) | **Put** /config/listeners/{listenerName} | Create or overwrite a listener object +*ListenersApi* | [**update_listener_forwarded**](docs/ListenersApi.md#update_listener_forwarded) | **Put** /config/listeners/{listenerName}/forwarded | Create or overwrite the forwarded object in a listener +*ListenersApi* | 
[**update_listener_forwarded_client_ip**](docs/ListenersApi.md#update_listener_forwarded_client_ip) | **Put** /config/listeners/{listenerName}/forwarded/client_ip | Create or overwrite the client_ip option in a listener +*ListenersApi* | [**update_listener_forwarded_protocol**](docs/ListenersApi.md#update_listener_forwarded_protocol) | **Put** /config/listeners/{listenerName}/forwarded/protocol | Create or overwrite the protocol option in a listener +*ListenersApi* | [**update_listener_forwarded_recursive**](docs/ListenersApi.md#update_listener_forwarded_recursive) | **Put** /config/listeners/{listenerName}/forwarded/recursive | Create or overwrite the recursive option in a listener +*ListenersApi* | [**update_listener_forwarded_source**](docs/ListenersApi.md#update_listener_forwarded_source) | **Put** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Update a source array item in a listener +*ListenersApi* | [**update_listener_forwarded_sources**](docs/ListenersApi.md#update_listener_forwarded_sources) | **Put** /config/listeners/{listenerName}/forwarded/source | Create or overwrite the source option in a listener +*ListenersApi* | [**update_listener_pass**](docs/ListenersApi.md#update_listener_pass) | **Put** /config/listeners/{listenerName}/pass | Update the pass option in a listener +*ListenersApi* | [**update_listener_tls**](docs/ListenersApi.md#update_listener_tls) | **Put** /config/listeners/{listenerName}/tls | Create or overwrite the tls object in a listener +*ListenersApi* | [**update_listener_tls_certificate**](docs/ListenersApi.md#update_listener_tls_certificate) | **Put** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Update a certificate array item in a listener +*ListenersApi* | [**update_listener_tls_certificates**](docs/ListenersApi.md#update_listener_tls_certificates) | **Put** /config/listeners/{listenerName}/tls/certificate | Create or overwrite the certificate option in a listener +*ListenersApi* | [**update_listener_tls_conf_commands**](docs/ListenersApi.md#update_listener_tls_conf_commands) | **Put** /config/listeners/{listenerName}/tls/conf_commands | Create or overwrite the conf_commands object in a listener +*ListenersApi* | [**update_listener_tls_session**](docs/ListenersApi.md#update_listener_tls_session) | **Put** /config/listeners/{listenerName}/tls/session | Create or overwrite the session object in a listener +*ListenersApi* | [**update_listener_tls_session_ticket**](docs/ListenersApi.md#update_listener_tls_session_ticket) | **Put** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Create or overwrite a ticket array item in a listener +*ListenersApi* | [**update_listener_tls_session_tickets**](docs/ListenersApi.md#update_listener_tls_session_tickets) | **Put** /config/listeners/{listenerName}/tls/session/tickets | Create or overwrite the tickets option in a listener +*ListenersApi* | [**update_listeners**](docs/ListenersApi.md#update_listeners) | **Put** /config/listeners | Create or overwrite all the listeners +*RoutesApi* | [**delete_routes**](docs/RoutesApi.md#delete_routes) | **Delete** /config/routes | Delete the routes entity +*RoutesApi* | [**get_routes**](docs/RoutesApi.md#get_routes) | **Get** /config/routes | Retrieve the routes entity +*RoutesApi* | [**update_routes**](docs/RoutesApi.md#update_routes) | **Put** /config/routes | Overwrite the routes entity +*SettingsApi* | [**delete_settings**](docs/SettingsApi.md#delete_settings) | **Delete** /config/settings | Delete the settings object 
+*SettingsApi* | [**delete_settings_discard_unsafe_fields**](docs/SettingsApi.md#delete_settings_discard_unsafe_fields) | **Delete** /config/settings/http/discard_unsafe_fields | Delete the discard_unsafe_fields option +*SettingsApi* | [**delete_settings_http**](docs/SettingsApi.md#delete_settings_http) | **Delete** /config/settings/http | Delete the http object +*SettingsApi* | [**delete_settings_http_body_read_timeout**](docs/SettingsApi.md#delete_settings_http_body_read_timeout) | **Delete** /config/settings/http/body_read_timeout | Delete the body_read_timeout option +*SettingsApi* | [**delete_settings_http_header_read_timeout**](docs/SettingsApi.md#delete_settings_http_header_read_timeout) | **Delete** /config/settings/http/header_read_timeout | Delete the header_read_timeout option +*SettingsApi* | [**delete_settings_http_idle_timeout**](docs/SettingsApi.md#delete_settings_http_idle_timeout) | **Delete** /config/settings/http/idle_timeout | Delete the idle_timeout option +*SettingsApi* | [**delete_settings_http_max_body_size**](docs/SettingsApi.md#delete_settings_http_max_body_size) | **Delete** /config/settings/http/max_body_size | Delete the max_body_size option +*SettingsApi* | [**delete_settings_http_send_timeout**](docs/SettingsApi.md#delete_settings_http_send_timeout) | **Delete** /config/settings/http/send_timeout | Delete the send_timeout option +*SettingsApi* | [**delete_settings_http_static**](docs/SettingsApi.md#delete_settings_http_static) | **Delete** /config/settings/http/static | Delete the static object +*SettingsApi* | [**delete_settings_http_static_mime_type**](docs/SettingsApi.md#delete_settings_http_static_mime_type) | **Delete** /config/settings/http/static/mime_types/{mimeType} | Delete the MIME type option +*SettingsApi* | [**delete_settings_http_static_mime_types**](docs/SettingsApi.md#delete_settings_http_static_mime_types) | **Delete** /config/settings/http/static/mime_types | Delete the mime_types object +*SettingsApi* | [**delete_settings_log_route**](docs/SettingsApi.md#delete_settings_log_route) | **Delete** /config/settings/http/log_route | Delete the log_route option +*SettingsApi* | [**delete_settings_server_version**](docs/SettingsApi.md#delete_settings_server_version) | **Delete** /config/settings/http/server_version | Delete the server_version option +*SettingsApi* | [**get_settings**](docs/SettingsApi.md#get_settings) | **Get** /config/settings | Retrieve the settings object +*SettingsApi* | [**get_settings_discard_unsafe_fields**](docs/SettingsApi.md#get_settings_discard_unsafe_fields) | **Get** /config/settings/http/discard_unsafe_fields | Retrieve the discard_unsafe_fields option from http settings +*SettingsApi* | [**get_settings_http**](docs/SettingsApi.md#get_settings_http) | **Get** /config/settings/http | Retrieve the http object from settings +*SettingsApi* | [**get_settings_http_body_read_timeout**](docs/SettingsApi.md#get_settings_http_body_read_timeout) | **Get** /config/settings/http/body_read_timeout | Retrieve the body_read_timeout option from http settings +*SettingsApi* | [**get_settings_http_header_read_timeout**](docs/SettingsApi.md#get_settings_http_header_read_timeout) | **Get** /config/settings/http/header_read_timeout | Retrieve the header_read_timeout option from http settings +*SettingsApi* | [**get_settings_http_idle_timeout**](docs/SettingsApi.md#get_settings_http_idle_timeout) | **Get** /config/settings/http/idle_timeout | Retrieve the idle_timeout option from http settings +*SettingsApi* | 
[**get_settings_http_max_body_size**](docs/SettingsApi.md#get_settings_http_max_body_size) | **Get** /config/settings/http/max_body_size | Retrieve the max_body_size option from http settings +*SettingsApi* | [**get_settings_http_send_timeout**](docs/SettingsApi.md#get_settings_http_send_timeout) | **Get** /config/settings/http/send_timeout | Retrieve the send_timeout option from http settings +*SettingsApi* | [**get_settings_http_static**](docs/SettingsApi.md#get_settings_http_static) | **Get** /config/settings/http/static | Retrieve the static object from http settings +*SettingsApi* | [**get_settings_http_static_mime_type**](docs/SettingsApi.md#get_settings_http_static_mime_type) | **Get** /config/settings/http/static/mime_types/{mimeType} | Retrieve the MIME type option from MIME type settings +*SettingsApi* | [**get_settings_http_static_mime_types**](docs/SettingsApi.md#get_settings_http_static_mime_types) | **Get** /config/settings/http/static/mime_types | Retrieve the mime_types object from static settings +*SettingsApi* | [**get_settings_log_route**](docs/SettingsApi.md#get_settings_log_route) | **Get** /config/settings/http/log_route | Retrieve the log_route option from http settings +*SettingsApi* | [**get_settings_server_version**](docs/SettingsApi.md#get_settings_server_version) | **Get** /config/settings/http/server_version | Retrieve the server_version option from http settings +*SettingsApi* | [**update_settings**](docs/SettingsApi.md#update_settings) | **Put** /config/settings | Create or overwrite the settings object +*SettingsApi* | [**update_settings_discard_unsafe_fields**](docs/SettingsApi.md#update_settings_discard_unsafe_fields) | **Put** /config/settings/http/discard_unsafe_fields | Create or overwrite the discard_unsafe_fields option +*SettingsApi* | [**update_settings_http**](docs/SettingsApi.md#update_settings_http) | **Put** /config/settings/http | Create or overwrite the http object +*SettingsApi* | [**update_settings_http_body_read_timeout**](docs/SettingsApi.md#update_settings_http_body_read_timeout) | **Put** /config/settings/http/body_read_timeout | Create or overwrite the body_read_timeout option +*SettingsApi* | [**update_settings_http_header_read_timeout**](docs/SettingsApi.md#update_settings_http_header_read_timeout) | **Put** /config/settings/http/header_read_timeout | Create or overwrite the header_read_timeout option +*SettingsApi* | [**update_settings_http_idle_timeout**](docs/SettingsApi.md#update_settings_http_idle_timeout) | **Put** /config/settings/http/idle_timeout | Create or overwrite the idle_timeout option +*SettingsApi* | [**update_settings_http_max_body_size**](docs/SettingsApi.md#update_settings_http_max_body_size) | **Put** /config/settings/http/max_body_size | Create or overwrite the max_body_size option +*SettingsApi* | [**update_settings_http_send_timeout**](docs/SettingsApi.md#update_settings_http_send_timeout) | **Put** /config/settings/http/send_timeout | Create or overwrite the send_timeout option +*SettingsApi* | [**update_settings_http_static**](docs/SettingsApi.md#update_settings_http_static) | **Put** /config/settings/http/static | Create or overwrite the static object +*SettingsApi* | [**update_settings_http_static_mime_type**](docs/SettingsApi.md#update_settings_http_static_mime_type) | **Put** /config/settings/http/static/mime_types/{mimeType} | Create or overwrite the MIME type option +*SettingsApi* | [**update_settings_http_static_mime_types**](docs/SettingsApi.md#update_settings_http_static_mime_types) | **Put** 
/config/settings/http/static/mime_types | Create or overwrite the mime_types object +*SettingsApi* | [**update_settings_log_route**](docs/SettingsApi.md#update_settings_log_route) | **Put** /config/settings/http/log_route | Create or overwrite the log_route option +*SettingsApi* | [**update_settings_server_version**](docs/SettingsApi.md#update_settings_server_version) | **Put** /config/settings/http/server_version | Create or overwrite the server_version option +*StatusApi* | [**get_status**](docs/StatusApi.md#get_status) | **Get** /status | Retrieve the status object +*StatusApi* | [**get_status_applications**](docs/StatusApi.md#get_status_applications) | **Get** /status/applications | Retrieve the applications status object +*StatusApi* | [**get_status_applications_app**](docs/StatusApi.md#get_status_applications_app) | **Get** /status/applications/{appName} | Retrieve the app status object +*StatusApi* | [**get_status_applications_app_processes**](docs/StatusApi.md#get_status_applications_app_processes) | **Get** /status/applications/{appName}/processes | Retrieve the processes app status object +*StatusApi* | [**get_status_applications_app_processes_idle**](docs/StatusApi.md#get_status_applications_app_processes_idle) | **Get** /status/applications/{appName}/processes/idle | Retrieve the idle processes app status number +*StatusApi* | [**get_status_applications_app_processes_running**](docs/StatusApi.md#get_status_applications_app_processes_running) | **Get** /status/applications/{appName}/processes/running | Retrieve the running processes app status number +*StatusApi* | [**get_status_applications_app_processes_starting**](docs/StatusApi.md#get_status_applications_app_processes_starting) | **Get** /status/applications/{appName}/processes/starting | Retrieve the starting processes app status number +*StatusApi* | [**get_status_applications_app_requests**](docs/StatusApi.md#get_status_applications_app_requests) | **Get** /status/applications/{appName}/requests | Retrieve the requests app status object +*StatusApi* | [**get_status_applications_app_requests_active**](docs/StatusApi.md#get_status_applications_app_requests_active) | **Get** /status/applications/{appName}/requests/active | Retrieve the active requests app status number +*StatusApi* | [**get_status_connections**](docs/StatusApi.md#get_status_connections) | **Get** /status/connections | Retrieve the connections status object +*StatusApi* | [**get_status_connections_accepted**](docs/StatusApi.md#get_status_connections_accepted) | **Get** /status/connections/accepted | Retrieve the accepted connections number +*StatusApi* | [**get_status_connections_active**](docs/StatusApi.md#get_status_connections_active) | **Get** /status/connections/active | Retrieve the active connections number +*StatusApi* | [**get_status_connections_closed**](docs/StatusApi.md#get_status_connections_closed) | **Get** /status/connections/closed | Retrieve the closed connections number +*StatusApi* | [**get_status_connections_idle**](docs/StatusApi.md#get_status_connections_idle) | **Get** /status/connections/idle | Retrieve the idle connections number +*StatusApi* | [**get_status_requests**](docs/StatusApi.md#get_status_requests) | **Get** /status/requests | Retrieve the requests status object +*StatusApi* | [**get_status_requests_total**](docs/StatusApi.md#get_status_requests_total) | **Get** /status/requests/total | Retrieve the total requests number +*TlsApi* | [**delete_listener_tls**](docs/TlsApi.md#delete_listener_tls) | **Delete** 
/config/listeners/{listenerName}/tls | Delete the tls object in a listener +*TlsApi* | [**delete_listener_tls_certificate**](docs/TlsApi.md#delete_listener_tls_certificate) | **Delete** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Delete a certificate array item in a listener +*TlsApi* | [**delete_listener_tls_certificates**](docs/TlsApi.md#delete_listener_tls_certificates) | **Delete** /config/listeners/{listenerName}/tls/certificate | Delete the certificate option in a listener +*TlsApi* | [**delete_listener_tls_conf_commands**](docs/TlsApi.md#delete_listener_tls_conf_commands) | **Delete** /config/listeners/{listenerName}/tls/conf_commands | Delete the conf_commands object in a listener +*TlsApi* | [**delete_listener_tls_session**](docs/TlsApi.md#delete_listener_tls_session) | **Delete** /config/listeners/{listenerName}/tls/session | Delete the session object in a listener +*TlsApi* | [**delete_listener_tls_session_ticket**](docs/TlsApi.md#delete_listener_tls_session_ticket) | **Delete** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Delete a ticket array item in a listener +*TlsApi* | [**delete_listener_tls_session_tickets**](docs/TlsApi.md#delete_listener_tls_session_tickets) | **Delete** /config/listeners/{listenerName}/tls/session/tickets | Delete the tickets option in a listener +*TlsApi* | [**get_listener_tls**](docs/TlsApi.md#get_listener_tls) | **Get** /config/listeners/{listenerName}/tls | Retrieve the tls object in a listener +*TlsApi* | [**get_listener_tls_certificate**](docs/TlsApi.md#get_listener_tls_certificate) | **Get** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Retrieve a certificate array item in a listener +*TlsApi* | [**get_listener_tls_session**](docs/TlsApi.md#get_listener_tls_session) | **Get** /config/listeners/{listenerName}/tls/session | Retrieve the session object in a listener +*TlsApi* | [**get_listener_tls_session_ticket**](docs/TlsApi.md#get_listener_tls_session_ticket) | **Get** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Retrieve a ticket array item in a listener +*TlsApi* | [**insert_listener_tls_certificate**](docs/TlsApi.md#insert_listener_tls_certificate) | **Post** /config/listeners/{listenerName}/tls/certificate | Add a new certificate array item in a listener +*TlsApi* | [**insert_listener_tls_session_ticket**](docs/TlsApi.md#insert_listener_tls_session_ticket) | **Post** /config/listeners/{listenerName}/tls/session/tickets | Add a new tickets array item in a listener +*TlsApi* | [**list_listener_tls_certificates**](docs/TlsApi.md#list_listener_tls_certificates) | **Get** /config/listeners/{listenerName}/tls/certificate | Retrieve the certificate option in a listener +*TlsApi* | [**list_listener_tls_conf_commands**](docs/TlsApi.md#list_listener_tls_conf_commands) | **Get** /config/listeners/{listenerName}/tls/conf_commands | Retrieve the conf_commands object in a listener +*TlsApi* | [**list_listener_tls_session_tickets**](docs/TlsApi.md#list_listener_tls_session_tickets) | **Get** /config/listeners/{listenerName}/tls/session/tickets | Retrieve the tickets option in a listener +*TlsApi* | [**update_listener_tls**](docs/TlsApi.md#update_listener_tls) | **Put** /config/listeners/{listenerName}/tls | Create or overwrite the tls object in a listener +*TlsApi* | [**update_listener_tls_certificate**](docs/TlsApi.md#update_listener_tls_certificate) | **Put** /config/listeners/{listenerName}/tls/certificate/{arrayIndex} | Update a certificate array item in a listener 
+*TlsApi* | [**update_listener_tls_certificates**](docs/TlsApi.md#update_listener_tls_certificates) | **Put** /config/listeners/{listenerName}/tls/certificate | Create or overwrite the certificate option in a listener +*TlsApi* | [**update_listener_tls_conf_commands**](docs/TlsApi.md#update_listener_tls_conf_commands) | **Put** /config/listeners/{listenerName}/tls/conf_commands | Create or overwrite the conf_commands object in a listener +*TlsApi* | [**update_listener_tls_session**](docs/TlsApi.md#update_listener_tls_session) | **Put** /config/listeners/{listenerName}/tls/session | Create or overwrite the session object in a listener +*TlsApi* | [**update_listener_tls_session_ticket**](docs/TlsApi.md#update_listener_tls_session_ticket) | **Put** /config/listeners/{listenerName}/tls/session/tickets/{arrayIndex} | Create or overwrite a ticket array item in a listener +*TlsApi* | [**update_listener_tls_session_tickets**](docs/TlsApi.md#update_listener_tls_session_tickets) | **Put** /config/listeners/{listenerName}/tls/session/tickets | Create or overwrite the tickets option in a listener +*XffApi* | [**delete_listener_forwarded_recursive**](docs/XffApi.md#delete_listener_forwarded_recursive) | **Delete** /config/listeners/{listenerName}/forwarded/recursive | Delete the recursive object in a listener +*XffApi* | [**delete_listener_forwarded_source**](docs/XffApi.md#delete_listener_forwarded_source) | **Delete** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Delete a source array item in a listener +*XffApi* | [**delete_listener_forwarded_sources**](docs/XffApi.md#delete_listener_forwarded_sources) | **Delete** /config/listeners/{listenerName}/forwarded/source | Delete the source option in a listener +*XffApi* | [**delete_listener_forwared**](docs/XffApi.md#delete_listener_forwared) | **Delete** /config/listeners/{listenerName}/forwarded | Delete the forwarded object in a listener +*XffApi* | [**get_listener_forwarded**](docs/XffApi.md#get_listener_forwarded) | **Get** /config/listeners/{listenerName}/forwarded | Retrieve the forwarded object in a listener +*XffApi* | [**get_listener_forwarded_client_ip**](docs/XffApi.md#get_listener_forwarded_client_ip) | **Get** /config/listeners/{listenerName}/forwarded/client_ip | Retrieve the client_ip option in a listener +*XffApi* | [**get_listener_forwarded_protocol**](docs/XffApi.md#get_listener_forwarded_protocol) | **Get** /config/listeners/{listenerName}/forwarded/protocol | Retrieve the protocol option in a listener +*XffApi* | [**get_listener_forwarded_recursive**](docs/XffApi.md#get_listener_forwarded_recursive) | **Get** /config/listeners/{listenerName}/forwarded/recursive | Retrieve the recursive option in a listener +*XffApi* | [**get_listener_forwarded_source**](docs/XffApi.md#get_listener_forwarded_source) | **Get** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Retrieve a source array item in a listener +*XffApi* | [**insert_listener_forwarded_source**](docs/XffApi.md#insert_listener_forwarded_source) | **Post** /config/listeners/{listenerName}/forwarded/source | Add a new source array item in a listener +*XffApi* | [**list_listener_forwarded_sources**](docs/XffApi.md#list_listener_forwarded_sources) | **Get** /config/listeners/{listenerName}/forwarded/source | Retrieve the source option in a listener +*XffApi* | [**update_listener_forwarded**](docs/XffApi.md#update_listener_forwarded) | **Put** /config/listeners/{listenerName}/forwarded | Create or overwrite the forwarded object in a listener +*XffApi* | 
[**update_listener_forwarded_client_ip**](docs/XffApi.md#update_listener_forwarded_client_ip) | **Put** /config/listeners/{listenerName}/forwarded/client_ip | Create or overwrite the client_ip option in a listener +*XffApi* | [**update_listener_forwarded_protocol**](docs/XffApi.md#update_listener_forwarded_protocol) | **Put** /config/listeners/{listenerName}/forwarded/protocol | Create or overwrite the protocol option in a listener +*XffApi* | [**update_listener_forwarded_recursive**](docs/XffApi.md#update_listener_forwarded_recursive) | **Put** /config/listeners/{listenerName}/forwarded/recursive | Create or overwrite the recursive option in a listener +*XffApi* | [**update_listener_forwarded_source**](docs/XffApi.md#update_listener_forwarded_source) | **Put** /config/listeners/{listenerName}/forwarded/source/{arrayIndex} | Update a source array item in a listener +*XffApi* | [**update_listener_forwarded_sources**](docs/XffApi.md#update_listener_forwarded_sources) | **Put** /config/listeners/{listenerName}/forwarded/source | Create or overwrite the source option in a listener + + +## Documentation For Models + + - [CertBundle](docs/CertBundle.md) + - [CertBundleChainCert](docs/CertBundleChainCert.md) + - [CertBundleChainCertIssuer](docs/CertBundleChainCertIssuer.md) + - [CertBundleChainCertSubj](docs/CertBundleChainCertSubj.md) + - [CertBundleChainCertValidity](docs/CertBundleChainCertValidity.md) + - [Config](docs/Config.md) + - [ConfigAccessLog](docs/ConfigAccessLog.md) + - [ConfigAccessLogObject](docs/ConfigAccessLogObject.md) + - [ConfigApplication](docs/ConfigApplication.md) + - [ConfigApplicationCommon](docs/ConfigApplicationCommon.md) + - [ConfigApplicationCommonIsolation](docs/ConfigApplicationCommonIsolation.md) + - [ConfigApplicationCommonIsolationAutomount](docs/ConfigApplicationCommonIsolationAutomount.md) + - [ConfigApplicationCommonIsolationCgroup](docs/ConfigApplicationCommonIsolationCgroup.md) + - [ConfigApplicationCommonIsolationGidmapInner](docs/ConfigApplicationCommonIsolationGidmapInner.md) + - [ConfigApplicationCommonIsolationNamespaces](docs/ConfigApplicationCommonIsolationNamespaces.md) + - [ConfigApplicationCommonIsolationUidmapInner](docs/ConfigApplicationCommonIsolationUidmapInner.md) + - [ConfigApplicationCommonLimits](docs/ConfigApplicationCommonLimits.md) + - [ConfigApplicationCommonProcesses](docs/ConfigApplicationCommonProcesses.md) + - [ConfigApplicationCommonProcessesAnyOf](docs/ConfigApplicationCommonProcessesAnyOf.md) + - [ConfigApplicationExternal](docs/ConfigApplicationExternal.md) + - [ConfigApplicationJava](docs/ConfigApplicationJava.md) + - [ConfigApplicationPerl](docs/ConfigApplicationPerl.md) + - [ConfigApplicationPhp](docs/ConfigApplicationPhp.md) + - [ConfigApplicationPhpAllOfOptions](docs/ConfigApplicationPhpAllOfOptions.md) + - [ConfigApplicationPhpAllOfTargets](docs/ConfigApplicationPhpAllOfTargets.md) + - [ConfigApplicationPython](docs/ConfigApplicationPython.md) + - [ConfigApplicationPythonAllOfPath](docs/ConfigApplicationPythonAllOfPath.md) + - [ConfigApplicationPythonAllOfTargets](docs/ConfigApplicationPythonAllOfTargets.md) + - [ConfigApplicationRuby](docs/ConfigApplicationRuby.md) + - [ConfigApplicationWasi](docs/ConfigApplicationWasi.md) + - [ConfigApplicationWasm](docs/ConfigApplicationWasm.md) + - [ConfigApplicationWasmAllOfAccess](docs/ConfigApplicationWasmAllOfAccess.md) + - [ConfigListener](docs/ConfigListener.md) + - [ConfigListenerForwarded](docs/ConfigListenerForwarded.md) + - 
[ConfigListenerForwardedSource](docs/ConfigListenerForwardedSource.md) + - [ConfigListenerTls](docs/ConfigListenerTls.md) + - [ConfigListenerTlsCertificate](docs/ConfigListenerTlsCertificate.md) + - [ConfigListenerTlsSession](docs/ConfigListenerTlsSession.md) + - [ConfigListenerTlsSessionTickets](docs/ConfigListenerTlsSessionTickets.md) + - [ConfigRouteStep](docs/ConfigRouteStep.md) + - [ConfigRouteStepAction](docs/ConfigRouteStepAction.md) + - [ConfigRouteStepActionPass](docs/ConfigRouteStepActionPass.md) + - [ConfigRouteStepActionProxy](docs/ConfigRouteStepActionProxy.md) + - [ConfigRouteStepActionReturn](docs/ConfigRouteStepActionReturn.md) + - [ConfigRouteStepActionShare](docs/ConfigRouteStepActionShare.md) + - [ConfigRouteStepMatch](docs/ConfigRouteStepMatch.md) + - [ConfigRouteStepMatchArguments](docs/ConfigRouteStepMatchArguments.md) + - [ConfigRouteStepMatchCookies](docs/ConfigRouteStepMatchCookies.md) + - [ConfigRouteStepMatchHeaders](docs/ConfigRouteStepMatchHeaders.md) + - [ConfigRoutes](docs/ConfigRoutes.md) + - [ConfigSettings](docs/ConfigSettings.md) + - [ConfigSettingsHttp](docs/ConfigSettingsHttp.md) + - [ConfigSettingsHttpStatic](docs/ConfigSettingsHttpStatic.md) + - [ConfigSettingsHttpStaticMimeType](docs/ConfigSettingsHttpStaticMimeType.md) + - [Status](docs/Status.md) + - [StatusApplicationsApp](docs/StatusApplicationsApp.md) + - [StatusApplicationsAppProcesses](docs/StatusApplicationsAppProcesses.md) + - [StatusApplicationsAppRequests](docs/StatusApplicationsAppRequests.md) + - [StatusConnections](docs/StatusConnections.md) + - [StatusRequests](docs/StatusRequests.md) + - [StringOrStringArray](docs/StringOrStringArray.md) + + +To get access to the crate's generated documentation, use: + +``` +cargo doc --open +``` + +## Author + +unit-owner@nginx.org + diff --git a/tools/unitctl/unit-openapi/openapi-templates/Cargo.mustache b/tools/unitctl/unit-openapi/openapi-templates/Cargo.mustache new file mode 100644 index 00000000..feca05ee --- /dev/null +++ b/tools/unitctl/unit-openapi/openapi-templates/Cargo.mustache @@ -0,0 +1,65 @@ +[package] +name = "{{{packageName}}}" +version = "{{#lambdaVersion}}{{{packageVersion}}}{{/lambdaVersion}}" +{{#infoEmail}} +authors = ["{{{.}}}"] +{{/infoEmail}} +{{^infoEmail}} +authors = ["OpenAPI Generator team and contributors"] +{{/infoEmail}} +{{#appDescription}} +description = "{{{.}}}" +{{/appDescription}} +{{#licenseInfo}} +license = "{{.}}" +{{/licenseInfo}} +{{^licenseInfo}} +# Override this license by providing a License Object in the OpenAPI. 
+license = "Unlicense" +{{/licenseInfo}} +edition = "2018" +{{#publishRustRegistry}} +publish = ["{{.}}"] +{{/publishRustRegistry}} +{{#repositoryUrl}} +repository = "{{.}}" +{{/repositoryUrl}} +{{#documentationUrl}} +documentation = "{{.}}" +{{/documentationUrl}} +{{#homePageUrl}} +homepage = "{{.}} +{{/homePageUrl}} + +[dependencies] +serde = "1.0" +serde_derive = "1.0" +{{#serdeWith}} +serde_with = "^2.0" +{{/serdeWith}} +serde_json = "1.0" +url = "2.2" +{{#hyper}} +hyper = { version = "0.14" } +http = "0.2" +base64 = "0.21" +futures = "0.3" +{{/hyper}} +{{#withAWSV4Signature}} +aws-sigv4 = "0.3.0" +http = "0.2.5" +secrecy = "0.8.0" +{{/withAWSV4Signature}} +{{#reqwest}} +{{^supportAsync}} +reqwest = "~0.9" +{{/supportAsync}} +{{#supportAsync}} +{{#supportMiddleware}} +reqwest-middleware = "0.2.0" +{{/supportMiddleware}} +[dependencies.reqwest] +version = "^0.11" +features = ["json", "multipart"] +{{/supportAsync}} +{{/reqwest}} diff --git a/tools/unitctl/unit-openapi/openapi-templates/request.rs b/tools/unitctl/unit-openapi/openapi-templates/request.rs new file mode 100644 index 00000000..9cf480cc --- /dev/null +++ b/tools/unitctl/unit-openapi/openapi-templates/request.rs @@ -0,0 +1,248 @@ +use std::collections::HashMap; +use std::pin::Pin; + +use base64::{alphabet, Engine}; +use base64::engine::general_purpose::NO_PAD; +use base64::engine::GeneralPurpose; + +use futures; +use futures::Future; +use futures::future::*; +use hyper; +use hyper::header::{AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, HeaderValue, USER_AGENT}; +use serde; +use serde_json; + +use super::{configuration, Error}; + +const MIME_ENCODER: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, NO_PAD); + +pub(crate) struct ApiKey { + pub in_header: bool, + pub in_query: bool, + pub param_name: String, +} + +impl ApiKey { + fn key(&self, prefix: &Option<String>, key: &str) -> String { + match prefix { + None => key.to_owned(), + Some(ref prefix) => format!("{} {}", prefix, key), + } + } +} + +#[allow(dead_code)] +pub(crate) enum Auth { + None, + ApiKey(ApiKey), + Basic, + Oauth, +} + +/// If the authorization type is unspecified then it will be automatically detected based +/// on the configuration. This functionality is useful when the OpenAPI definition does not +/// include an authorization scheme. +pub(crate) struct Request { + auth: Option<Auth>, + method: hyper::Method, + path: String, + query_params: HashMap<String, String>, + no_return_type: bool, + path_params: HashMap<String, String>, + form_params: HashMap<String, String>, + header_params: HashMap<String, String>, + // TODO: multiple body params are possible technically, but not supported here. 
+ serialized_body: Option<String>, +} + +#[allow(dead_code)] +impl Request { + pub fn new(method: hyper::Method, path: String) -> Self { + Request { + auth: None, + method, + path, + query_params: HashMap::new(), + path_params: HashMap::new(), + form_params: HashMap::new(), + header_params: HashMap::new(), + serialized_body: None, + no_return_type: false, + } + } + + pub fn with_body_param<T: serde::Serialize>(mut self, param: T) -> Self { + self.serialized_body = Some(serde_json::to_string(&param).unwrap()); + self + } + + pub fn with_header_param(mut self, basename: String, param: String) -> Self { + self.header_params.insert(basename, param); + self + } + + #[allow(unused)] + pub fn with_query_param(mut self, basename: String, param: String) -> Self { + self.query_params.insert(basename, param); + self + } + + #[allow(unused)] + pub fn with_path_param(mut self, basename: String, param: String) -> Self { + self.path_params.insert(basename, param); + self + } + + #[allow(unused)] + pub fn with_form_param(mut self, basename: String, param: String) -> Self { + self.form_params.insert(basename, param); + self + } + + pub fn returns_nothing(mut self) -> Self { + self.no_return_type = true; + self + } + + pub fn with_auth(mut self, auth: Auth) -> Self { + self.auth = Some(auth); + self + } + + pub fn execute<'a, C, U>( + self, + conf: &configuration::Configuration<C>, + ) -> Pin<Box<dyn Future<Output=Result<U, Error>> + 'a>> + where + C: hyper::client::connect::Connect + Clone + std::marker::Send + Sync, + U: Sized + std::marker::Send + 'a, + for<'de> U: serde::Deserialize<'de>, + { + let mut query_string = ::url::form_urlencoded::Serializer::new("".to_owned()); + + let mut path = self.path; + for (k, v) in self.path_params { + // replace {id} with the value of the id path param + path = path.replace(&format!("{{{}}}", k), &v); + } + + for (key, val) in self.query_params { + query_string.append_pair(&key, &val); + } + + let mut uri_str = format!("{}{}", conf.base_path, path); + + let query_string_str = query_string.finish(); + if query_string_str != "" { + uri_str += "?"; + uri_str += &query_string_str; + } + let uri: hyper::Uri = match uri_str.parse() { + Err(e) => return Box::pin(futures::future::err(Error::UriError(e))), + Ok(u) => u, + }; + + let mut req_builder = hyper::Request::builder() + .uri(uri) + .method(self.method); + + // Detect the authorization type if it hasn't been set. 
+ let auth = self.auth.unwrap_or_else(|| + if conf.api_key.is_some() { + panic!("Cannot automatically set the API key from the configuration, it must be specified in the OpenAPI definition") + } else if conf.oauth_access_token.is_some() { + Auth::Oauth + } else if conf.basic_auth.is_some() { + Auth::Basic + } else { + Auth::None + } + ); + match auth { + Auth::ApiKey(apikey) => { + if let Some(ref key) = conf.api_key { + let val = apikey.key(&key.prefix, &key.key); + if apikey.in_query { + query_string.append_pair(&apikey.param_name, &val); + } + if apikey.in_header { + req_builder = req_builder.header(&apikey.param_name, val); + } + } + } + Auth::Basic => { + if let Some(ref auth_conf) = conf.basic_auth { + let mut text = auth_conf.0.clone(); + text.push(':'); + if let Some(ref pass) = auth_conf.1 { + text.push_str(&pass[..]); + } + let encoded = MIME_ENCODER.encode(&text); + req_builder = req_builder.header(AUTHORIZATION, encoded); + } + } + Auth::Oauth => { + if let Some(ref token) = conf.oauth_access_token { + let text = "Bearer ".to_owned() + token; + req_builder = req_builder.header(AUTHORIZATION, text); + } + } + Auth::None => {} + } + + if let Some(ref user_agent) = conf.user_agent { + req_builder = req_builder.header(USER_AGENT, match HeaderValue::from_str(user_agent) { + Ok(header_value) => header_value, + Err(e) => return Box::pin(futures::future::err(super::Error::Header(e))) + }); + } + + for (k, v) in self.header_params { + req_builder = req_builder.header(&k, v); + } + + let req_headers = req_builder.headers_mut().unwrap(); + let request_result = if self.form_params.len() > 0 { + req_headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/x-www-form-urlencoded")); + let mut enc = ::url::form_urlencoded::Serializer::new("".to_owned()); + for (k, v) in self.form_params { + enc.append_pair(&k, &v); + } + req_builder.body(hyper::Body::from(enc.finish())) + } else if let Some(body) = self.serialized_body { + req_headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + req_headers.insert(CONTENT_LENGTH, body.len().into()); + req_builder.body(hyper::Body::from(body)) + } else { + req_builder.body(hyper::Body::default()) + }; + let request = match request_result { + Ok(request) => request, + Err(e) => return Box::pin(futures::future::err(Error::from(e))) + }; + + let no_return_type = self.no_return_type; + Box::pin(conf.client + .request(request) + .map_err(|e| Error::from(e)) + .and_then(move |response| { + let status = response.status(); + if !status.is_success() { + futures::future::err::<U, Error>(Error::from((status, response.into_body()))).boxed() + } else if no_return_type { + // This is a hack; if there's no_ret_type, U is (), but serde_json gives an + // error when deserializing "" into (), so deserialize 'null' into it + // instead. + // An alternate option would be to require U: Default, and then return + // U::default() here instead since () implements that, but then we'd + // need to impl default for all models. 
+ futures::future::ok::<U, Error>(serde_json::from_str("null").expect("serde null value")).boxed() + } else { + hyper::body::to_bytes(response.into_body()) + .map(|bytes| serde_json::from_slice(&bytes.unwrap())) + .map_err(|e| Error::from(e)).boxed() + } + })) + } +} diff --git a/tools/unitctl/unit-openapi/src/apis/error.rs b/tools/unitctl/unit-openapi/src/apis/error.rs new file mode 100644 index 00000000..a4a1e354 --- /dev/null +++ b/tools/unitctl/unit-openapi/src/apis/error.rs @@ -0,0 +1,18 @@ +use crate::apis::Error; +use std::error::Error as StdError; +use std::fmt::{Display, Formatter}; + +impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Error::Api(e) => write!(f, "ApiError: {:#?}", e), + Error::Header(e) => write!(f, "HeaderError: {}", e), + Error::Http(e) => write!(f, "HttpError: {:#?}", e), + Error::Hyper(e) => write!(f, "HyperError: {:#?}", e), + Error::Serde(e) => write!(f, "SerdeError: {:#?}", e), + Error::UriError(e) => write!(f, "UriError: {:#?}", e), + } + } +} + +impl StdError for Error {} diff --git a/tools/unitctl/unit-openapi/src/lib.rs b/tools/unitctl/unit-openapi/src/lib.rs new file mode 100644 index 00000000..5435cfdb --- /dev/null +++ b/tools/unitctl/unit-openapi/src/lib.rs @@ -0,0 +1,12 @@ +#![allow(clippy::all)] +#![allow(unused_imports)] +#![allow(clippy::too_many_arguments)] + +extern crate futures; +extern crate hyper; +extern crate serde; +extern crate serde_json; +extern crate url; + +pub mod apis; +pub mod models; diff --git a/tools/unitctl/unitctl/Cargo.toml b/tools/unitctl/unitctl/Cargo.toml new file mode 100644 index 00000000..ec89c975 --- /dev/null +++ b/tools/unitctl/unitctl/Cargo.toml @@ -0,0 +1,57 @@ +[package] +name = "unitctl" +description = "CLI interface to the NGINX Unit Control API" +version = "1.33.0" +authors = ["Elijah Zupancic"] +edition = "2021" +license = "Apache-2.0" + +[[bin]] +name = "unitctl" +path = "src/main.rs" + +[features] + +[dependencies] +clap = { version = "4.4", features = ["default", "derive", "cargo"] } +custom_error = "1.9" +serde = "1.0" +json5 = "0.4" +nu-json = "0.89" +serde_json = { version = "1.0", optional = false } +serde_yaml = "0.9" +rustls-pemfile = "2.0.0" +unit-client-rs = { path = "../unit-client-rs" } +colored_json = "4.1" +tempfile = "3.8" +which = "5.0" +walkdir = "2.4" + +hyper = { version = "0.14", features = ["http1", "server", "client"] } +hyperlocal = "0.8" +hyper-tls = "0.5" +tokio = { version = "1.35", features = ["macros"] } +futures = "0.3" +tar = "0.4.41" + +[package.metadata.deb] +copyright = "2022, F5" +license-file = ["../LICENSE.txt", "0"] +extended-description = """\ +A utility for controlling NGINX Unit.""" +section = "utility" +priority = "optional" +assets = [ + ["../target/release/unitctl", "usr/bin/", "755"], + ["../target/man/unitctl.1.gz", "usr/share/man/man1/", "644"] +] + +[package.metadata.generate-rpm] +summary = """\ +A utility for controlling NGINX Unit.""" +section = "utility" +priority = "optional" +assets = [ + { source = "../target/release/unitctl", dest = "/usr/bin/unitctl", mode = "755" }, + { source = "../target/man/unitctl.1.gz", dest = "/usr/share/man/man1/unitctl.1.gz", mode = "644" }, +] diff --git a/tools/unitctl/unitctl/src/cmd/applications.rs b/tools/unitctl/unitctl/src/cmd/applications.rs new file mode 100644 index 00000000..b0145724 --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/applications.rs @@ -0,0 +1,46 @@ +use crate::unitctl::{ApplicationArgs, ApplicationCommands, UnitCtl}; +use crate::{wait, 
UnitctlError, eprint_error}; +use crate::requests::send_empty_body_deserialize_response; +use unit_client_rs::unit_client::UnitClient; + +pub(crate) async fn cmd(cli: &UnitCtl, args: &ApplicationArgs) -> Result<(), UnitctlError> { + let clients: Vec<UnitClient> = wait::wait_for_sockets(cli) + .await? + .into_iter() + .map(|sock| UnitClient::new(sock)) + .collect(); + + for client in clients { + let _ = match &args.command { + ApplicationCommands::Restart { ref name } => client + .restart_application(name) + .await + .map_err(|e| UnitctlError::UnitClientError { source: *e }) + .and_then(|r| args.output_format.write_to_stdout(&r)), + + /* we should be able to use this but the openapi generator library + * is fundamentally incorrect and provides a broken API for the + * applications endpoint. + ApplicationCommands::List {} => client + .applications() + .await + .map_err(|e| UnitctlError::UnitClientError { source: *e }) + .and_then(|response| args.output_format.write_to_stdout(&response)),*/ + + ApplicationCommands::List {} => { + args.output_format.write_to_stdout( + &send_empty_body_deserialize_response( + &client, + "GET", + "/config/applications", + ).await? + ) + }, + }.map_err(|error| { + eprint_error(&error); + std::process::exit(error.exit_code()); + }); + } + + Ok(()) +} diff --git a/tools/unitctl/unitctl/src/cmd/edit.rs b/tools/unitctl/unitctl/src/cmd/edit.rs new file mode 100644 index 00000000..6679d4a9 --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/edit.rs @@ -0,0 +1,115 @@ +use crate::inputfile::{InputFile, InputFormat}; +use crate::requests::{send_and_validate_config_deserialize_response, send_empty_body_deserialize_response}; +use crate::unitctl::UnitCtl; +use crate::unitctl_error::ControlSocketErrorKind; +use crate::{wait, OutputFormat, UnitctlError}; +use std::path::{Path, PathBuf}; +use unit_client_rs::unit_client::UnitClient; +use which::which; + +const EDITOR_ENV_VARS: [&str; 2] = ["EDITOR", "VISUAL"]; +const EDITOR_KNOWN_LIST: [&str; 8] = [ + "sensible-editor", + "editor", + "vim", + "nano", + "nvim", + "vi", + "pico", + "emacs", +]; + +pub(crate) async fn cmd(cli: &UnitCtl, output_format: OutputFormat) -> Result<(), UnitctlError> { + if cli.control_socket_addresses.is_some() && + cli.control_socket_addresses.clone().unwrap().len() > 1 { + return Err(UnitctlError::ControlSocketError{ + kind: ControlSocketErrorKind::General, + message: "too many control sockets. specify at most one.".to_string(), + }); + } + + let mut control_sockets = wait::wait_for_sockets(cli).await?; + let client = UnitClient::new(control_sockets.pop().unwrap()); + // Get latest configuration + let current_config = send_empty_body_deserialize_response(&client, "GET", "/config").await?; + + // Write JSON to temporary file - this file will automatically be deleted by the OS when + // the last file handle to it is removed. 
+ let mut temp_file = tempfile::Builder::new() + .prefix("unitctl-") + .suffix(".json") + .tempfile() + .map_err(|e| UnitctlError::IoError { source: e })?; + + // Pretty format JSON received from Unit and write to the temporary file + serde_json::to_writer_pretty(temp_file.as_file_mut(), &current_config) + .map_err(|e| UnitctlError::SerializationError { message: e.to_string() })?; + + // Load edited file + let temp_file_path = temp_file.path(); + let before_edit_mod_time = temp_file_path.metadata().ok().map(|m| m.modified().ok()); + + let inputfile = InputFile::FileWithFormat(temp_file_path.into(), InputFormat::Json5); + open_editor(temp_file_path)?; + let after_edit_mod_time = temp_file_path.metadata().ok().map(|m| m.modified().ok()); + + // Check if file was modified before sending to Unit + if let (Some(before), Some(after)) = (before_edit_mod_time, after_edit_mod_time) { + if before == after { + eprintln!("File was not modified - no changes will be sent to Unit"); + return Ok(()); + } + }; + + // Send edited file to Unit to overwrite current configuration + send_and_validate_config_deserialize_response(&client, "PUT", "/config", Some(&inputfile)) + .await + .and_then(|status| output_format.write_to_stdout(&status)) +} + +/// Look for an editor in the environment variables +fn find_editor_from_env() -> Option<PathBuf> { + EDITOR_ENV_VARS + .iter() + .filter_map(std::env::var_os) + .filter(|s| !s.is_empty()) + .filter_map(|s| which(s).ok()) + .filter_map(|path| path.canonicalize().ok()) + .find(|path| path.exists()) +} + +/// Look for editor in path by matching against a list of known editors or aliases +fn find_editor_from_known_list() -> Option<PathBuf> { + EDITOR_KNOWN_LIST + .iter() + .filter_map(|editor| which(editor).ok()) + .filter_map(|path| path.canonicalize().ok()) + .find(|editor| editor.exists()) +} + +/// Find the path to an editor +pub fn find_editor_path() -> Result<PathBuf, UnitctlError> { + find_editor_from_env() + .or_else(find_editor_from_known_list) + .ok_or_else(|| UnitctlError::EditorError { + message: "Could not find an editor".to_string(), + }) +} + +/// Start an editor with a given path +pub fn open_editor(path: &Path) -> Result<(), UnitctlError> { + let editor_path = find_editor_path()?; + let status = std::process::Command::new(editor_path) + .arg(path) + .status() + .map_err(|e| UnitctlError::EditorError { + message: format!("Could not open editor: {}", e), + })?; + if status.success() { + Ok(()) + } else { + Err(UnitctlError::EditorError { + message: format!("Editor exited with non-zero status: {}", status), + }) + } +} diff --git a/tools/unitctl/unitctl/src/cmd/execute.rs b/tools/unitctl/unitctl/src/cmd/execute.rs new file mode 100644 index 00000000..85aea404 --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/execute.rs @@ -0,0 +1,86 @@ +use crate::inputfile::InputFile; +use crate::requests::{ + send_and_validate_config_deserialize_response, send_and_validate_pem_data_deserialize_response, + send_body_deserialize_response, send_empty_body_deserialize_response, +}; +use crate::unitctl::UnitCtl; +use crate::wait; +use crate::{OutputFormat, UnitctlError, eprint_error}; +use unit_client_rs::unit_client::UnitClient; + +pub(crate) async fn cmd( + cli: &UnitCtl, + output_format: &OutputFormat, + input_file: &Option<String>, + method: &str, + path: &str, +) -> Result<(), UnitctlError> { + let clients: Vec<_> = wait::wait_for_sockets(cli) + .await? 
+ .into_iter() + .map(|sock| UnitClient::new(sock)) + .collect(); + + let path_trimmed = path.trim(); + let method_upper = method.to_uppercase(); + let input_file_arg = input_file + .as_ref() + .map(|file| InputFile::new(file, &path_trimmed.to_string())); + + if method_upper.eq("GET") && input_file.is_some() { + eprintln!("Cannot use GET method with input file - ignoring input file"); + } + + for client in clients { + let _ = send_and_deserialize( + client, + method_upper.clone(), + input_file_arg.clone(), + path_trimmed, + output_format + ).await + .map_err(|e| { + eprint_error(&e); + std::process::exit(e.exit_code()); + }); + } + + Ok(()) +} + +async fn send_and_deserialize( + client: UnitClient, + method: String, + input_file: Option<InputFile>, + path: &str, + output_format: &OutputFormat, +) -> Result<(), UnitctlError> { + let is_js_modules_dir = path.starts_with("/js_modules/") || path.starts_with("js_modules/"); + + // If we are sending a GET request to a JS modules directory, we want to print the contents of the JS file + // instead of the JSON response + if method.eq("GET") && is_js_modules_dir && path.ends_with(".js") { + let script = + send_body_deserialize_response::<String>(&client, method.as_str(), path, input_file.as_ref()).await?; + println!("{}", script); + return Ok(()); + } + + // Otherwise, we want to print the JSON response (a map) as represented by the output format + match input_file { + Some(input_file) => { + if input_file.is_config() { + send_and_validate_config_deserialize_response(&client, method.as_str(), path, Some(&input_file)).await + // TLS certificate data + } else if input_file.is_pem_bundle() { + send_and_validate_pem_data_deserialize_response(&client, method.as_str(), path, &input_file).await + // This is unknown data + } else { + panic!("Unknown input file type") + } + } + // A none value for an input file can be considered a request to send an empty body + None => send_empty_body_deserialize_response(&client, method.as_str(), path).await, + } + .and_then(|status| output_format.write_to_stdout(&status)) +} diff --git a/tools/unitctl/unitctl/src/cmd/import.rs b/tools/unitctl/unitctl/src/cmd/import.rs new file mode 100644 index 00000000..956832f3 --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/import.rs @@ -0,0 +1,140 @@ +use crate::inputfile::{InputFile, InputFormat}; +use crate::unitctl::UnitCtl; +use crate::unitctl_error::UnitctlError; +use crate::{requests, wait}; +use std::path::{Path, PathBuf}; +use unit_client_rs::unit_client::{UnitClient, UnitSerializableMap}; +use walkdir::{DirEntry, WalkDir}; + +enum UploadFormat { + Config, + PemBundle, + Javascript, +} + +impl From<&InputFile> for UploadFormat { + fn from(input_file: &InputFile) -> Self { + if input_file.is_config() { + UploadFormat::Config + } else if input_file.is_pem_bundle() { + UploadFormat::PemBundle + } else if input_file.is_javascript() { + UploadFormat::Javascript + } else { + panic!("Unknown input file type"); + } + } +} + +impl UploadFormat { + fn can_be_overwritten(&self) -> bool { + matches!(self, UploadFormat::Config) + } + fn upload_path(&self, path: &Path) -> String { + match self { + UploadFormat::Config => "/config".to_string(), + UploadFormat::PemBundle => format!("/certificates/{}.pem", Self::file_stem(path)), + UploadFormat::Javascript => format!("/js_modules/{}.js", Self::file_stem(path)), + } + } + + fn file_stem(path: &Path) -> String { + path.file_stem().unwrap_or_default().to_string_lossy().into() + } +} + +pub async fn cmd(cli: &UnitCtl, directory: &PathBuf) -> 
Result<(), UnitctlError> { + if !directory.exists() { + return Err(UnitctlError::PathNotFound { + path: directory.to_string_lossy().into(), + }); + } + + let clients: Vec<_> = wait::wait_for_sockets(cli) + .await? + .into_iter() + .map(|sock| UnitClient::new(sock)) + .collect(); + + let mut results = vec![]; + for i in WalkDir::new(directory) + .follow_links(true) + .sort_by_file_name() + .into_iter() + .filter_map(Result::ok) + .filter(|e| !e.path().is_dir()) + { + for client in &clients { + results.push(process_entry(i.clone(), client).await); + } + } + + if results.iter().filter(|r| r.is_err()).count() == results.len() { + Err(UnitctlError::NoFilesImported) + } else { + println!("Imported {} files", results.len()); + Ok(()) + } +} + +async fn process_entry(entry: DirEntry, client: &UnitClient) -> Result<(), UnitctlError> { + let input_file = InputFile::from(entry.path()); + if input_file.format() == InputFormat::Unknown { + println!( + "Skipping unknown file type: {}", + input_file.to_path()?.to_string_lossy() + ); + return Err(UnitctlError::UnknownInputFileType { + path: input_file.to_path()?.to_string_lossy().into(), + }); + } + let upload_format = UploadFormat::from(&input_file); + let upload_path = upload_format.upload_path(entry.path()); + + // We can't overwrite JS or PEM files, so we delete them first + if !upload_format.can_be_overwritten() { + let _ = requests::send_empty_body_deserialize_response(client, "DELETE", upload_path.as_str()) + .await + .ok(); + } + + let result = match upload_format { + UploadFormat::Config => { + requests::send_and_validate_config_deserialize_response( + client, + "PUT", + upload_path.as_str(), + Some(&input_file), + ) + .await + } + UploadFormat::PemBundle => { + requests::send_and_validate_pem_data_deserialize_response(client, "PUT", upload_path.as_str(), &input_file) + .await + } + UploadFormat::Javascript => { + requests::send_body_deserialize_response::<UnitSerializableMap>( + client, + "PUT", + upload_path.as_str(), + Some(&input_file), + ) + .await + } + }; + + match result { + Ok(_) => { + eprintln!( + "Imported {} -> {}", + input_file.to_path()?.to_string_lossy(), + upload_path + ); + Ok(()) + } + Err(error) => { + eprintln!("Error {} -> {}", input_file.to_path()?.to_string_lossy(), error); + Err(error) + } + } +} diff --git a/tools/unitctl/unitctl/src/cmd/instances.rs b/tools/unitctl/unitctl/src/cmd/instances.rs new file mode 100644 index 00000000..92e09201 --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/instances.rs @@ -0,0 +1,149 @@ +use crate::unitctl::{InstanceArgs, InstanceCommands}; +use crate::unitctl_error::ControlSocketErrorKind; +use crate::{OutputFormat, UnitctlError}; + +use std::path::PathBuf; +use unit_client_rs::control_socket_address::ControlSocket; +use unit_client_rs::unitd_docker::deploy_new_container; +use unit_client_rs::unitd_instance::UnitdInstance; + +pub(crate) async fn cmd(args: InstanceArgs) -> Result<(), UnitctlError> { + if let Some(cmd) = args.command { + match cmd { + InstanceCommands::New { + ref socket, + ref application, + ref application_read_only, + ref image, + } => { + // validation for application dir + if !PathBuf::from(application).is_dir() { + eprintln!("application path must be a directory"); + Err(UnitctlError::NoFilesImported) + } else if !PathBuf::from(application).as_path().exists() { + eprintln!("application path must exist"); + Err(UnitctlError::NoFilesImported) + } else { + let addr = ControlSocket::parse_address(socket); + if let Err(e) = addr { + return 
Err(UnitctlError::UnitClientError { source: e }); + } + + // validate we aren't processing an abstract socket + if let ControlSocket::UnixLocalAbstractSocket(_) = addr.as_ref().unwrap() { + return Err(UnitctlError::ControlSocketError { + kind: ControlSocketErrorKind::General, + message: "cannot pass abstract socket to docker container".to_string(), + }); + } + + // warn user of OSX docker limitations + if let ControlSocket::UnixLocalSocket(ref sock_path) = addr.as_ref().unwrap() { + if cfg!(target_os = "macos") { + return Err(UnitctlError::ControlSocketError { + kind: ControlSocketErrorKind::General, + message: format!( + "Docker on macOS will break unix domain sockets mounted {} {}", + "in containers, see the following link for more information", + "https://github.com/docker/for-mac/issues/483" + ), + }); + } + + if !sock_path.is_dir() { + return Err(UnitctlError::ControlSocketError { + kind: ControlSocketErrorKind::General, + message: "user must specify a directory for the UNIX socket".to_string(), + }); + } + } + + // validate a TCP URI + if let ControlSocket::TcpSocket(uri) = addr.as_ref().unwrap() { + if let Some(host) = uri.host() { + if host != "127.0.0.1" { + return Err(UnitctlError::ControlSocketError { + kind: ControlSocketErrorKind::General, + message: "TCP URI must point to 127.0.0.1".to_string(), + }); + } + } else { + return Err(UnitctlError::ControlSocketError { + kind: ControlSocketErrorKind::General, + message: "TCP URI must point to a host".to_string(), + }); + } + + if let Some(port) = uri.port_u16() { + if port < 1025 { + eprintln!( + "warning! you are asking docker to forward a privileged port. {}", + "please make sure docker has access to it" + ); + } + } else { + return Err(UnitctlError::ControlSocketError { + kind: ControlSocketErrorKind::General, + message: "TCP URI must specify a port".to_string(), + }); + } + + if uri.path() != "/" { + eprintln!("warning! path {} will be ignored", uri.path()) + } + } + + // reflect changes to user + // print this to STDERR to avoid polluting deserialized data output + eprintln!("> Pulling and starting a container from {}", image); + eprintln!("> Will mount {} to /www for application access", application); + + if *application_read_only { + eprintln!("> Application mount will be read only"); + } + + eprintln!("> Container will be on host network"); + match addr.as_ref().unwrap() { + ControlSocket::UnixLocalSocket(path) => eprintln!( + "> Will mount directory containing {} to /var/www for control API", + path.as_path().to_string_lossy() + ), + ControlSocket::TcpSocket(uri) => { + eprintln!("> Will forward port {} for control API", uri.port_u16().unwrap()) + } + _ => unimplemented!(), // abstract socket case ruled out previously + } + + if cfg!(target_os = "macos") { + eprintln!("> mac users: enable host networking in docker desktop"); + } + + // do the actual deployment + deploy_new_container(addr.unwrap(), application, *application_read_only, image) + .await + .map_or_else( + |e| Err(UnitctlError::UnitClientError { source: e }), + |warn| { + for i in warn { + eprintln!("warning! 
from docker: {}", i); + } + Ok(()) + }, + ) + } + } + } + } else { + let instances = UnitdInstance::running_unitd_instances().await; + if instances.is_empty() { + Err(UnitctlError::NoUnitInstancesError) + } else if args.output_format.eq(&OutputFormat::Text) { + instances.iter().for_each(|instance| { + println!("{}", instance); + }); + Ok(()) + } else { + args.output_format.write_to_stdout(&instances) + } + } +} diff --git a/tools/unitctl/unitctl/src/cmd/listeners.rs b/tools/unitctl/unitctl/src/cmd/listeners.rs new file mode 100644 index 00000000..05fbec07 --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/listeners.rs @@ -0,0 +1,23 @@ +use crate::unitctl::UnitCtl; +use crate::wait; +use crate::{OutputFormat, UnitctlError, eprint_error}; +use unit_client_rs::unit_client::UnitClient; + +pub async fn cmd(cli: &UnitCtl, output_format: OutputFormat) -> Result<(), UnitctlError> { + let socks = wait::wait_for_sockets(cli) + .await?; + let clients = socks.iter() + .map(|sock| UnitClient::new(sock.clone())); + + for client in clients { + let _ = client.listeners() + .await + .map_err(|e| { + let err = UnitctlError::UnitClientError { source: *e }; + eprint_error(&err); + std::process::exit(err.exit_code()); + }) + .and_then(|response| output_format.write_to_stdout(&response)); + } + Ok(()) +} diff --git a/tools/unitctl/unitctl/src/cmd/mod.rs b/tools/unitctl/unitctl/src/cmd/mod.rs new file mode 100644 index 00000000..f2a2c120 --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/mod.rs @@ -0,0 +1,8 @@ +pub(crate) mod applications; +pub(crate) mod edit; +pub(crate) mod execute; +pub(crate) mod import; +pub(crate) mod instances; +pub(crate) mod listeners; +pub(crate) mod status; +pub(crate) mod save; diff --git a/tools/unitctl/unitctl/src/cmd/save.rs b/tools/unitctl/unitctl/src/cmd/save.rs new file mode 100644 index 00000000..d93ce221 --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/save.rs @@ -0,0 +1,61 @@ +use crate::unitctl::UnitCtl; +use crate::wait; +use crate::UnitctlError; +use crate::requests::send_empty_body_deserialize_response; +use crate::unitctl_error::ControlSocketErrorKind; +use unit_client_rs::unit_client::UnitClient; +use tar::{Builder, Header}; +use std::fs::File; +use std::io::stdout; + + +pub async fn cmd( + cli: &UnitCtl, + filename: &String +) -> Result<(), UnitctlError> { + if cli.control_socket_addresses.is_some() && + cli.control_socket_addresses.clone().unwrap().len() > 1 { + return Err(UnitctlError::ControlSocketError{ + kind: ControlSocketErrorKind::General, + message: "too many control sockets. specify at most one.".to_string(), + }); + } + + let mut control_sockets = wait::wait_for_sockets(cli).await?; + let client = UnitClient::new(control_sockets.pop().unwrap()); + + if !filename.ends_with(".tar") { + eprintln!("Warning: writing uncompressed tarball to {}", filename); + } + + let config_res = serde_json::to_string_pretty( + &send_empty_body_deserialize_response(&client, "GET", "/config").await? 
+ ); + if let Err(e) = config_res { + return Err(UnitctlError::DeserializationError{message: e.to_string()}) + } + + let current_config = config_res + .unwrap() + .into_bytes(); + + //let current_js_modules = send_empty_body_deserialize_response(&client, "GET", "/js_modules") + // .await?; + + let mut conf_header = Header::new_gnu(); + conf_header.set_size(current_config.len() as u64); + conf_header.set_mode(0o644); + conf_header.set_cksum(); + + // builder has a different type depending on output + if filename == "-" { + let mut ar = Builder::new(stdout()); + ar.append_data(&mut conf_header, "config.json", current_config.as_slice()).unwrap(); + } else { + let file = File::create(filename).unwrap(); + let mut ar = Builder::new(file); + ar.append_data(&mut conf_header, "config.json", current_config.as_slice()).unwrap(); + } + + Ok(()) +} diff --git a/tools/unitctl/unitctl/src/cmd/status.rs b/tools/unitctl/unitctl/src/cmd/status.rs new file mode 100644 index 00000000..6d5eb00a --- /dev/null +++ b/tools/unitctl/unitctl/src/cmd/status.rs @@ -0,0 +1,23 @@ +use crate::unitctl::UnitCtl; +use crate::wait; +use crate::{OutputFormat, UnitctlError, eprint_error}; +use unit_client_rs::unit_client::UnitClient; + +pub async fn cmd(cli: &UnitCtl, output_format: OutputFormat) -> Result<(), UnitctlError> { + let socks = wait::wait_for_sockets(cli) + .await?; + let clients = socks.iter() + .map(|sock| UnitClient::new(sock.clone())); + + for client in clients { + let _ = client.status() + .await + .map_err(|e| { + let err = UnitctlError::UnitClientError { source: *e }; + eprint_error(&err); + std::process::exit(err.exit_code()); + }) + .and_then(|response| output_format.write_to_stdout(&response)); + } + Ok(()) +} diff --git a/tools/unitctl/unitctl/src/inputfile.rs b/tools/unitctl/unitctl/src/inputfile.rs new file mode 100644 index 00000000..b2479d50 --- /dev/null +++ b/tools/unitctl/unitctl/src/inputfile.rs @@ -0,0 +1,289 @@ +use std::collections::HashMap; +use std::io; +use std::io::{BufRead, BufReader, Error as IoError, Read}; +use std::path::{Path, PathBuf}; + +use crate::known_size::KnownSize; +use clap::ValueEnum; + +use super::UnitSerializableMap; +use super::UnitctlError; + +/// Input file data format +#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq)] +pub enum InputFormat { + Yaml, + Json, + Json5, + Hjson, + Pem, + JavaScript, + Unknown, +} + +impl InputFormat { + pub fn from_file_extension<S>(file_extension: S) -> Self + where + S: Into<String>, + { + match file_extension.into().to_lowercase().as_str() { + "yaml" => InputFormat::Yaml, + "yml" => InputFormat::Yaml, + "json" => InputFormat::Json, + "json5" => InputFormat::Json5, + "hjson" => InputFormat::Hjson, + "cjson" => InputFormat::Hjson, + "pem" => InputFormat::Pem, + "js" => InputFormat::JavaScript, + "njs" => InputFormat::JavaScript, + _ => InputFormat::Unknown, + } + } + + /// This function allows us to infer the input format based on the remote path which is + /// useful when processing input from STDIN. 
+ pub fn from_remote_path<S>(remote_path: S) -> Self + where + S: Into<String>, + { + let remote_upload_path = remote_path.into(); + let lead_slash_removed = remote_upload_path.trim_start_matches('/'); + let first_path = lead_slash_removed + .split_once('/') + .map_or(lead_slash_removed, |(first, _)| first); + match first_path { + "config" => InputFormat::Hjson, + "certificates" => InputFormat::Pem, + "js_modules" => InputFormat::JavaScript, + _ => InputFormat::Json, + } + } +} + +/// A "file" that can be used as input to a command +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum InputFile { + // Data received via STDIN + Stdin(InputFormat), + // Data that is on the file system where the format is inferred from the extension + File(Box<Path>), + // Data that is on the file system where the format is explicitly specified + FileWithFormat(Box<Path>, InputFormat), +} + +impl InputFile { + /// Creates a new instance of `InputFile` from a string + pub fn new<S>(file_path_or_dash: S, remote_path: S) -> Self + where + S: Into<String>, + { + let file_path: String = file_path_or_dash.into(); + + match file_path.as_str() { + "-" => InputFile::Stdin(InputFormat::from_remote_path(remote_path)), + _ => InputFile::File(PathBuf::from(&file_path).into_boxed_path()), + } + } + + /// Returns the format of the input file + pub fn format(&self) -> InputFormat { + match self { + InputFile::Stdin(format) => *format, + InputFile::File(path) => { + // Figure out the file format based on the file extension + match path.extension().and_then(|s| s.to_str()) { + Some(ext) => InputFormat::from_file_extension(ext), + None => InputFormat::Unknown, + } + } + InputFile::FileWithFormat(_file, format) => *format, + } + } + + pub fn mime_type(&self) -> String { + match self.format() { + InputFormat::Yaml => "application/x-yaml".to_string(), + InputFormat::Json => "application/json".to_string(), + InputFormat::Json5 => "application/json5".to_string(), + InputFormat::Hjson => "application/hjson".to_string(), + InputFormat::Pem => "application/x-pem-file".to_string(), + InputFormat::JavaScript => "application/javascript".to_string(), + InputFormat::Unknown => "application/octet-stream".to_string(), + } + } + + /// Returns true if the input file is in the format of a configuration file + pub fn is_config(&self) -> bool { + matches!( + self.format(), + InputFormat::Yaml | InputFormat::Json | InputFormat::Json5 | InputFormat::Hjson + ) + } + + pub fn is_javascript(&self) -> bool { + matches!(self.format(), InputFormat::JavaScript) + } + + pub fn is_pem_bundle(&self) -> bool { + matches!(self.format(), InputFormat::Pem) + } + + /// Returns the path to the input file if it is a file and not a stream + pub fn to_path(&self) -> Result<&Path, UnitctlError> { + match self { + InputFile::Stdin(_) => { + let io_error = IoError::new(std::io::ErrorKind::InvalidInput, "Input file is stdin"); + Err(UnitctlError::IoError { source: io_error }) + } + InputFile::File(path) | InputFile::FileWithFormat(path, _) => Ok(path), + } + } + + /// Converts a HJSON Value type to a JSON Value type + fn hjson_value_to_json_value(value: nu_json::Value) -> serde_json::Value { + serde_json::to_value(value).expect("Failed to convert HJSON value to JSON value") + } + + pub fn to_unit_serializable_map(&self) -> Result<UnitSerializableMap, UnitctlError> { + let reader: Box<dyn BufRead + Send> = self.try_into()?; + let body_data: UnitSerializableMap = match self.format() { + InputFormat::Yaml => serde_yaml::from_reader(reader) + .map_err(|e| 
UnitctlError::DeserializationError { message: e.to_string() })?, + InputFormat::Json => serde_json::from_reader(reader) + .map_err(|e| UnitctlError::DeserializationError { message: e.to_string() })?, + InputFormat::Json5 => { + let mut reader = BufReader::new(reader); + let mut json5_string: String = String::new(); + reader + .read_to_string(&mut json5_string) + .map_err(|e| UnitctlError::DeserializationError { message: e.to_string() })?; + json5::from_str(&json5_string) + .map_err(|e| UnitctlError::DeserializationError { message: e.to_string() })? + } + InputFormat::Hjson => { + let hjson_value: HashMap<String, nu_json::Value> = nu_json::from_reader(reader) + .map_err(|e| UnitctlError::DeserializationError { message: e.to_string() })?; + + hjson_value + .iter() + .map(|(k, v)| { + let json_value = Self::hjson_value_to_json_value(v.clone()); + (k.clone(), json_value) + }) + .collect() + } + _ => Err(UnitctlError::DeserializationError { + message: format!("Unsupported input format for serialization: {:?}", self), + })?, + }; + Ok(body_data) + } +} + +impl From<&Path> for InputFile { + fn from(path: &Path) -> Self { + InputFile::File(path.into()) + } +} + +impl TryInto<Box<dyn BufRead + Send>> for &InputFile { + type Error = UnitctlError; + + fn try_into(self) -> Result<Box<dyn BufRead + Send>, Self::Error> { + let reader: Box<dyn BufRead + Send> = match self { + InputFile::Stdin(_) => Box::new(BufReader::new(io::stdin())), + InputFile::File(_) | InputFile::FileWithFormat(_, _) => { + let path = self.to_path()?; + let file = std::fs::File::open(path).map_err(|e| UnitctlError::IoError { source: e })?; + let reader = Box::new(BufReader::new(file)); + Box::new(reader) + } + }; + Ok(reader) + } +} + +impl TryInto<Vec<u8>> for &InputFile { + type Error = UnitctlError; + + fn try_into(self) -> Result<Vec<u8>, Self::Error> { + let mut buf: Vec<u8> = vec![]; + let mut reader: Box<dyn BufRead + Send> = self.try_into()?; + reader + .read_to_end(&mut buf) + .map_err(|e| UnitctlError::IoError { source: e })?; + Ok(buf) + } +} + +impl TryInto<KnownSize> for &InputFile { + type Error = UnitctlError; + + fn try_into(self) -> Result<KnownSize, Self::Error> { + let known_size: KnownSize = match self { + InputFile::Stdin(_) => { + let mut buf: Vec<u8> = vec![]; + let _ = io::stdin() + .read_to_end(&mut buf) + .map_err(|e| UnitctlError::IoError { source: e })?; + KnownSize::Vec(buf) + } + InputFile::File(_) | InputFile::FileWithFormat(_, _) => { + let path = self.to_path()?; + let file = std::fs::File::open(path).map_err(|e| UnitctlError::IoError { source: e })?; + let len = file.metadata().map_err(|e| UnitctlError::IoError { source: e })?.len(); + let reader = Box::new(file); + KnownSize::Read(reader, len) + } + }; + Ok(known_size) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn can_parse_file_extensions() { + assert_eq!(InputFormat::from_file_extension("yaml"), InputFormat::Yaml); + assert_eq!(InputFormat::from_file_extension("yml"), InputFormat::Yaml); + assert_eq!(InputFormat::from_file_extension("json"), InputFormat::Json); + assert_eq!(InputFormat::from_file_extension("json5"), InputFormat::Json5); + assert_eq!(InputFormat::from_file_extension("pem"), InputFormat::Pem); + assert_eq!(InputFormat::from_file_extension("js"), InputFormat::JavaScript); + assert_eq!(InputFormat::from_file_extension("njs"), InputFormat::JavaScript); + assert_eq!(InputFormat::from_file_extension("txt"), InputFormat::Unknown); + } + + #[test] + fn can_parse_remote_paths() { + 
assert_eq!(InputFormat::from_remote_path("//config"), InputFormat::Hjson); + assert_eq!(InputFormat::from_remote_path("/config"), InputFormat::Hjson); + assert_eq!(InputFormat::from_remote_path("/config/"), InputFormat::Hjson); + assert_eq!(InputFormat::from_remote_path("config/"), InputFormat::Hjson); + assert_eq!(InputFormat::from_remote_path("config"), InputFormat::Hjson); + assert_eq!(InputFormat::from_remote_path("/config/something/"), InputFormat::Hjson); + assert_eq!(InputFormat::from_remote_path("config/something/"), InputFormat::Hjson); + assert_eq!(InputFormat::from_remote_path("config/something"), InputFormat::Hjson); + assert_eq!(InputFormat::from_remote_path("/certificates"), InputFormat::Pem); + assert_eq!(InputFormat::from_remote_path("/certificates/"), InputFormat::Pem); + assert_eq!(InputFormat::from_remote_path("certificates/"), InputFormat::Pem); + assert_eq!(InputFormat::from_remote_path("certificates"), InputFormat::Pem); + assert_eq!(InputFormat::from_remote_path("js_modules"), InputFormat::JavaScript); + assert_eq!(InputFormat::from_remote_path("js_modules/"), InputFormat::JavaScript); + + assert_eq!( + InputFormat::from_remote_path("/certificates/something/"), + InputFormat::Pem + ); + assert_eq!( + InputFormat::from_remote_path("certificates/something/"), + InputFormat::Pem + ); + assert_eq!( + InputFormat::from_remote_path("certificates/something"), + InputFormat::Pem + ); + } +} diff --git a/tools/unitctl/unitctl/src/known_size.rs b/tools/unitctl/unitctl/src/known_size.rs new file mode 100644 index 00000000..d73aff91 --- /dev/null +++ b/tools/unitctl/unitctl/src/known_size.rs @@ -0,0 +1,77 @@ +use futures::Stream; +use hyper::Body; +use std::io; +use std::io::{Cursor, Read}; +use std::pin::Pin; +use std::task::{Context, Poll}; + +pub enum KnownSize { + Vec(Vec<u8>), + Read(Box<dyn Read + Send>, u64), + String(String), + Empty, +} + +impl KnownSize { + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + pub fn len(&self) -> u64 { + match self { + KnownSize::Vec(v) => v.len() as u64, + KnownSize::Read(_, size) => *size, + KnownSize::String(s) => s.len() as u64, + KnownSize::Empty => 0, + } + } +} + +impl Stream for KnownSize { + type Item = io::Result<Vec<u8>>; + + fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { + let buf = &mut [0u8; 1024]; + + if let KnownSize::Read(r, _) = self.get_mut() { + return match r.read(buf) { + Ok(0) => Poll::Ready(None), + Ok(n) => Poll::Ready(Some(Ok(buf[..n].to_vec()))), + Err(e) => Poll::Ready(Some(Err(e))), + }; + } + + panic!("not implemented") + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (0, Some(self.len() as usize)) + } +} + +impl From<KnownSize> for Box<dyn Read + Send> { + fn from(value: KnownSize) -> Self { + match value { + KnownSize::Vec(v) => Box::new(Cursor::new(v)), + KnownSize::Read(r, _) => r, + KnownSize::String(s) => Box::new(Cursor::new(s)), + KnownSize::Empty => Box::new(Cursor::new(Vec::new())), + } + } +} + +impl From<KnownSize> for Body { + fn from(value: KnownSize) -> Self { + if value.is_empty() { + return Body::empty(); + } + if let KnownSize::Vec(v) = value { + return Body::from(v); + } + if let KnownSize::String(s) = value { + return Body::from(s); + } + + Body::wrap_stream(value) + } +} diff --git a/tools/unitctl/unitctl/src/main.rs b/tools/unitctl/unitctl/src/main.rs new file mode 100644 index 00000000..dc3c09d1 --- /dev/null +++ b/tools/unitctl/unitctl/src/main.rs @@ -0,0 +1,60 @@ +extern crate clap; +extern crate colored_json; +extern crate 
custom_error; +extern crate nu_json; +extern crate rustls_pemfile; +extern crate serde; +extern crate unit_client_rs; + +use clap::Parser; + +use crate::cmd::{ + applications, edit, execute as execute_cmd, + import, instances, listeners, status, + save +}; +use crate::output_format::OutputFormat; +use crate::unitctl::{Commands, UnitCtl}; +use crate::unitctl_error::{UnitctlError, eprint_error}; +use unit_client_rs::unit_client::{UnitClient, UnitSerializableMap}; + +mod cmd; +mod inputfile; +pub mod known_size; +mod output_format; +mod requests; +mod unitctl; +mod unitctl_error; +mod wait; + +#[tokio::main] +async fn main() -> Result<(), UnitctlError> { + let cli = UnitCtl::parse(); + + match cli.command { + Commands::Instances(args) => instances::cmd(args).await, + + Commands::Apps(ref args) => applications::cmd(&cli, args).await, + + Commands::Edit { output_format } => edit::cmd(&cli, output_format).await, + + Commands::Import { ref directory } => import::cmd(&cli, directory).await, + + Commands::Execute { + ref output_format, + ref input_file, + ref method, + ref path, + } => execute_cmd::cmd(&cli, output_format, input_file, method, path).await, + + Commands::Status { output_format } => status::cmd(&cli, output_format).await, + + Commands::Listeners { output_format } => listeners::cmd(&cli, output_format).await, + + Commands::Export { ref filename } => save::cmd(&cli, filename).await, + } + .map_err(|error| { + eprint_error(&error); + std::process::exit(error.exit_code()); + }) +} diff --git a/tools/unitctl/unitctl/src/output_format.rs b/tools/unitctl/unitctl/src/output_format.rs new file mode 100644 index 00000000..eb7f954e --- /dev/null +++ b/tools/unitctl/unitctl/src/output_format.rs @@ -0,0 +1,43 @@ +use crate::UnitctlError; +use clap::ValueEnum; +use colored_json::ColorMode; +use serde::Serialize; +use std::io::{stdout, BufWriter, Write}; + +#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq)] +pub(crate) enum OutputFormat { + Yaml, + Json, + #[value(id = "json-pretty")] + JsonPretty, + Text, +} + +impl OutputFormat { + pub fn write_to_stdout<T>(&self, object: &T) -> Result<(), UnitctlError> + where + T: ?Sized + Serialize, + { + let no_color = std::env::var("NO_COLOR").map_or(false, |_| true); + let mut out = stdout(); + let value = + serde_json::to_value(object).map_err(|e| UnitctlError::SerializationError { message: e.to_string() })?; + + match (self, no_color) { + (OutputFormat::Yaml, _) => serde_yaml::to_writer(BufWriter::new(out), &value) + .map_err(|e| UnitctlError::SerializationError { message: e.to_string() }), + (OutputFormat::Json, _) => serde_json::to_writer(BufWriter::new(out), &value) + .map_err(|e| UnitctlError::SerializationError { message: e.to_string() }), + (OutputFormat::JsonPretty, true) => serde_json::to_writer_pretty(BufWriter::new(out), &value) + .map_err(|e| UnitctlError::SerializationError { message: e.to_string() }), + (OutputFormat::JsonPretty, false) => { + let mode = ColorMode::Auto(colored_json::Output::StdOut); + colored_json::write_colored_json_with_mode(&value, &mut out, mode) + .map_err(|e| UnitctlError::SerializationError { message: e.to_string() }) + } + (OutputFormat::Text, _) => stdout() + .write_fmt(format_args!("{:?}", &value)) + .map_err(|e| UnitctlError::IoError { source: e }), + } + } +} diff --git a/tools/unitctl/unitctl/src/requests.rs b/tools/unitctl/unitctl/src/requests.rs new file mode 100644 index 00000000..2743c984 --- /dev/null +++ b/tools/unitctl/unitctl/src/requests.rs @@ -0,0 +1,178 @@ +use super::inputfile::InputFile; +use 
super::UnitClient;
+use super::UnitSerializableMap;
+use super::UnitctlError;
+use crate::known_size::KnownSize;
+use hyper::{Body, Request};
+use rustls_pemfile::Item;
+use std::collections::HashMap;
+use std::io::Cursor;
+use std::sync::atomic::AtomicUsize;
+use unit_client_rs::unit_client::UnitClientError;
+
+/// Send the contents of a file to the unit server
+/// We assume that the file is valid and can be sent to the server
+pub async fn send_and_validate_config_deserialize_response(
+    client: &UnitClient,
+    method: &str,
+    path: &str,
+    input_file: Option<&InputFile>,
+) -> Result<UnitSerializableMap, UnitctlError> {
+    let body_data = match input_file {
+        Some(input) => Some(input.to_unit_serializable_map()?),
+        None => None,
+    };
+
+    /* Unfortunately, we have to load the JSON text into memory before sending it to the server.
+     * This allows for validation of the JSON content before sending it to the server. There may be
+     * a better way of doing this and it is worth investigating. */
+    let json = serde_json::to_value(&body_data).map_err(|error| UnitClientError::JsonError {
+        source: error,
+        path: path.into(),
+    })?;
+
+    let mime_type = input_file.map(|f| f.mime_type());
+    let reader = KnownSize::String(json.to_string());
+
+    streaming_upload_deserialize_response(client, method, path, mime_type, reader)
+        .await
+        .map_err(|e| UnitctlError::UnitClientError { source: e })
+}
+
+/// Send an empty body to the unit server
+pub async fn send_empty_body_deserialize_response(
+    client: &UnitClient,
+    method: &str,
+    path: &str,
+) -> Result<UnitSerializableMap, UnitctlError> {
+    send_body_deserialize_response(client, method, path, None).await
+}
+
+/// Send the contents of a PEM file to the unit server
+pub async fn send_and_validate_pem_data_deserialize_response(
+    client: &UnitClient,
+    method: &str,
+    path: &str,
+    input_file: &InputFile,
+) -> Result<UnitSerializableMap, UnitctlError> {
+    let bytes: Vec<u8> = input_file.try_into()?;
+    {
+        let mut cursor = Cursor::new(&bytes);
+        let items = rustls_pemfile::read_all(&mut cursor)
+            .map(|item| item.map_err(|e| UnitctlError::IoError { source: e }))
+            .collect();
+        validate_pem_items(items)?;
+    }
+    let known_size = KnownSize::Vec((*bytes).to_owned());
+
+    streaming_upload_deserialize_response(client, method, path, Some(input_file.mime_type()), known_size)
+        .await
+        .map_err(|e| UnitctlError::UnitClientError { source: e })
+}
+
+/// Validate the contents of a PEM file
+fn validate_pem_items(pem_items: Vec<Result<Item, UnitctlError>>) -> Result<(), UnitctlError> {
+    fn item_name(item: Item) -> String {
+        match item {
+            Item::X509Certificate(_) => "X509Certificate",
+            Item::Sec1Key(_) => "Sec1Key",
+            Item::Crl(_) => "Crl",
+            Item::Pkcs1Key(_) => "Pkcs1Key",
+            Item::Pkcs8Key(_) => "Pkcs8Key",
+            // Note: this is not a valid PEM item, but the rustls_pemfile library defines the enum as non-exhaustive
+            _ => "Unknown",
+        }
+        .to_string()
+    }
+
+    if pem_items.is_empty() {
+        let error = UnitctlError::CertificateError {
+            message: "No certificates found in file".to_string(),
+        };
+        return Err(error);
+    }
+
+    let mut items_tally: HashMap<String, AtomicUsize> = HashMap::new();
+
+    for pem_item_result in pem_items {
+        let pem_item = pem_item_result?;
+        let key = item_name(pem_item);
+        if let Some(count) = items_tally.get_mut(key.clone().as_str()) {
+            count.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
+        } else {
+            items_tally.insert(key, AtomicUsize::new(1));
+        }
+    }
+
+    let key_count = items_tally
+        .iter()
+        .filter(|(key, _)| key.ends_with("Key"))
.fold(0, |acc, (_, count)| { + acc + count.load(std::sync::atomic::Ordering::Relaxed) + }); + let cert_count = items_tally + .iter() + .filter(|(key, _)| key.ends_with("Certificate")) + .fold(0, |acc, (_, count)| { + acc + count.load(std::sync::atomic::Ordering::Relaxed) + }); + + if key_count == 0 { + let error = UnitctlError::CertificateError { + message: "No private keys found in file".to_string(), + }; + return Err(error); + } + if cert_count == 0 { + let error = UnitctlError::CertificateError { + message: "No certificates found in file".to_string(), + }; + return Err(error); + } + + Ok(()) +} + +pub async fn send_body_deserialize_response<RESPONSE: for<'de> serde::Deserialize<'de>>( + client: &UnitClient, + method: &str, + path: &str, + input_file: Option<&InputFile>, +) -> Result<RESPONSE, UnitctlError> { + match input_file { + Some(input) => { + streaming_upload_deserialize_response(client, method, path, Some(input.mime_type()), input.try_into()?) + } + None => streaming_upload_deserialize_response(client, method, path, None, KnownSize::Empty), + } + .await + .map_err(|e| UnitctlError::UnitClientError { source: e }) +} + +async fn streaming_upload_deserialize_response<RESPONSE: for<'de> serde::Deserialize<'de>>( + client: &UnitClient, + method: &str, + path: &str, + mime_type: Option<String>, + read: KnownSize, +) -> Result<RESPONSE, UnitClientError> { + let uri = client.control_socket.create_uri_with_path(path); + + let content_length = read.len(); + let body = Body::from(read); + + let mut request = Request::builder() + .method(method) + .header("Content-Length", content_length) + .uri(uri) + .body(body) + .expect("Unable to build request"); + + if let Some(content_type) = mime_type { + request + .headers_mut() + .insert("Content-Type", content_type.parse().unwrap()); + } + + client.send_request_and_deserialize_response(request).await +} diff --git a/tools/unitctl/unitctl/src/unitctl.rs b/tools/unitctl/unitctl/src/unitctl.rs new file mode 100644 index 00000000..43f2b777 --- /dev/null +++ b/tools/unitctl/unitctl/src/unitctl.rs @@ -0,0 +1,222 @@ +extern crate clap; + +use crate::output_format::OutputFormat; +use clap::error::ErrorKind::ValueValidation; +use clap::{Args, Error as ClapError, Parser, Subcommand}; +use std::path::PathBuf; +use unit_client_rs::control_socket_address::ControlSocket; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about)] +pub(crate) struct UnitCtl { + #[arg( + required = false, + short = 's', + long = "control-socket-address", + value_parser = parse_control_socket_address, + value_name = "CONTROL_SOCKET_ADDRESS", + help = "Path (unix:/var/run/unit/control.sock), tcp address with port (127.0.0.1:80), or URL. This flag can be specified multiple times." 
+    )]
+    pub(crate) control_socket_addresses: Option<Vec<ControlSocket>>,
+
+    #[arg(
+        required = false,
+        default_missing_value = "1",
+        value_parser = parse_u8,
+        short = 'w',
+        long = "wait-timeout-seconds",
+        help = "Number of seconds to wait for control socket to become available"
+    )]
+    pub(crate) wait_time_seconds: Option<u8>,
+
+    #[arg(
+        required = false,
+        default_value = "3",
+        value_parser = parse_u8,
+        short = 't',
+        long = "wait-max-tries",
+        help = "Number of times to try to access control socket when waiting"
+    )]
+    pub(crate) wait_max_tries: Option<u8>,
+
+    #[command(subcommand)]
+    pub(crate) command: Commands,
+}
+
+#[derive(Debug, Subcommand)]
+pub(crate) enum Commands {
+    #[command(about = "List all running Unit processes")]
+    Instances(InstanceArgs),
+
+    #[command(about = "Open current Unit configuration in editor")]
+    Edit {
+        #[arg(
+            required = false,
+            global = true,
+            short = 't',
+            long = "output-format",
+            default_value = "json-pretty",
+            help = "Output format of the result"
+        )]
+        output_format: OutputFormat,
+    },
+
+    #[command(about = "Import configuration from a directory")]
+    Import {
+        #[arg(required = true, help = "Directory to import from")]
+        directory: PathBuf,
+    },
+
+    #[command(about = "Sends raw JSON payload to Unit")]
+    Execute {
+        #[arg(
+            required = false,
+            global = true,
+            short = 't',
+            long = "output-format",
+            default_value = "json-pretty",
+            help = "Output format of the result"
+        )]
+        output_format: OutputFormat,
+
+        #[arg(
+            required = false,
+            global = true,
+            short = 'f',
+            long = "file",
+            help = "Input file (json, json5, cjson, hjson, yaml, pem) to send to Unit; use - for stdin where applicable"
+        )]
+        input_file: Option<String>,
+
+        #[arg(
+            required = true,
+            short = 'm',
+            long = "http-method",
+            value_parser = parse_http_method,
+            help = "HTTP method to use (GET, POST, PUT, DELETE)",
+        )]
+        method: String,
+
+        #[arg(required = true, short = 'p', long = "path")]
+        path: String,
+    },
+
+    #[command(about = "Get the current status of Unit")]
+    Status {
+        #[arg(
+            required = false,
+            global = true,
+            short = 't',
+            long = "output-format",
+            default_value = "json-pretty",
+            help = "Output format of the result"
+        )]
+        output_format: OutputFormat,
+    },
+
+    #[command(about = "List active listeners")]
+    Listeners {
+        #[arg(
+            required = false,
+            global = true,
+            short = 't',
+            long = "output-format",
+            default_value = "json-pretty",
+            help = "Output format of the result"
+        )]
+        output_format: OutputFormat,
+    },
+
+    #[command(about = "List all configured Unit applications")]
+    Apps(ApplicationArgs),
+
+    #[command(about = "Export the current configuration of Unit")]
+    Export {
+        #[arg(required = true, short = 'f', help = "Tarball filename to save the configuration to")]
+        filename: String,
+    },
+}
+
+#[derive(Debug, Args)]
+pub struct InstanceArgs {
+    #[arg(
+        required = false,
+        global = true,
+        short = 't',
+        long = "output-format",
+        default_value = "json-pretty",
+        help = "Output format of the result"
+    )]
+    pub output_format: OutputFormat,
+
+    #[command(subcommand)]
+    pub command: Option<InstanceCommands>,
+}
+
+#[derive(Debug, Subcommand)]
+#[command(args_conflicts_with_subcommands = true)]
+pub enum InstanceCommands {
+    #[command(about = "deploy a new docker instance of Unit")]
+    New {
+        #[arg(required = true, help = "Path to mount control socket to host")]
+        socket: String,
+
+        #[arg(required = true, help = "Path to mount application into container")]
+        application: String,
+
+        #[arg(help = "Mount application directory as read only", short =
'r', long = "read-only")] + application_read_only: bool, + + #[arg( + help = "Unitd Image to deploy", + default_value = env!("CARGO_PKG_VERSION"), + )] + image: String, + }, +} + +#[derive(Debug, Args)] +pub struct ApplicationArgs { + #[arg( + required = false, + global = true, + short = 't', + long = "output-format", + default_value = "json-pretty", + help = "Output format of the result" + )] + pub output_format: OutputFormat, + + #[command(subcommand)] + pub command: ApplicationCommands, +} + +#[derive(Debug, Subcommand)] +#[command(args_conflicts_with_subcommands = true)] +pub enum ApplicationCommands { + #[command(about = "restart a running application")] + Restart { + #[arg(required = true, help = "name of application")] + name: String, + }, + + #[command(about = "list running applications")] + List {}, +} + +fn parse_control_socket_address(s: &str) -> Result<ControlSocket, ClapError> { + ControlSocket::try_from(s).map_err(|e| ClapError::raw(ValueValidation, e.to_string())) +} + +fn parse_http_method(s: &str) -> Result<String, ClapError> { + let method = s.to_uppercase(); + match method.as_str() { + "GET" | "POST" | "PUT" | "DELETE" => Ok(method), + _ => Err(ClapError::raw(ValueValidation, format!("Invalid HTTP method: {}", s))), + } +} + +fn parse_u8(s: &str) -> Result<u8, ClapError> { + s.parse::<u8>() + .map_err(|e| ClapError::raw(ValueValidation, format!("Invalid number: {}", e))) +} diff --git a/tools/unitctl/unitctl/src/unitctl_error.rs b/tools/unitctl/unitctl/src/unitctl_error.rs new file mode 100644 index 00000000..83b2da46 --- /dev/null +++ b/tools/unitctl/unitctl/src/unitctl_error.rs @@ -0,0 +1,125 @@ +use std::fmt::{Display, Formatter}; +use std::io::Error as IoError; +use std::process::{ExitCode, Termination}; +use unit_client_rs::unit_client::UnitClientError; + +use custom_error::custom_error; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) enum ControlSocketErrorKind { + NotFound, + Permissions, + Parse, + General, +} + +impl Display for ControlSocketErrorKind { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + writeln!(f, "{:?}", self) + } +} + +custom_error! {pub UnitctlError + ControlSocketError { kind: ControlSocketErrorKind, message: String } = "{message}", + CertificateError { message: String } = "Certificate error: {message}", + EditorError { message: String } = "Error opening editor: {message}", + NoUnitInstancesError = "No running unit instances found", + MultipleUnitInstancesError { + suggestion: String} = "Multiple unit instances found: {suggestion}", + NoSocketPathError = "Unable to detect socket path from running instance", + NoInputFileError = "No input file specified when required", + UiServerError { message: String } = "UI server error: {message}", + UnitClientError { source: UnitClientError } = "Unit client error: {source}", + SerializationError { message: String } = "Serialization error: {message}", + DeserializationError { message: String } = "Deserialization error: {message}", + IoError { source: IoError } = "IO error: {source}", + PathNotFound { path: String } = "Path not found: {path}", + UnknownInputFileType { path: String } = "Unknown input type for file: {path}", + NoFilesImported = "All imports failed", + WaitTimeoutError = "Timeout waiting for unit to start has been exceeded", +} + +impl UnitctlError { + pub fn exit_code(&self) -> i32 { + match self { + UnitctlError::NoUnitInstancesError => 10, + UnitctlError::MultipleUnitInstancesError { .. 
} => 11, + UnitctlError::NoSocketPathError => 12, + UnitctlError::UnitClientError { .. } => 13, + UnitctlError::WaitTimeoutError => 14, + _ => 99, + } + } + + pub fn retryable(&self) -> bool { + match self { + UnitctlError::ControlSocketError { kind, .. } => { + // try again because there is no socket created yet + ControlSocketErrorKind::NotFound == *kind + } + // try again because unit isn't running + UnitctlError::NoUnitInstancesError => true, + // do not retry because this is an unrecoverable error + _ => false, + } + } +} + +impl Termination for UnitctlError { + fn report(self) -> ExitCode { + ExitCode::from(self.exit_code() as u8) + } +} + +pub fn eprint_error(error: &UnitctlError) { + match error { + UnitctlError::NoUnitInstancesError => { + eprintln!("No running unit instances found"); + } + UnitctlError::MultipleUnitInstancesError { ref suggestion } => { + eprintln!("{}", suggestion); + } + UnitctlError::NoSocketPathError => { + eprintln!("Unable to detect socket path from running instance"); + } + UnitctlError::UnitClientError { source } => match source { + UnitClientError::SocketPermissionsError { .. } => { + eprintln!("{}", source); + eprintln!("Try running again with the same permissions as the unit control socket"); + } + UnitClientError::OpenAPIError { source } => { + eprintln!("OpenAPI Error: {}", source); + } + _ => { + eprintln!("Unit client error: {}", source); + } + }, + UnitctlError::SerializationError { message } => { + eprintln!("Serialization error: {}", message); + } + UnitctlError::DeserializationError { message } => { + eprintln!("Deserialization error: {}", message); + } + UnitctlError::IoError { ref source } => { + eprintln!("IO error: {}", source); + } + UnitctlError::PathNotFound { path } => { + eprintln!("Path not found: {}", path); + } + UnitctlError::EditorError { message } => { + eprintln!("Error opening editor: {}", message); + } + UnitctlError::CertificateError { message } => { + eprintln!("Certificate error: {}", message); + } + UnitctlError::NoInputFileError => { + eprintln!("No input file specified when required"); + } + UnitctlError::UiServerError { ref message } => { + eprintln!("UI server error: {}", message); + } + _ => { + eprintln!("{}", error); + } + } +} diff --git a/tools/unitctl/unitctl/src/wait.rs b/tools/unitctl/unitctl/src/wait.rs new file mode 100644 index 00000000..860fb0b5 --- /dev/null +++ b/tools/unitctl/unitctl/src/wait.rs @@ -0,0 +1,147 @@ +use crate::unitctl::UnitCtl; +use crate::unitctl_error::{ControlSocketErrorKind, UnitctlError}; +use std::time::Duration; +use unit_client_rs::control_socket_address::ControlSocket; +use unit_client_rs::unit_client::{UnitClient, UnitClientError}; +use unit_client_rs::unitd_instance::UnitdInstance; + +/// Waits for a socket to become available. Availability is tested by attempting to access the +/// status endpoint via the control socket. When socket is available, ControlSocket instance +/// is returned. 
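+///
+/// If no wait timeout is configured, each address is validated exactly once. Otherwise,
+/// validation and the status probe are retried up to `--wait-max-tries` times, sleeping
+/// `--wait-timeout-seconds` between attempts, and a WaitTimeoutError is returned if the
+/// socket never becomes available.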
+pub async fn wait_for_sockets(cli: &UnitCtl) -> Result<Vec<ControlSocket>, UnitctlError> {
+    let socks: Vec<ControlSocket> = match &cli.control_socket_addresses {
+        None => vec![find_socket_address_from_instance().await?],
+        Some(s) => s.clone(),
+    };
+
+    let mut mapped = vec![];
+    for addr in socks {
+        if cli.wait_time_seconds.is_none() {
+            mapped.push(addr.to_owned().validate()?);
+            continue;
+        }
+
+        let wait_time =
+            Duration::from_secs(cli.wait_time_seconds.expect("wait_time_option default was not applied") as u64);
+        let max_tries = cli.wait_max_tries.expect("max_tries_option default was not applied");
+
+        let mut socket_ready = false;
+        let mut attempt = 0;
+        while attempt < max_tries {
+            if attempt > 0 {
+                eprintln!(
+                    "Waiting {}s for control socket to become available, try {}/{}...",
+                    wait_time.as_secs(),
+                    attempt + 1,
+                    max_tries
+                );
+                std::thread::sleep(wait_time);
+            }
+
+            attempt += 1;
+
+            let res = addr.to_owned().validate();
+            if res.is_err() {
+                let err = res.map_err(|error| match error {
+                    UnitClientError::UnixSocketNotFound { .. } => UnitctlError::ControlSocketError {
+                        kind: ControlSocketErrorKind::NotFound,
+                        message: format!("{}", error),
+                    },
+                    UnitClientError::SocketPermissionsError { .. } => UnitctlError::ControlSocketError {
+                        kind: ControlSocketErrorKind::Permissions,
+                        message: format!("{}", error),
+                    },
+                    UnitClientError::TcpSocketAddressUriError { .. }
+                    | UnitClientError::TcpSocketAddressNoPortError { .. }
+                    | UnitClientError::TcpSocketAddressParseError { .. } => UnitctlError::ControlSocketError {
+                        kind: ControlSocketErrorKind::Parse,
+                        message: format!("{}", error),
+                    },
+                    _ => UnitctlError::ControlSocketError {
+                        kind: ControlSocketErrorKind::General,
+                        message: format!("{}", error),
+                    },
+                });
+                if err.as_ref().is_err_and(|e| e.retryable()) {
+                    continue;
+                } else {
+                    return Err(err.expect_err("impossible error condition"));
+                }
+            } else {
+                let sock = res.unwrap();
+                if let Err(e) = UnitClient::new(sock.clone()).status().await {
+                    eprintln!("Unable to access status endpoint: {}", *e);
+                    continue;
+                }
+                // The socket answered the status probe; record it and stop retrying this address.
+                mapped.push(sock);
+                socket_ready = true;
+                break;
+            }
+        }
+
+        if !socket_ready {
+            return Err(UnitctlError::WaitTimeoutError);
+        }
+    }
+
+    Ok(mapped)
+}
+
+async fn find_socket_address_from_instance() -> Result<ControlSocket, UnitctlError> {
+    let instances = UnitdInstance::running_unitd_instances().await;
+    if instances.is_empty() {
+        return Err(UnitctlError::NoUnitInstancesError);
+    } else if instances.len() > 1 {
+        let suggestion: String = "Multiple unit instances found. 
Specify the socket address(es) of the instance you wish \
+            to control using the `--control-socket-address` flag"
+            .to_string();
+        return Err(UnitctlError::MultipleUnitInstancesError { suggestion });
+    }
+
+    let instance = instances.first().unwrap();
+    match instance.control_api_socket_address() {
+        Some(path) => Ok(ControlSocket::try_from(path).unwrap()),
+        None => Err(UnitctlError::NoSocketPathError),
+    }
+}
+
+#[tokio::test]
+async fn wait_for_unavailable_unix_socket() {
+    let control_socket = ControlSocket::try_from("unix:/tmp/this_socket_does_not_exist.sock");
+    let cli = UnitCtl {
+        control_socket_addresses: Some(vec![control_socket.unwrap()]),
+        wait_time_seconds: Some(1u8),
+        wait_max_tries: Some(3u8),
+        command: crate::unitctl::Commands::Status {
+            output_format: crate::output_format::OutputFormat::JsonPretty,
+        },
+    };
+    let error = wait_for_sockets(&cli)
+        .await
+        .expect_err("Expected error, but no error received");
+    match error {
+        UnitctlError::WaitTimeoutError => {}
+        _ => panic!("Expected WaitTimeoutError: {}", error),
+    }
+}
+
+#[tokio::test]
+async fn wait_for_unavailable_tcp_socket() {
+    let control_socket = ControlSocket::try_from("http://127.0.0.1:9783456");
+    let cli = UnitCtl {
+        control_socket_addresses: Some(vec![control_socket.unwrap()]),
+        wait_time_seconds: Some(1u8),
+        wait_max_tries: Some(3u8),
+        command: crate::unitctl::Commands::Status {
+            output_format: crate::output_format::OutputFormat::JsonPretty,
+        },
+    };
+
+    let error = wait_for_sockets(&cli)
+        .await
+        .expect_err("Expected error, but no error received");
+    match error {
+        UnitctlError::WaitTimeoutError => {}
+        _ => panic!("Expected WaitTimeoutError"),
+    }
+}
@@ -1,5 +1,5 @@
 # Copyright (C) NGINX, Inc.
-NXT_VERSION=1.32.1
-NXT_VERNUM=13201
+NXT_VERSION=1.33.0
+NXT_VERNUM=13300