From a85778993ef2230e9322c45af70c0f390e6cd2dc Mon Sep 17 00:00:00 2001 From: Pietro Albini Date: Mon, 20 May 2024 15:43:54 +0200 Subject: [PATCH] squash criticalup source code Co-authored-by: Amanjeev Sethi Co-authored-by: Ana Hobden Co-authored-by: Jonathan Pallant Co-authored-by: Sebastian Ziebell --- .cargo/config.toml | 2 + .github/workflows/ci.yml | 138 + .github/workflows/release.yml | 275 ++ .gitignore | 4 + .gitmodules | 3 + Cargo.lock | 2601 +++++++++++++++++ Cargo.toml | 38 + bors.toml | 12 + crates/criticaltrust/Cargo.toml | 31 + crates/criticaltrust/src/errors.rs | 41 + .../src/integrity/detect_manifest.rs | 223 ++ crates/criticaltrust/src/integrity/mod.rs | 47 + .../criticaltrust/src/integrity/verifier.rs | 913 ++++++ .../ecdsa_p256_sha256_asn1_spki_der.rs | 136 + .../criticaltrust/src/keys/algorithms/mod.rs | 78 + crates/criticaltrust/src/keys/mod.rs | 20 + crates/criticaltrust/src/keys/newtypes.rs | 70 + crates/criticaltrust/src/keys/pair.rs | 17 + crates/criticaltrust/src/keys/pair_aws_kms.rs | 263 ++ .../criticaltrust/src/keys/pair_ephemeral.rs | 116 + crates/criticaltrust/src/keys/public.rs | 410 +++ crates/criticaltrust/src/lib.rs | 14 + crates/criticaltrust/src/manifests.rs | 195 ++ crates/criticaltrust/src/serde_base64.rs | 111 + crates/criticaltrust/src/sha256.rs | 28 + .../criticaltrust/src/signatures/keychain.rs | 171 ++ crates/criticaltrust/src/signatures/mod.rs | 11 + .../criticaltrust/src/signatures/payload.rs | 453 +++ crates/criticaltrust/src/test_utils.rs | 60 + crates/criticalup-cli/Cargo.toml | 37 + crates/criticalup-cli/README.md | 15 + .../criticalup-cli/src/bin/criticalup-test.rs | 26 + crates/criticalup-cli/src/binary_proxies.rs | 142 + crates/criticalup-cli/src/commands/auth.rs | 40 + .../src/commands/auth_remove.rs | 14 + .../criticalup-cli/src/commands/auth_set.rs | 92 + crates/criticalup-cli/src/commands/clean.rs | 109 + crates/criticalup-cli/src/commands/install.rs | 215 ++ crates/criticalup-cli/src/commands/mod.rs | 8 + 
crates/criticalup-cli/src/commands/remove.rs | 38 + crates/criticalup-cli/src/commands/run.rs | 52 + crates/criticalup-cli/src/commands/which.rs | 24 + crates/criticalup-cli/src/errors.rs | 82 + crates/criticalup-cli/src/lib.rs | 165 ++ crates/criticalup-cli/src/spawn.rs | 52 + crates/criticalup-cli/tests/cli/auth.rs | 54 + .../criticalup-cli/tests/cli/auth_remove.rs | 45 + crates/criticalup-cli/tests/cli/auth_set.rs | 208 ++ .../tests/cli/binary_proxies.rs | 105 + crates/criticalup-cli/tests/cli/clean.rs | 254 ++ crates/criticalup-cli/tests/cli/install.rs | 125 + crates/criticalup-cli/tests/cli/main.rs | 13 + crates/criticalup-cli/tests/cli/remove.rs | 70 + crates/criticalup-cli/tests/cli/root.rs | 37 + crates/criticalup-cli/tests/cli/run.rs | 116 + crates/criticalup-cli/tests/cli/utils.rs | 249 ++ crates/criticalup-cli/tests/cli/which.rs | 64 + .../tests/resources/criticalup-which.toml | 8 + .../tests/resources/criticalup.toml | 8 + .../snapshots/cli__auth__help_message.snap | 23 + .../snapshots/cli__auth__invalid_token.snap | 21 + .../tests/snapshots/cli__auth__no_token.snap | 21 + .../cli__auth__token_with_expiry.snap | 16 + .../cli__auth__token_without_expiry.snap | 16 + .../cli__auth_remove__help_message.snap | 19 + .../cli__auth_remove__token_missing.snap | 12 + .../cli__auth_remove__token_present.snap | 12 + .../cli__auth_set__byte_zero_via_stdin.snap | 15 + .../cli__auth_set__help_message.snap | 22 + ...auth_set__via_args__set_invalid_token.snap | 15 + ...set_invalid_token_with_existing_token.snap | 15 + ...__auth_set__via_args__set_valid_token.snap | 12 + ...__set_valid_token_with_existing_token.snap | 12 + ...uth_set__via_stdin__set_invalid_token.snap | 15 + ...set_invalid_token_with_existing_token.snap | 15 + ..._auth_set__via_stdin__set_valid_token.snap | 12 + ...__set_valid_token_with_existing_token.snap | 12 + ...h_set__via_tty_eod__set_invalid_token.snap | 14 + ...set_invalid_token_with_existing_token.snap | 14 + 
...uth_set__via_tty_eod__set_valid_token.snap | 13 + ...__set_valid_token_with_existing_token.snap | 13 + ...th_set__via_tty_nl__set_invalid_token.snap | 13 + ...set_invalid_token_with_existing_token.snap | 13 + ...auth_set__via_tty_nl__set_valid_token.snap | 12 + ...__set_valid_token_with_existing_token.snap | 12 + ..._invoking_inside_of_installed_project.snap | 14 + ...de_of_project_with_no_installed_proxy.snap | 16 + ..._proxies__invoking_outside_of_project.snap | 15 + ...ean_deletes_only_unused_installations.snap | 16 + ...y_unused_installations_also_from_disk.snap | 16 + .../snapshots/cli__clean__help_message.snap | 18 + ...ions_from_disk_that_do_not_have_state.snap | 14 + ...lled_toolchain_should_not_throw_error.snap | 13 + .../snapshots/cli__install__help_message.snap | 19 + .../snapshots/cli__remove__help_message.snap | 19 + ...letes_only_manifest_from_list_and_dir.snap | 12 + .../tests/snapshots/cli__root__no_args.snap | 27 + .../snapshots/cli__root__version_flags.snap | 15 + .../snapshots/cli__run__help_message.snap | 22 + ...simple_run_command_manifest_not_found.snap | 13 + ...n__simple_run_command_missing_package.snap | 15 + .../snapshots/cli__which__help_message.snap | 22 + ...ich__which_run_binary_does_not_exists.snap | 15 + .../cli__which__which_run_binary_exists.snap | 13 + crates/criticalup-core/Cargo.toml | 24 + crates/criticalup-core/build.rs | 9 + crates/criticalup-core/src/binary_proxies.rs | 347 +++ crates/criticalup-core/src/config/mod.rs | 86 + crates/criticalup-core/src/config/paths.rs | 173 ++ .../src/download_server_client.rs | 283 ++ crates/criticalup-core/src/errors.rs | 154 + crates/criticalup-core/src/lib.rs | 12 + .../src/project_manifest/mod.rs | 738 +++++ .../src/project_manifest/substitutions.rs | 104 + .../src/project_manifest/v1.rs | 18 + crates/criticalup-core/src/state.rs | 980 +++++++ crates/criticalup-core/src/test_utils.rs | 230 ++ crates/criticalup-core/src/utils.rs | 78 + crates/criticalup-dev/Cargo.toml | 11 + 
crates/criticalup-dev/README.md | 47 + crates/criticalup-dev/src/main.rs | 30 + crates/criticalup/Cargo.toml | 18 + crates/criticalup/README.md | 47 + crates/criticalup/src/main.rs | 30 + crates/criticalup/wix/main.wxs | 238 ++ crates/mock-download-server/Cargo.toml | 11 + crates/mock-download-server/src/handlers.rs | 100 + crates/mock-download-server/src/lib.rs | 66 + crates/mock-download-server/src/server.rs | 75 + docs/.flake8 | 6 + docs/.gitignore | 10 + docs/.gitmodules | 3 + docs/LICENSES/Apache-2.0.txt | 73 + docs/LICENSES/MIT.txt | 9 + docs/README.rst | 77 + docs/exts/.gitkeep | 0 docs/make.py | 16 + docs/sphinx-substitutions.toml | 15 + docs/src/cli.rst | 4 + docs/src/conf.py | 62 + docs/src/criticalup_toml.rst | 4 + docs/src/index.rst | 40 + docs/src/install.rst | 49 + docs/src/overview.rst | 22 + docs/src/platforms.rst | 42 + docs/src/system-requirements.rst | 18 + docs/src/using-criticalup/authenticating.rst | 43 + docs/src/using-criticalup/running-tools.rst | 46 + .../using-criticalup/toolchain-management.rst | 89 + docs/target-names.toml | 18 + docs/themes/.gitkeep | 0 151 files changed, 14206 insertions(+) create mode 100644 .cargo/config.toml create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/release.yml create mode 100644 .gitignore create mode 100644 .gitmodules create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 bors.toml create mode 100644 crates/criticaltrust/Cargo.toml create mode 100644 crates/criticaltrust/src/errors.rs create mode 100644 crates/criticaltrust/src/integrity/detect_manifest.rs create mode 100644 crates/criticaltrust/src/integrity/mod.rs create mode 100644 crates/criticaltrust/src/integrity/verifier.rs create mode 100644 crates/criticaltrust/src/keys/algorithms/ecdsa_p256_sha256_asn1_spki_der.rs create mode 100644 crates/criticaltrust/src/keys/algorithms/mod.rs create mode 100644 crates/criticaltrust/src/keys/mod.rs create mode 100644 
crates/criticaltrust/src/keys/newtypes.rs create mode 100644 crates/criticaltrust/src/keys/pair.rs create mode 100644 crates/criticaltrust/src/keys/pair_aws_kms.rs create mode 100644 crates/criticaltrust/src/keys/pair_ephemeral.rs create mode 100644 crates/criticaltrust/src/keys/public.rs create mode 100644 crates/criticaltrust/src/lib.rs create mode 100644 crates/criticaltrust/src/manifests.rs create mode 100644 crates/criticaltrust/src/serde_base64.rs create mode 100644 crates/criticaltrust/src/sha256.rs create mode 100644 crates/criticaltrust/src/signatures/keychain.rs create mode 100644 crates/criticaltrust/src/signatures/mod.rs create mode 100644 crates/criticaltrust/src/signatures/payload.rs create mode 100644 crates/criticaltrust/src/test_utils.rs create mode 100644 crates/criticalup-cli/Cargo.toml create mode 100644 crates/criticalup-cli/README.md create mode 100644 crates/criticalup-cli/src/bin/criticalup-test.rs create mode 100644 crates/criticalup-cli/src/binary_proxies.rs create mode 100644 crates/criticalup-cli/src/commands/auth.rs create mode 100644 crates/criticalup-cli/src/commands/auth_remove.rs create mode 100644 crates/criticalup-cli/src/commands/auth_set.rs create mode 100644 crates/criticalup-cli/src/commands/clean.rs create mode 100644 crates/criticalup-cli/src/commands/install.rs create mode 100644 crates/criticalup-cli/src/commands/mod.rs create mode 100644 crates/criticalup-cli/src/commands/remove.rs create mode 100644 crates/criticalup-cli/src/commands/run.rs create mode 100644 crates/criticalup-cli/src/commands/which.rs create mode 100644 crates/criticalup-cli/src/errors.rs create mode 100644 crates/criticalup-cli/src/lib.rs create mode 100644 crates/criticalup-cli/src/spawn.rs create mode 100644 crates/criticalup-cli/tests/cli/auth.rs create mode 100644 crates/criticalup-cli/tests/cli/auth_remove.rs create mode 100644 crates/criticalup-cli/tests/cli/auth_set.rs create mode 100644 crates/criticalup-cli/tests/cli/binary_proxies.rs create 
mode 100644 crates/criticalup-cli/tests/cli/clean.rs create mode 100644 crates/criticalup-cli/tests/cli/install.rs create mode 100644 crates/criticalup-cli/tests/cli/main.rs create mode 100644 crates/criticalup-cli/tests/cli/remove.rs create mode 100644 crates/criticalup-cli/tests/cli/root.rs create mode 100644 crates/criticalup-cli/tests/cli/run.rs create mode 100644 crates/criticalup-cli/tests/cli/utils.rs create mode 100644 crates/criticalup-cli/tests/cli/which.rs create mode 100644 crates/criticalup-cli/tests/resources/criticalup-which.toml create mode 100644 crates/criticalup-cli/tests/resources/criticalup.toml create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth__help_message.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth__invalid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth__no_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth__token_with_expiry.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth__token_without_expiry.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_remove__help_message.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_remove__token_missing.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_remove__token_present.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__byte_zero_via_stdin.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__help_message.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_invalid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_invalid_token_with_existing_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_valid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_valid_token_with_existing_token.snap create mode 
100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_invalid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_invalid_token_with_existing_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_valid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_valid_token_with_existing_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_invalid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_invalid_token_with_existing_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_valid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_valid_token_with_existing_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_invalid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_invalid_token_with_existing_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_valid_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_valid_token_with_existing_token.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_inside_of_installed_project.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_inside_of_project_with_no_installed_proxy.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_outside_of_project.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__clean__clean_deletes_only_unused_installations.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__clean__clean_deletes_only_unused_installations_also_from_disk.snap create mode 
100644 crates/criticalup-cli/tests/snapshots/cli__clean__help_message.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__clean__removes_unused_installations_from_disk_that_do_not_have_state.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__install__already_installed_toolchain_should_not_throw_error.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__install__help_message.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__remove__help_message.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__remove__remove_deletes_only_manifest_from_list_and_dir.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__root__no_args.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__root__version_flags.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__run__help_message.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__run__simple_run_command_manifest_not_found.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__run__simple_run_command_missing_package.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__which__help_message.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__which__which_run_binary_does_not_exists.snap create mode 100644 crates/criticalup-cli/tests/snapshots/cli__which__which_run_binary_exists.snap create mode 100644 crates/criticalup-core/Cargo.toml create mode 100644 crates/criticalup-core/build.rs create mode 100644 crates/criticalup-core/src/binary_proxies.rs create mode 100644 crates/criticalup-core/src/config/mod.rs create mode 100644 crates/criticalup-core/src/config/paths.rs create mode 100644 crates/criticalup-core/src/download_server_client.rs create mode 100644 crates/criticalup-core/src/errors.rs create mode 100644 crates/criticalup-core/src/lib.rs create mode 100644 crates/criticalup-core/src/project_manifest/mod.rs create mode 100644 
crates/criticalup-core/src/project_manifest/substitutions.rs create mode 100644 crates/criticalup-core/src/project_manifest/v1.rs create mode 100644 crates/criticalup-core/src/state.rs create mode 100644 crates/criticalup-core/src/test_utils.rs create mode 100644 crates/criticalup-core/src/utils.rs create mode 100644 crates/criticalup-dev/Cargo.toml create mode 100644 crates/criticalup-dev/README.md create mode 100644 crates/criticalup-dev/src/main.rs create mode 100644 crates/criticalup/Cargo.toml create mode 100644 crates/criticalup/README.md create mode 100644 crates/criticalup/src/main.rs create mode 100644 crates/criticalup/wix/main.wxs create mode 100644 crates/mock-download-server/Cargo.toml create mode 100644 crates/mock-download-server/src/handlers.rs create mode 100644 crates/mock-download-server/src/lib.rs create mode 100644 crates/mock-download-server/src/server.rs create mode 100644 docs/.flake8 create mode 100644 docs/.gitignore create mode 100644 docs/.gitmodules create mode 100644 docs/LICENSES/Apache-2.0.txt create mode 100644 docs/LICENSES/MIT.txt create mode 100644 docs/README.rst create mode 100644 docs/exts/.gitkeep create mode 100755 docs/make.py create mode 100644 docs/sphinx-substitutions.toml create mode 100644 docs/src/cli.rst create mode 100644 docs/src/conf.py create mode 100644 docs/src/criticalup_toml.rst create mode 100644 docs/src/index.rst create mode 100644 docs/src/install.rst create mode 100644 docs/src/overview.rst create mode 100644 docs/src/platforms.rst create mode 100644 docs/src/system-requirements.rst create mode 100644 docs/src/using-criticalup/authenticating.rst create mode 100644 docs/src/using-criticalup/running-tools.rst create mode 100644 docs/src/using-criticalup/toolchain-management.rst create mode 100644 docs/target-names.toml create mode 100644 docs/themes/.gitkeep diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 00000000..3c32d251 --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,2 
@@ +[target.aarch64-unknown-linux-gnu] +linker = "aarch64-linux-gnu-gcc" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..d6570d43 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,138 @@ +--- + +name: CI +on: + push: + branches: [staging, trying] + pull_request: {} + +permissions: + # Allow write access to the source code to enable GitHub Pages publishing. + contents: write + +jobs: + build-test: + name: Build and test + strategy: + fail-fast: false + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + + # Windows seems to have an existing non-RustUp tools in `~/.cargo/bin/` which have to be removed + - if: ${{ matrix.os == 'windows-latest' }} + shell: bash + run: | + rm -rv C://Users/runneradmin/.cargo/bin/rust-analyzer.exe + rm -rv C://Users/runneradmin/.cargo/bin/rustfmt.exe + rm -rv C://Users/runneradmin/.cargo/bin/cargo-fmt.exe + - name: Make sure Rust stable is installed + shell: bash + run: | + rustup update stable --no-self-update + rustup default stable + + - name: Cache Rust dependencies + uses: ferrous-systems/shared-github-actions/cache-rust@main + + - name: Check formatting + if: ${{ matrix.os == 'ubuntu-latest' }} + shell: bash + run: cargo fmt --all --check + + - name: Check Clippy warnings + shell: bash + run: cargo clippy --workspace --tests --locked -- -Dwarnings + + - name: Prepare file to record snapshots used by insta + shell: bash + run: echo "INSTA_SNAPSHOT_REFERENCES_FILE=$(mktemp)" >> "${GITHUB_ENV}" + + - name: Run the test suite + shell: bash + run: cargo test --timings --workspace --locked + + # Incompatible with Windows, insta snapshots output Windows paths + # Incompatible with Mac, find does not have `-n`, diff does not have `--color` + - if: ${{ matrix.os == 'ubuntu-latest' }} + name: Ensure there are no unused insta snapshots + shell: bash + run: diff -u --color <(find -name "*.snap" | xargs 
realpath | sort | uniq) <(cat "${INSTA_SNAPSHOT_REFERENCES_FILE}" | xargs realpath | sort | uniq) + + - name: Upload criticalup debug build + uses: actions/upload-artifact@v4 + with: + name: criticalup-${{ matrix.os }} + path: target/debug/criticalup + + - name: Upload upload criticalup-dev debug build + uses: actions/upload-artifact@v4 + with: + name: criticalup-dev-${{ matrix.os }} + path: target/debug/criticalup-dev + + - name: Upload cargo timings + uses: actions/upload-artifact@v4 + with: + name: cargo-timings-${{ matrix.os }} + path: target/cargo-timings/cargo-timing.html + + docs: + name: Build documentation + permissions: + contents: read + runs-on: ubuntu-22.04 + steps: + - name: Checkout the source code + uses: actions/checkout@v4 + + - name: Build documentation and check links + working-directory: ./docs + run: ./make.py --check-links + + - name: Install Python dependencies + working-directory: ./docs + run: python3 -m pip install reuse black flake8 + + - name: Verify Python code formatting + working-directory: ./docs + run: black . --check --diff --color + + - name: Lint Python code with flake8 + working-directory: ./docs + run: flake8 . 
--exclude .venv + + - name: Upload built documentation as an artifact + uses: actions/upload-artifact@v3 + with: + name: docs + path: docs/build/html + if-no-files-found: error + retention-days: 1 + + build-finished: + name: CI build successful + runs-on: ubuntu-latest + if: success() + needs: + - build-test + - docs + steps: + - name: Download built documentation for publishing + uses: actions/download-artifact@v3 + with: + name: docs + path: /tmp/docs + if: github.event_name == 'push' && github.ref == 'refs/heads/staging' + + - name: Publish the documentation to GitHub Pages + uses: ferrous-systems/shared-github-actions/github-pages@main + with: + path: /tmp/docs + token: ${{ secrets.GITHUB_TOKEN }} + if: github.event_name == 'push' && github.ref == 'refs/heads/staging' + + - name: Mark the build as successful + run: exit 0 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..7adcf7d3 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,275 @@ +# Copyright 2022-2024, axodotdev +# SPDX-License-Identifier: MIT or Apache-2.0 +# +# CI that: +# +# * checks for a Git Tag that looks like a release +# * builds artifacts with cargo-dist (archives, installers, hashes) +# * uploads those artifacts to temporary workflow zip +# * on success, uploads the artifacts to a GitHub Release +# +# Note that the GitHub Release will be created with a generated +# title/body based on your changelogs. + +name: Release + +permissions: + contents: write + +# This task will run whenever you push a git tag that looks like a version +# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. +# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where +# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION +# must be a Cargo-style SemVer Version (must have at least major.minor.patch). 
+# +# If PACKAGE_NAME is specified, then the announcement will be for that +# package (erroring out if it doesn't have the given version or isn't cargo-dist-able). +# +# If PACKAGE_NAME isn't specified, then the announcement will be for all +# (cargo-dist-able) packages in the workspace with that version (this mode is +# intended for workspaces with only one dist-able package, or with all dist-able +# packages versioned/released in lockstep). +# +# If you push multiple tags at once, separate instances of this workflow will +# spin up, creating an independent announcement for each one. However, GitHub +# will hard limit this to 3 tags per commit, as it will assume more tags is a +# mistake. +# +# If there's a prerelease-style suffix to the version, then the release(s) +# will be marked as a prerelease. +on: + pull_request: + push: + tags: + - '**[0-9]+.[0-9]+.[0-9]+*' + +jobs: + # Run 'cargo dist plan' (or host) to determine what tasks we need to do + plan: + runs-on: ubuntu-latest + outputs: + val: ${{ steps.plan.outputs.manifest }} + tag: ${{ !github.event.pull_request && github.ref_name || '' }} + tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} + publishing: ${{ !github.event.pull_request }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install cargo-dist + # we specify bash to get pipefail; it guards against the `curl` command + # failing. otherwise `sh` won't catch that `curl` returned non-0 + shell: bash + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.15.0-prerelease.7/cargo-dist-installer.sh | sh" + # sure would be cool if github gave us proper conditionals... + # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible + # functionality based on whether this is a pull_request, and whether it's from a fork. 
+ # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* + # but also really annoying to build CI around when it needs secrets to work right.) + - id: plan + run: | + cargo dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json + echo "cargo dist ran successfully" + cat plan-dist-manifest.json + echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v4 + with: + name: artifacts-plan-dist-manifest + path: plan-dist-manifest.json + + # Build and packages all the platform-specific things + build-local-artifacts: + name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) + # Let the initial task tell us to not run (currently very blunt) + needs: + - plan + if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} + strategy: + fail-fast: false + # Target platforms/runners are computed by cargo-dist in create-release. 
+ # Each member of the matrix has the following arguments: + # + # - runner: the github runner + # - dist-args: cli flags to pass to cargo dist + # - install-dist: expression to run to install cargo-dist on the runner + # + # Typically there will be: + # - 1 "global" task that builds universal installers + # - N "local" tasks that build each platform's binaries and platform-specific installers + matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} + runs-on: ${{ matrix.runner }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json + SSLDOTCOM_USERNAME: ${{ secrets.SSLDOTCOM_USERNAME }} + SSLDOTCOM_PASSWORD: ${{ secrets.SSLDOTCOM_PASSWORD }} + SSLDOTCOM_CREDENTIAL_ID: ${{ secrets.SSLDOTCOM_CREDENTIAL_ID }} + SSLDOTCOM_TOTP_SECRET: ${{ secrets.SSLDOTCOM_TOTP_SECRET }} + steps: + - name: enable windows longpaths + run: | + git config --global core.longpaths true + - uses: actions/checkout@v4 + with: + submodules: recursive + - uses: swatinem/rust-cache@v2 + with: + key: ${{ join(matrix.targets, '-') }} + - name: Install cargo-dist + run: ${{ matrix.install_dist }} + # Get the dist-manifest + - name: Fetch local artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + - name: Install dependencies + run: | + ${{ matrix.packages_install }} + - name: Build artifacts + run: | + # Actually do builds and make zips and whatnot + cargo dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json + echo "cargo dist ran successfully" + - id: cargo-dist + name: Post-build + # We force bash here just because github makes it really hard to get values up + # to "real" actions without writing to env-vars, and writing to env-vars has + # inconsistent syntax between shell and powershell. 
+ shell: bash + run: | + # Parse out what we just built and upload it to scratch storage + echo "paths<> "$GITHUB_OUTPUT" + jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + + cp dist-manifest.json "$BUILD_MANIFEST_NAME" + - name: "Upload artifacts" + uses: actions/upload-artifact@v4 + with: + name: artifacts-build-local-${{ join(matrix.targets, '_') }} + path: | + ${{ steps.cargo-dist.outputs.paths }} + ${{ env.BUILD_MANIFEST_NAME }} + + # Build and package all the platform-agnostic(ish) things + build-global-artifacts: + needs: + - plan + - build-local-artifacts + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install cargo-dist + shell: bash + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.15.0-prerelease.7/cargo-dist-installer.sh | sh" + # Get all the local artifacts for the global tasks to use (for e.g. 
checksums) + - name: Fetch local artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + - id: cargo-dist + shell: bash + run: | + cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json + echo "cargo dist ran successfully" + + # Parse out what we just built and upload it to scratch storage + echo "paths<> "$GITHUB_OUTPUT" + jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + + cp dist-manifest.json "$BUILD_MANIFEST_NAME" + - name: "Upload artifacts" + uses: actions/upload-artifact@v4 + with: + name: artifacts-build-global + path: | + ${{ steps.cargo-dist.outputs.paths }} + ${{ env.BUILD_MANIFEST_NAME }} + # Determines if we should publish/announce + host: + needs: + - plan + - build-local-artifacts + - build-global-artifacts + # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) + if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + runs-on: "ubuntu-20.04" + outputs: + val: ${{ steps.host.outputs.manifest }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install cargo-dist + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.15.0-prerelease.7/cargo-dist-installer.sh | sh" + # Fetch artifacts from scratch-storage + - name: Fetch artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + # This is a harmless no-op for GitHub Releases, hosting for that happens in "announce" + - id: host + shell: bash + run: | + cargo dist host 
${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json + echo "artifacts uploaded and released successfully" + cat dist-manifest.json + echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v4 + with: + # Overwrite the previous copy + name: artifacts-dist-manifest + path: dist-manifest.json + + # Create a GitHub Release while uploading all files to it + announce: + needs: + - plan + - host + # use "always() && ..." to allow us to wait for all publish jobs while + # still allowing individual publish jobs to skip themselves (for prereleases). + # "host" however must run to completion, no skipping allowed! + if: ${{ always() && needs.host.result == 'success' }} + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: "Download GitHub Artifacts" + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: artifacts + merge-multiple: true + - name: Cleanup + run: | + # Remove the granular manifests + rm -f artifacts/*-dist-manifest.json + - name: Create GitHub Release + uses: ncipollo/release-action@v1 + with: + tag: ${{ needs.plan.outputs.tag }} + name: ${{ fromJson(needs.host.outputs.val).announcement_title }} + body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }} + prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }} + artifacts: "artifacts/*" diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..113c63cb --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +/target +/docs/node_modules +/docs/build +*.snap.new diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..8ffbb392 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "docs/shared"] + path = docs/shared + url = https://github.com/ferrocene/sphinx-shared-resources diff --git 
a/Cargo.lock b/Cargo.lock new file mode 100644 index 00000000..4c33df05 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,2601 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" + +[[package]] +name = "anstyle-parse" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "anyhow" +version = "1.0.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" + +[[package]] +name = "ascii" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" + +[[package]] +name = "aws-config" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2a89e0000cde82447155d64eeb71720b933b4396a6fbbebad3f8b4f88ca7b54" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sso", + "aws-sdk-ssooidc", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "hex", + "http 0.2.12", + "hyper", + "ring 0.17.8", + "time", + "tokio", + "tracing", + "url", + "zeroize", +] + +[[package]] +name = "aws-credential-types" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e16838e6c9e12125face1c1eff1343c75e3ff540de98ff7ebd61874a89bcfeb9" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = 
"aws-runtime" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4963ac9ff2d33a4231b3806c1c69f578f221a9cabb89ad2bde62ce2b442c8a7" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-kms" +version = "1.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1747213c6bb8fae0f388157e07e144fd442c1e28cfd9c4e257b1b6ee26c4a54" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sso" +version = "1.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32fcc572fd5c58489ec205ec3e4e5f7d63018898a485cbf922a462af496bc300" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-ssooidc" +version = "1.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b6275fa8684a1192754221173b1f7a7c1260d6b0571cc2b8af09468eb0cffe5" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sts" 
+version = "1.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30acd58272fd567e4853c5075d838be1626b59057e0249c9be5a1a7eb13bf70f" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "http 0.2.12", + "once_cell", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d6f29688a4be9895c0ba8bef861ad0c0dac5c15e9618b9b7a6c233990fc263" +dependencies = [ + "aws-credential-types", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "form_urlencoded", + "hex", + "hmac", + "http 0.2.12", + "http 1.1.0", + "once_cell", + "percent-encoding", + "sha2", + "time", + "tracing", +] + +[[package]] +name = "aws-smithy-async" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62220bc6e97f946ddd51b5f1361f78996e704677afc518a4ff66b7a72ea1378c" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "aws-smithy-http" +version = "0.60.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f10fa66956f01540051b0aa7ad54574640f748f9839e843442d99b970d3aff9" +dependencies = [ + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http-body 0.4.6", + "once_cell", + "percent-encoding", + "pin-project-lite", + "pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.60.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4683df9469ef09468dad3473d129960119a0d3593617542b7d52086c8486f2d6" +dependencies = [ + "aws-smithy-types", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.7" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2fbd61ceb3fe8a1cb7352e42689cec5335833cd9f94103a61e98f9bb61c64bb" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de34bcfa1fb3c82a80e252a753db34a6658e07f23d3a5b3fc96919518fa7a3f5" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "fastrand", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "http-body 1.0.0", + "hyper", + "hyper-rustls", + "once_cell", + "pin-project-lite", + "pin-utils", + "rustls 0.21.10", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cc56a5c96ec741de6c5e6bf1ce6948be969d6506dfa9c39cffc284e31e4979b" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes", + "http 0.2.12", + "http 1.1.0", + "pin-project-lite", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abe14dceea1e70101d38fbf2a99e6a34159477c0fb95e68e05c66bd7ae4c3729" +dependencies = [ + "base64-simd", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http 1.1.0", + "http-body 0.4.6", + "http-body 1.0.0", + "http-body-util", + "itoa", + "num-integer", + "pin-project-lite", + "pin-utils", + "ryu", + "serde", + "time", + "tokio", + "tokio-util", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "872c68cf019c0e4afc5de7753c4f7288ce4b71663212771bf5e4542eb9346ca9" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "5a43b56df2c529fe44cb4d92bd64d0479883fb9608ff62daede4df5405381814" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "http 0.2.12", + "rustc_version", + "tracing", +] + +[[package]] +name = "backtrace" +version = "0.3.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" 
+dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytes" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" + +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes", + "either", +] + +[[package]] +name = "cc" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f6e324229dc011159fcc089755d1e2e216a90d43a7dea6853ca740b84f35e7" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chunked_transfer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e4de3bc4ea267985becf712dc6d9eed8b04c953b3fcfb339ebc87acd9804901" + +[[package]] +name = "clap" +version = "4.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" 
+dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "console" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + +[[package]] +name = "criticaltrust" +version = "0.4.0" +dependencies = [ + "aws-config", + 
"aws-sdk-kms", + "aws-smithy-runtime-api", + "base64", + "elliptic-curve", + "itertools", + "p256", + "rand_core", + "serde", + "serde_json", + "sha2", + "thiserror", + "time", + "tokio", +] + +[[package]] +name = "criticalup" +version = "0.1.0" +dependencies = [ + "criticaltrust", + "criticalup-cli", +] + +[[package]] +name = "criticalup-cli" +version = "0.0.0" +dependencies = [ + "atty", + "clap", + "criticaltrust", + "criticalup-core", + "insta", + "mock-download-server", + "owo-colors", + "regex", + "serde", + "serde_json", + "tar", + "tempfile", + "thiserror", + "winapi", + "windows-sys 0.52.0", + "xz2", +] + +[[package]] +name = "criticalup-core" +version = "0.0.0" +dependencies = [ + "criticaltrust", + "dirs", + "log", + "mock-download-server", + "nix", + "reqwest", + "serde", + "serde_json", + "sha2", + "tempfile", + "thiserror", + "toml_edit", +] + +[[package]] +name = "criticalup-dev" +version = "0.1.0" +dependencies = [ + "criticaltrust", + "criticalup-cli", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = 
[ + "powerfmt", + "serde", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "5.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.48.0", +] + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest", + "elliptic-curve", + "rfc6979", + "signature", + "spki", +] + +[[package]] +name = "either" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest", + "ff", + "generic-array", + "group", + "pem-rfc7468", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "encoding_rs" +version = "0.8.34" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "fastrand" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" + +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "filetime" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "windows-sys 0.52.0", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" 
+dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-core", + "futures-io", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "group" +version = "0.13.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.2.6", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + 
"digest", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +dependencies = [ + "bytes", + "futures-core", + "http 1.1.0", + "http-body 1.0.0", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + 
"futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper", + "log", + "rustls 0.21.10", + "rustls-native-certs", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown 0.14.3", +] + +[[package]] +name = "insta" +version = "1.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3eab73f58e59ca6526037208f0e98851159ec1633cf17b6cd2e1f2c3fd5d53cc" +dependencies = [ + "console", + "lazy_static", + "linked-hash-map", + "regex", + "similar", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "is-terminal" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "is_ci" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "kstring" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b310ccceade8121d7d77fee406160e457c2f4e7c7982d589da3499bc7ea4526" +dependencies = [ + "serde", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.153" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.5.0", + "libc", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" + +[[package]] +name = "lzma-sys" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "memchr" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" + +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "mock-download-server" +version = "0.0.0" +dependencies = [ + "anyhow", + 
"criticaltrust", + "serde", + "serde_json", + "tiny_http", +] + +[[package]] +name = "nix" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c" +dependencies = [ + "bitflags 1.3.2", + "cc", + "cfg-if", + "libc", + "memoffset", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "option-ext" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "outref" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" + +[[package]] +name = "owo-colors" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "caff54706df99d2a78a5a4e3455ff45448d81ef1bb63c22cd14052ca0e993a3f" +dependencies = [ + "supports-color", +] + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + +[[package]] +name = "proc-macro2" +version = "1.0.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_users" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" 
+dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-lite" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b661b2f27137bdbc16f00eda72866a92bb28af1753ffbd56744fb6e2e9cd8e" + +[[package]] +name = "regex-syntax" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http 0.2.12", + "http-body 0.4.6", + "hyper", + "hyper-rustls", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls 0.21.10", + "rustls-native-certs", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-rustls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", 
+ "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.38.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" +dependencies = [ + "bitflags 2.5.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.21.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", 
+] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "ryu" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" + +[[package]] +name = "schannel" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" + +[[package]] +name = "serde" +version = "1.0.198" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9846a40c979031340571da2545a4e5b7c4163bdae79b301d5f86d03979451fcc" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.198" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88edab869b01783ba905e7d0153f9fc1a6505a96e4ad3018011eedb838566d9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.116" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "similar" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "socket2" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "supports-color" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d6398cde53adc3c4557306a96ce67b302968513830a77a95b2b17305d9719a89" +dependencies = [ + "is-terminal", + "is_ci", +] + +[[package]] +name = "syn" +version = "2.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tar" +version = "0.4.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" +dependencies = [ + "filetime", + "libc", + "xattr", +] + +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "thiserror" +version = "1.0.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.58" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny_http" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389915df6413a2e74fb181895f933386023c71110878cd0825588928e64cdc82" +dependencies = [ + "ascii", + "chunked_transfer", + "httpdate", + "log", + "rustls 0.20.9", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.37.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite", + 
"signal-hook-registry", + "socket2", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.10", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "toml_edit" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "744e9ed5b352340aa47ce033716991b5589e23781acb97cad37d4ea70560f55b" +dependencies = [ + "combine", + "indexmap 1.9.3", + "itertools", + "kstring", + "serde", +] + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "uuid" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] 
+name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = 
"0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "xattr" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" +dependencies = [ + "libc", + "linux-raw-sys", + "rustix", +] + +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + +[[package]] +name = "xz2" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2" +dependencies = [ + "lzma-sys", +] + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000..8a7e3fbb --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,38 @@ +[workspace] +members = [ + "crates/criticaltrust", + "crates/criticalup-core", + "crates/criticalup-cli", + "crates/mock-download-server", + "crates/criticalup-dev", + "crates/criticalup", +] +resolver = "2" +exclude = ["docs/.linkchecker/src/tools/linkchecker"] + +# Config for 'cargo dist' +[workspace.metadata.dist] +# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax) +cargo-dist-version = "0.15.0-prerelease.7" +# CI backends to support +ci = "github" +# The installers to generate for each app +installers = ["shell", "powershell", "msi"] +# Target platforms to build apps for (Rust target-triple syntax) +targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] +# Publish jobs to run in CI +pr-run-mode = "plan" +# Whether to install an updater program +install-updater = false +ssldotcom-windows-sign = "prod" + +[workspace.metadata.dist.dependencies.apt] +gcc-aarch64-linux-gnu = "*" +binutils-aarch64-linux-gnu = "*" +"g++-aarch64-linux-gnu" = "*" +libc6-dev-arm64-cross = "*" + +# The profile that 'cargo dist' will build with +[profile.dist] +inherits = "release" +lto = "thin" diff --git a/bors.toml b/bors.toml new file mode 100644 index 00000000..95391d51 --- /dev/null +++ b/bors.toml @@ -0,0 +1,12 @@ +# Gate PRs on the GitHub Actions workflow "CI" +status = ["CI build successful"] + +# Consider the build as failed if it takes more than half an hour to finish +timeout_sec = 1800 + +# Require at least 1 up to date approval before allowing `bors r+` +required_approvals = 1 +up_to_date_approvals = true + +# Remove the personal branches after they're merged +delete_merged_branches = true diff --git a/crates/criticaltrust/Cargo.toml b/crates/criticaltrust/Cargo.toml new 
file mode 100644 index 00000000..77936a36 --- /dev/null +++ b/crates/criticaltrust/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "criticaltrust" +version = "0.4.0" +edition = "2021" +license = "MIT OR Apache-2.0" +description = "Digital signatures implementation for CriticalUp" + +[dependencies] +base64 = "0.21.2" +elliptic-curve = { version = "0.13.5", features = ["pkcs8"] } +p256 = { version = "0.13.2", features = ["ecdsa-core"] } +rand_core = { version = "0.6.4", features = ["getrandom"] } +serde = { version = "1.0.136", features = ["derive"] } +serde_json = "1.0.79" +sha2 = "0.10.7" +thiserror = "1.0.30" +time = { version = "0.3.7", features = ["std", "serde", "serde-well-known"] } +aws-config = { version = "1.0.0", optional = true, features = ["rustls", "behavior-version-latest"] } +aws-sdk-kms = { version = "1.3.0", optional = true, features = ["rustls"] } +aws-smithy-runtime-api = { version = "1.0.0", optional = true } +tokio = { version = "1.29.1", features = ["rt", "rt-multi-thread"], optional = true } + +[dev-dependencies] +itertools = "0.10.3" + +[features] +aws-kms = ["aws-sdk-kms", "aws-config", "aws-smithy-runtime-api", "tokio"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] diff --git a/crates/criticaltrust/src/errors.rs b/crates/criticaltrust/src/errors.rs new file mode 100644 index 00000000..32aa20ab --- /dev/null +++ b/crates/criticaltrust/src/errors.rs @@ -0,0 +1,41 @@ +use crate::keys::KeyRole; +use thiserror::Error; + +#[non_exhaustive] +#[derive(Debug, Error)] +pub enum Error { + #[error("failed to sign data")] + SignatureFailed, + #[error("failed to verify signed data")] + VerificationFailed, + #[error("failed to generate a local key")] + LocalKeyGenerationFailed, + #[error("wrong key role for the trust root key (expected Root, found {0:?})")] + WrongKeyRoleForTrustRoot(KeyRole), + #[error("failed to serialize the contents of the signed payload")] + SignedPayloadSerializationFailed(#[source] 
serde_json::Error), + #[error("failed to deserialize verified data")] + DeserializationFailed(#[source] serde_json::Error), + #[error("failed to load key pair")] + InvalidKey(String), + #[error("unsupported key")] + UnsupportedKey, + #[cfg(feature = "aws-kms")] + #[error("failed to retrieve the public key from AWS KMS")] + AwsKmsFailedToGetPublicKey( + #[from] + aws_sdk_kms::error::SdkError< + aws_sdk_kms::operation::get_public_key::GetPublicKeyError, + aws_smithy_runtime_api::client::orchestrator::HttpResponse, + >, + ), + #[cfg(feature = "aws-kms")] + #[error("failed to sign data with AWS KMS")] + AwsKmsFailedToSign( + #[from] + aws_sdk_kms::error::SdkError< + aws_sdk_kms::operation::sign::SignError, + aws_smithy_runtime_api::client::orchestrator::HttpResponse, + >, + ), +} diff --git a/crates/criticaltrust/src/integrity/detect_manifest.rs b/crates/criticaltrust/src/integrity/detect_manifest.rs new file mode 100644 index 00000000..15070c48 --- /dev/null +++ b/crates/criticaltrust/src/integrity/detect_manifest.rs @@ -0,0 +1,223 @@ +use std::path; + +/// Checks whether the provided path is the expected path of a package manifest, and if so returns +/// the information about that manifest derived from the path. +/// +/// Package manifests are supposed to be located in `share/criticaltrust/${product}/${package}.json`. 
+pub(super) fn is_package_manifest(path: &str) -> Option> { + let mut iter = ReverseSegmentsIter::new(path); + + let package = iter + .next() + .and_then(|s| s.strip_suffix(".json")) + .filter(|s| !s.is_empty())?; + let product = iter.next()?; + iter.next().filter(|s| *s == "criticaltrust")?; + iter.next().filter(|s| *s == "share")?; + + Some(FoundPackageManifest { + package, + product, + prefix: iter.remaining(), + }) +} + +#[derive(Debug, PartialEq, Eq)] +pub(super) struct FoundPackageManifest<'a> { + pub(super) package: &'a str, + pub(super) product: &'a str, + pub(super) prefix: Option<&'a str>, +} + +/// This iterator is similar to [`std::path::Components`], but makes some different choices: +/// +/// * It operates over `str` instead of `Path`, to avoid the roundtrip through `Path`. +/// +/// * It operates in the opposite direction, returning the rightmost segments first. This allows +/// the caller to stop as soon as the right path suffix is found and obtain the remainder. +/// +/// * It returns `None` whenever a segment is empty, even if the path is not fully consumed. This +/// allows short-circuiting with `?` in the caller whenever an expected segment is empty. +/// +/// * When calling `remaining`, it includes the trailing slash instead of omitting it. This allows +/// the rest of the code to join the prefix/remainder without having to insert an extra slash +/// (which could be different depending on the platform). +/// +struct ReverseSegmentsIter<'a> { + first: bool, + cursor: &'a str, +} + +impl<'a> ReverseSegmentsIter<'a> { + fn new(input: &'a str) -> Self { + Self { + first: true, + cursor: input, + } + } + + fn remaining(&self) -> Option<&'a str> { + none_if_empty(self.cursor) + } +} + +impl<'a> Iterator for ReverseSegmentsIter<'a> { + type Item = &'a str; + + fn next(&mut self) -> Option { + let result = loop { + // The `remaining` method needs to preserve the slash at the end, so we can't remove it + // from the cursor. 
The code here can't have it though, otherwise when finding the + // rightmost slash it will immediately find it. To avoid the problem, when accessing the + // cursor slice inside this function we ignore the last byte if it's not the first segment. + let end = self.cursor.len().saturating_sub(!self.first as usize); + self.first = false; + + if let Some(pos) = self.cursor[..end].rfind(path::is_separator) { + let (new_cursor, result) = self.cursor[..end].split_at(pos + 1); + self.cursor = new_cursor; + if !result.is_empty() { + break result; + } + } else { + let result = &self.cursor[..end]; + self.cursor = ""; + break result; + } + }; + + none_if_empty(result) + } +} + +fn none_if_empty(input: &str) -> Option<&str> { + if input.is_empty() { + None + } else { + Some(input) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_is_package_manifest() { + const CASES: &[(&str, Option)] = &[ + ( + "share/criticaltrust/product/package.json", + Some(FoundPackageManifest { + package: "package", + product: "product", + prefix: None, + }), + ), + ( + "share/criticaltrust/other-product/other-package.json", + Some(FoundPackageManifest { + package: "other-package", + product: "other-product", + prefix: None, + }), + ), + ( + "product/share/criticaltrust/product/package.json", + Some(FoundPackageManifest { + package: "package", + product: "product", + prefix: Some("product/"), + }), + ), + ( + "/usr/share/criticaltrust/product/package.json", + Some(FoundPackageManifest { + package: "package", + product: "product", + prefix: Some("/usr/"), + }), + ), + ( + "/usr/local/share/criticaltrust/product/package.json", + Some(FoundPackageManifest { + package: "package", + product: "product", + prefix: Some("/usr/local/"), + }), + ), + ( + "/home/pietro/.criticalup/files/share/criticaltrust/product/package.json", + Some(FoundPackageManifest { + package: "package", + product: "product", + prefix: Some("/home/pietro/.criticalup/files/"), + }), + ), + // Missing segments + 
("criticaltrust/product/package.json", None), + ("share/product/package.json", None), + ("share/criticaltrust/package.json", None), + ("share/criticaltrust/product", None), + // Wrong extension + ("share/criticaltrust/product/package.toml", None), + // Missing extension + ("share/criticaltrust/product/package", None), + // Only extension + ("share/criticaltrust/product/.json", None), + // Empty product name + ("share/criticaltrust//package.json", None), + ]; + + for (case, expected) in CASES { + assert_eq!(*expected, is_package_manifest(case), "\n case: `{case}`\n"); + } + } + + #[test] + fn test_reverse_segments_iter_relative() { + let mut iter = ReverseSegmentsIter::new("foo/ba\\r//baz.json"); + assert_eq!(Some("foo/ba\\r//baz.json"), iter.remaining()); + + assert_eq!(Some("baz.json"), iter.next()); + assert_eq!(Some("foo/ba\\r//"), iter.remaining()); + + #[cfg(unix)] + assert_eq!(Some("ba\\r"), iter.next()); + #[cfg(windows)] // Backslashes exist as legitimate path specifiers on Windows + { + assert_eq!(Some("r"), iter.next()); + assert_eq!(Some("ba"), iter.next()); + } + assert_eq!(Some("foo/"), iter.remaining()); + + assert_eq!(Some("foo"), iter.next()); + assert_eq!(None, iter.remaining()); + + assert_eq!(None, iter.next()); + assert_eq!(None, iter.remaining()); + } + + #[test] + fn test_reverse_segments_iter_absolute() { + let mut iter = ReverseSegmentsIter::new("/foo/ba\\r//baz.json"); + assert_eq!(Some("/foo/ba\\r//baz.json"), iter.remaining()); + + assert_eq!(Some("baz.json"), iter.next()); + assert_eq!(Some("/foo/ba\\r//"), iter.remaining()); + + #[cfg(unix)] + assert_eq!(Some("ba\\r"), iter.next()); + #[cfg(windows)] // Backslashes exist as legitimate path specifiers on Windows + { + assert_eq!(Some("r"), iter.next()); + assert_eq!(Some("ba"), iter.next()); + } + assert_eq!(Some("/foo/"), iter.remaining()); + + assert_eq!(Some("foo"), iter.next()); + assert_eq!(Some("/"), iter.remaining()); + + assert_eq!(None, iter.next()); + assert_eq!(None, 
iter.remaining()); + } +} diff --git a/crates/criticaltrust/src/integrity/mod.rs b/crates/criticaltrust/src/integrity/mod.rs new file mode 100644 index 00000000..a0846dbd --- /dev/null +++ b/crates/criticaltrust/src/integrity/mod.rs @@ -0,0 +1,47 @@ +//! High-level interface to verify the integrity of archives and installations. + +mod detect_manifest; +mod verifier; + +pub use verifier::{IntegrityVerifier, VerifiedPackage}; + +/// Integrity error detected by [`IntegrityVerifier`]. +#[derive(Debug, thiserror::Error)] +pub enum IntegrityError { + #[error("failed to deserialize the package manifest at {path}")] + PackageManifestDeserialization { + path: String, + #[source] + inner: serde_json::Error, + }, + #[error("failed to verify the package manifest at {path}")] + PackageManifestVerification { + path: String, + #[source] + inner: crate::Error, + }, + #[error("wrong POSIX permissions for {path} (expected: {expected:o}, found {found:o})")] + WrongPosixPermissions { + path: String, + expected: u32, + found: u32, + }, + #[error("wrong checksum for {path}")] + WrongChecksum { path: String }, + #[error("the product name of {path} is not {expected} (the file path is wrong)")] + WrongProductName { path: String, expected: String }, + #[error("the package name of {path} is not {expected} (the file path is wrong)")] + WrongPackageName { path: String, expected: String }, + #[error("no package manifest found")] + NoPackageManifestFound, + #[error("expected file {path} is not present")] + MissingFile { path: String }, + #[error("unexpected file {path} is present")] + UnexpectedFile { path: String }, + #[error("unexpected file {path} in prefix managed by CriticalUp ({prefix})")] + UnexpectedFileInManagedPrefix { path: String, prefix: String }, + #[error("file {path} is referenced by multiple package manifests")] + FileReferencedByMultipleManifests { path: String }, + #[error("file {path} was loaded multiple times")] + FileLoadedMultipleTimes { path: String }, +} diff --git 
a/crates/criticaltrust/src/integrity/verifier.rs b/crates/criticaltrust/src/integrity/verifier.rs new file mode 100644 index 00000000..7de62ea0 --- /dev/null +++ b/crates/criticaltrust/src/integrity/verifier.rs @@ -0,0 +1,913 @@ +use crate::integrity::detect_manifest::{is_package_manifest, FoundPackageManifest}; +use crate::integrity::IntegrityError; +use crate::manifests::{PackageFile, PackageManifest}; +use crate::sha256::hash_sha256; +use crate::signatures::Keychain; +use std::collections::{BTreeMap, HashMap, HashSet}; + +/// Verify the integrity of a CriticalUp archive or installation. +/// +/// The verifier does **no I/O**: instead it allows verifying individual files incrementally, and +/// delegates I/O to the caller. This allows verifying an archive on the fly as it's being +/// downloaded, in addition to verifying the contents of a directory in the filesystem. +/// +/// The verifier will collect and return all the integrity errors it finds, instead of +/// short-circuiting at the first encountered error. The list of errors is only accessible after +/// [`verify`](IntegrityVerifier::verify) is called. +pub struct IntegrityVerifier<'a> { + keychain: &'a Keychain, + errors: Vec, + verified_packages: Vec, + allow_external_files: bool, + + managed_prefixes: HashSet, + loaded_files: HashSet, + referenced_by_manifests_but_missing: HashMap, + added_but_not_referenced_by_manifests: HashMap, +} + +impl<'a> IntegrityVerifier<'a> { + /// Create a new verifier instance, using the provided keyring to verify the digital signatures + /// of the package manifests. 
+ pub fn new(keychain: &'a Keychain) -> Self { + Self { + keychain, + errors: Vec::new(), + verified_packages: Vec::new(), + allow_external_files: false, + + managed_prefixes: HashSet::new(), + loaded_files: HashSet::new(), + referenced_by_manifests_but_missing: HashMap::new(), + added_but_not_referenced_by_manifests: HashMap::new(), + } + } + + /// Decide whether all files must be referenced by a verified manifest, or "external" files not + /// referenced by any manifest are allowed. By default, external files are **not** allowed. + /// + /// Note that even when external files are allowed, they must not be present in path prefixes a + /// manifest considers managed exclusively by CriticalUp, and errors will be returned in those + /// cases. + /// + /// This setting can be toggled at any point before calling + /// [`verify`](IntegrityVerifier::verify), as the checks for external files are performed in + /// that method. + pub fn allow_external_files(&mut self, allow: bool) { + self.allow_external_files = allow; + } + + /// Include the provided path and contents in the files pending verification. + /// + /// The order in which files are added does not matter, but the same file can't be added twice. + /// The verifier will not store in memory the contents of the file, but it will keep track of + /// the metadata potentially until [`verify`](IntegrityVerifier::verify) is called. 
+ pub fn add(&mut self, path: &str, mode: u32, contents: &[u8]) { + if !self.loaded_files.insert(path.into()) { + self.errors + .push(IntegrityError::FileLoadedMultipleTimes { path: path.into() }); + return; + } + + if let Some(found) = is_package_manifest(path) { + if let Err(err) = self.add_package_manifest(path, &found, contents) { + self.errors.push(err); + } + } else { + let entry = FoundFile { + mode, + sha256: hash_sha256(contents), + }; + + if let Some(manifest) = self.referenced_by_manifests_but_missing.remove(path) { + self.verify_file(path, &manifest, &entry); + } else { + self.added_but_not_referenced_by_manifests + .insert(path.into(), entry); + } + } + } + + /// Perform the final checks and return the outcome of the verification. The method either + /// returns all the packages it successfully verified, or if any error occured during + /// verification it will return all encountered errors. + pub fn verify(mut self) -> Result, Vec> { + if self.verified_packages.is_empty() { + self.errors.push(IntegrityError::NoPackageManifestFound); + } + + for path in self.referenced_by_manifests_but_missing.into_keys() { + self.errors.push(IntegrityError::MissingFile { path }); + } + + for path in self.added_but_not_referenced_by_manifests.into_keys() { + if self.allow_external_files { + for prefix in &self.managed_prefixes { + if path.starts_with(prefix) { + self.errors + .push(IntegrityError::UnexpectedFileInManagedPrefix { + path, + prefix: prefix.clone(), + }); + break; + } + } + } else { + self.errors.push(IntegrityError::UnexpectedFile { path }); + } + } + + if self.errors.is_empty() { + Ok(self.verified_packages) + } else { + Err(self.errors) + } + } + + fn add_package_manifest( + &mut self, + path: &str, + found: &FoundPackageManifest, + contents: &[u8], + ) -> Result<(), IntegrityError> { + let manifest = serde_json::from_slice::(contents) + .map_err(|e| IntegrityError::PackageManifestDeserialization { + path: path.into(), + inner: e, + })? 
+ .signed + .into_verified(self.keychain) + .map_err(|e| IntegrityError::PackageManifestVerification { + path: path.into(), + inner: e, + })?; + + if found.product != manifest.product { + return Err(IntegrityError::WrongProductName { + path: path.into(), + expected: manifest.product, + }); + } + if found.package != manifest.package { + return Err(IntegrityError::WrongPackageName { + path: path.into(), + expected: manifest.package, + }); + } + + let mut proxies_paths = BTreeMap::new(); + let prefix = found.prefix.map(String::from).unwrap_or_default(); + for file in manifest.files { + let file_path = prefix.clone() + &file.path; + + if file.needs_proxy { + let proxy_name = file_path + .rsplit_once('/') + .map(|(_dir, name)| name) + .unwrap_or(&file_path); + proxies_paths.insert(proxy_name.into(), file_path.clone()); + } + + if let Some(found) = self + .added_but_not_referenced_by_manifests + .remove(&file_path) + { + self.verify_file(&file_path, &file, &found); + } else if self.loaded_files.contains(&file_path) + || self + .referenced_by_manifests_but_missing + .insert(file_path.clone(), file) + .is_some() + { + self.errors + .push(IntegrityError::FileReferencedByMultipleManifests { path: file_path }); + } + } + + for managed_prefix in manifest.managed_prefixes { + self.managed_prefixes + .insert(prefix.clone() + &managed_prefix); + } + + self.verified_packages.push(VerifiedPackage { + product: manifest.product, + package: manifest.package, + proxies_paths, + }); + + Ok(()) + } + + fn verify_file(&mut self, path: &str, manifest: &PackageFile, actual: &FoundFile) { + if manifest.posix_mode != actual.mode { + self.errors.push(IntegrityError::WrongPosixPermissions { + path: path.into(), + expected: manifest.posix_mode, + found: actual.mode, + }); + } + if manifest.sha256 != actual.sha256 { + self.errors + .push(IntegrityError::WrongChecksum { path: path.into() }); + } + } +} + +/// Information about a package verified by [`IntegrityVerifier`]. 
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Hash)] +pub struct VerifiedPackage { + /// Name of the product this package belongs to. + pub product: String, + /// Name of the package. + pub package: String, + /// List of the paths of all binaries that need a proxy. + pub proxies_paths: BTreeMap, +} + +struct FoundFile { + mode: u32, + sha256: Vec, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::keys::{EphemeralKeyPair, KeyRole}; + use crate::manifests::{ManifestVersion, Package}; + use crate::signatures::SignedPayload; + use crate::test_utils::TestEnvironment; + use crate::Error; + use itertools::Itertools; + use std::borrow::Cow; + + // Note that the tests verify all possible permutations of input files, ensuring the expected + // behavior regardless of the order files are provided to the verifier. + + const BIN_A: TestFile = TestFile::new("bin/a", 0o755, b"foo binary"); + const BIN_B: TestFile = TestFile::new("bin/b", 0o755, b"bar binary"); + const SHARE_A: TestFile = TestFile::new("share/a", 0o644, b"a file"); + const SHARE_B: TestFile = TestFile::new("share/b", 0o644, b"b file"); + + macro_rules! btreemap { + ($($key:expr => $value:expr),*$(,)?) => {{ + let mut map = BTreeMap::new(); + $(map.insert($key.into(), $value.into());)* + map + }} + } + + macro_rules! errors { + ($($pat:pat $(if $if:expr)?),*$(,)?) => {{ + let errors: &[(&str, fn(&IntegrityError) -> bool)] = &[$( + ( + stringify!($pat $(if $if)?), + |error| match error { + $pat $(if $if)? 
=> true, + _ => false, + }, + ), + )*]; + errors + }} + } + + #[test] + fn test_no_manifests() { + IntegrityTest::new().assert_errors(errors![IntegrityError::NoPackageManifestFound]); + } + + #[test] + fn test_one_manifest_with_files() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A).file(&SHARE_A)) + .file(&BIN_A) + .file(&SHARE_A) + .assert_verified(&[("a", "b")]); + } + + #[test] + fn test_one_manifest_with_files_in_a_prefix() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A) + .file(&SHARE_A) + .prefix("foo/"), + ) + .file(&BIN_A.prefix("foo/")) + .file(&SHARE_A.prefix("foo/")) + .assert_verified(&[("a", "b")]); + } + + #[test] + fn test_multiple_manifests_in_different_prefixes() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A) + .file(&SHARE_A) + .prefix("foo/"), + ) + .manifest( + ManifestBuilder::new("a", "c") + .file(&BIN_A) + .file(&SHARE_A) + .prefix("bar/"), + ) + .file(&BIN_A.prefix("foo/")) + .file(&BIN_A.prefix("bar/")) + .file(&SHARE_A.prefix("foo/")) + .file(&SHARE_A.prefix("bar/")) + .assert_verified(&[("a", "b"), ("a", "c")]); + } + + #[test] + fn test_multiple_manifests_in_the_same_prefix() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A).file(&SHARE_A)) + .manifest(ManifestBuilder::new("a", "c").file(&BIN_B).file(&SHARE_B)) + .file(&BIN_A) + .file(&BIN_B) + .file(&SHARE_A) + .file(&SHARE_B) + .assert_verified(&[("a", "b"), ("a", "c")]); + } + + #[test] + fn test_manifest_nested_inside_other_manifest() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A).file(&SHARE_A)) + .manifest( + ManifestBuilder::new("a", "c") + .file(&BIN_A) + .file(&SHARE_A) + .prefix("share/foo/"), + ) + .file(&BIN_A) + .file(&BIN_A.prefix("share/foo/")) + .file(&SHARE_A.prefix("share/foo/")) + .file(&SHARE_A) + .assert_verified(&[("a", "b"), ("a", "c")]); + } + + #[test] + fn 
test_same_file_in_multiple_manifests() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A)) + .manifest(ManifestBuilder::new("a", "c").file(&BIN_A).file(&BIN_B)) + .file(&BIN_A) + .file(&BIN_B) + .assert_errors(errors![ + IntegrityError::FileReferencedByMultipleManifests { path } if path == "bin/a", + ]); + } + + #[test] + fn test_files_with_wrong_checksum() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A) + .file(&BIN_B) + .file(&SHARE_A), + ) + .file(&BIN_A.add_content(b"!")) + .file(&BIN_B.add_content(b"!")) + .file(&SHARE_A) + .assert_errors(errors![ + IntegrityError::WrongChecksum { path } if path == "bin/a", + IntegrityError::WrongChecksum { path } if path == "bin/b", + ]); + } + + #[test] + fn test_files_with_wrong_mode() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A) + .file(&BIN_B) + .file(&SHARE_A), + ) + .file(&BIN_A.mode(0o644)) + .file(&BIN_B.mode(0o644)) + .file(&SHARE_A) + .assert_errors(errors![ + IntegrityError::WrongPosixPermissions { + path, + expected: 0o755, + found: 0o644, + } if path == "bin/a", + IntegrityError::WrongPosixPermissions { + path, + expected: 0o755, + found: 0o644, + } if path == "bin/b", + ]); + } + + #[test] + fn test_files_with_both_wrong_mode_and_wrong_checksum() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A).file(&BIN_B)) + .file(&BIN_A.add_content(b"!").mode(0o644)) + .file(&BIN_B) + .assert_errors(errors![ + IntegrityError::WrongPosixPermissions { + path, + expected: 0o755, + found: 0o644, + } if path == "bin/a", + IntegrityError::WrongChecksum { path } if path == "bin/a", + ]); + } + + #[test] + fn test_mismatched_product_name() { + IntegrityTest::new() + .manifest_in( + "share/criticaltrust/z/b.json", + ManifestBuilder::new("a", "b").file(&BIN_A), + ) + .file(&BIN_A) + .assert_errors(errors![ + IntegrityError::WrongProductName { path, expected } + if expected == "a" && path == 
"share/criticaltrust/z/b.json", + // The manifest is completely ignored, resulting in more errors. + IntegrityError::NoPackageManifestFound, + IntegrityError::UnexpectedFile { path } if path == "bin/a", + ]); + } + + #[test] + fn test_mismatched_package_name() { + IntegrityTest::new() + .manifest_in( + "share/criticaltrust/a/z.json", + ManifestBuilder::new("a", "b").file(&BIN_A), + ) + .file(&BIN_A) + .assert_errors(errors![ + IntegrityError::WrongPackageName { path, expected } + if expected == "b" && path == "share/criticaltrust/a/z.json", + // The manifest is completely ignored, resulting in more errors. + IntegrityError::NoPackageManifestFound, + IntegrityError::UnexpectedFile { path } if path == "bin/a", + ]); + } + + #[test] + fn test_files_not_in_manifest() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A)) + .file(&BIN_A) + .file(&SHARE_A) + .assert_errors(errors![ + IntegrityError::UnexpectedFile { path } if path == "share/a", + ]); + } + + #[test] + fn test_files_in_manifest_not_present() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A).file(&SHARE_A)) + .file(&BIN_A) + .assert_errors(errors![ + IntegrityError::MissingFile { path } if path == "share/a", + ]); + } + + #[test] + fn test_untrusted_manifest() { + // This key is not trusted by the keychain created by IntegrityTest. 
+ let key = EphemeralKeyPair::generate( + crate::keys::KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, + KeyRole::Packages, + None, + ) + .unwrap(); + + IntegrityTest::new() + .file(&TestFile { + path: "share/criticaltrust/a/b.json".into(), + mode: 0o644, + contents: ManifestBuilder::new("a", "b") + .file(&BIN_A) + .finish(&key) + .into(), + needs_proxy: false, + }) + .assert_errors(errors![ + IntegrityError::PackageManifestVerification { + path, + inner: Error::VerificationFailed, + } if path == "share/criticaltrust/a/b.json", + // No valid one was found: + IntegrityError::NoPackageManifestFound, + ]); + } + + #[test] + fn test_invalid_json_in_manifest() { + IntegrityTest::new() + .file(&TestFile::new( + "share/criticaltrust/a/b.json", + 0o644, + b"{not valid json}", + )) + .assert_errors(errors![ + IntegrityError::PackageManifestDeserialization { path, inner } + if path == "share/criticaltrust/a/b.json" && inner.is_syntax(), + // No valid one was found: + IntegrityError::NoPackageManifestFound, + ]); + } + + #[test] + fn test_unprefixed_manifest_with_prefixed_files() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A).file(&SHARE_A)) + .file(&BIN_A.prefix("foo/")) + .file(&SHARE_A.prefix("foo/")) + .assert_errors(errors![ + IntegrityError::MissingFile { path } if path == "bin/a", + IntegrityError::MissingFile { path } if path == "share/a", + IntegrityError::UnexpectedFile { path } if path == "foo/bin/a", + IntegrityError::UnexpectedFile { path } if path == "foo/share/a", + ]); + } + + #[test] + fn test_prefixed_manifest_with_unprefixed_files() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A) + .file(&SHARE_A) + .prefix("foo/"), + ) + .file(&BIN_A) + .file(&SHARE_A) + .assert_errors(errors![ + IntegrityError::MissingFile { path } if path == "foo/bin/a", + IntegrityError::MissingFile { path } if path == "foo/share/a", + IntegrityError::UnexpectedFile { path } if path == "bin/a", + 
IntegrityError::UnexpectedFile { path } if path == "share/a", + ]); + } + + #[test] + fn test_file_loaded_multiple_times() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A)) + .file(&BIN_A) + .file(&BIN_A) + .assert_errors(errors![ + IntegrityError::FileLoadedMultipleTimes { path } if path == "bin/a", + ]); + } + + #[test] + fn test_manifest_loaded_multiple_times() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A)) + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A)) + .file(&BIN_A) + .assert_errors(errors![ + IntegrityError::FileLoadedMultipleTimes { path } + if path == "share/criticaltrust/a/b.json" + ]); + } + + #[test] + fn test_collecting_needs_proxy_binaries() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A.needs_proxy()) + .file(&BIN_B), + ) + .file(&BIN_A) + .file(&BIN_B) + .assert_verified(&[VerifiedPackage { + product: "a".into(), + package: "b".into(), + proxies_paths: btreemap! {"a" => "bin/a"}, + }]); + } + + #[test] + fn test_collecting_needs_proxy_binaries_inside_a_prefix() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A.needs_proxy()) + .file(&BIN_B) + .prefix("foo/"), + ) + .file(&BIN_A.prefix("foo/")) + .file(&BIN_B.prefix("foo/")) + .assert_verified(&[VerifiedPackage { + product: "a".into(), + package: "b".into(), + proxies_paths: btreemap! 
{"a" => "foo/bin/a"}, + }]); + } + + #[test] + fn test_allowing_external_files() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A)) + .file(&BIN_A) + .file(&BIN_B) + .allow_external_files() + .assert_verified(&[("a", "b")]); + } + + #[test] + fn test_allowing_external_files_in_managed_prefixes() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A) + .managed_prefix("bin/"), + ) + .file(&BIN_A) + .file(&BIN_B) + .allow_external_files() + .assert_errors(errors![ + IntegrityError::UnexpectedFileInManagedPrefix { path, prefix } + if path == "bin/b" && prefix == "bin/" + ]); + } + + #[test] + fn test_allowing_external_files_in_managed_prefixes_inside_a_prefix() { + IntegrityTest::new() + .manifest( + ManifestBuilder::new("a", "b") + .file(&BIN_A) + .managed_prefix("bin/") + .prefix("foo/"), + ) + .file(&BIN_A.prefix("foo/")) + .file(&BIN_B.prefix("foo/")) + .file(&BIN_A) + .allow_external_files() + .assert_errors(errors![ + IntegrityError::UnexpectedFileInManagedPrefix { path, prefix } + if path == "foo/bin/b" && prefix == "foo/bin/" + ]); + } + + #[test] + fn test_allowing_external_files_inside_a_prefix() { + IntegrityTest::new() + .manifest(ManifestBuilder::new("a", "b").file(&BIN_A).prefix("foo/")) + .file(&BIN_A.prefix("foo/")) + .file(&BIN_B.prefix("foo/")) + .file(&BIN_A) + .allow_external_files() + .assert_verified(&[("a", "b")]); + } + + #[derive(Clone)] + struct TestFile { + path: Cow<'static, str>, + mode: u32, + contents: Cow<'static, [u8]>, + needs_proxy: bool, + } + + impl TestFile { + const fn new(path: &'static str, mode: u32, contents: &'static [u8]) -> Self { + Self { + path: Cow::Borrowed(path), + mode, + contents: Cow::Borrowed(contents), + needs_proxy: false, + } + } + + fn prefix(mut self, prefix: &str) -> Self { + let path = self.path.into_owned(); + self.path = Cow::Owned(format!("{prefix}{path}")); + self + } + + fn mode(mut self, new: u32) -> Self { + self.mode = new; + self + } + 
+ fn needs_proxy(mut self) -> Self { + self.needs_proxy = true; + self + } + + fn add_content(mut self, extra: &[u8]) -> Self { + let mut contents = self.contents.into_owned(); + contents.extend_from_slice(extra); + self.contents = Cow::Owned(contents); + self + } + } + + type ErrorMatcher = fn(&IntegrityError) -> bool; + + struct ManifestBuilder { + manifest: Package, + prefix: String, + } + + impl ManifestBuilder { + fn new(product: &str, package: &str) -> Self { + Self { + manifest: Package { + product: product.into(), + package: package.into(), + commit: String::new(), + files: Vec::new(), + managed_prefixes: Vec::new(), + }, + prefix: String::new(), + } + } + + fn managed_prefix(mut self, prefix: &str) -> Self { + self.manifest.managed_prefixes.push(prefix.into()); + self + } + + fn file(mut self, file: &TestFile) -> Self { + self.manifest.files.push(PackageFile { + path: file.path.as_ref().into(), + posix_mode: file.mode, + sha256: hash_sha256(&file.contents), + needs_proxy: file.needs_proxy, + }); + self + } + + fn prefix(mut self, prefix: &str) -> Self { + self.prefix = prefix.into(); + self + } + + fn finish(self, key: &EphemeralKeyPair) -> Vec { + let mut signed = SignedPayload::new(&self.manifest).unwrap(); + signed.add_signature(key).unwrap(); + + serde_json::to_vec(&PackageManifest { + version: ManifestVersion, + signed, + }) + .unwrap() + } + } + + struct IntegrityTest { + env: TestEnvironment, + key: EphemeralKeyPair, + allow_external_files: bool, + files: Vec, + } + + impl IntegrityTest { + fn new() -> Self { + let mut env = TestEnvironment::prepare(); + let key = env.create_key(KeyRole::Packages); + Self { + env, + key, + allow_external_files: false, + files: Vec::new(), + } + } + + fn allow_external_files(mut self) -> Self { + self.allow_external_files = true; + self + } + + fn file(mut self, file: &TestFile) -> Self { + self.files.push(file.clone()); + self + } + + fn manifest(self, builder: ManifestBuilder) -> Self { + self.manifest_in( + 
&format!( + "{}share/criticaltrust/{}/{}.json", + builder.prefix, builder.manifest.product, builder.manifest.package + ), + builder, + ) + } + + fn manifest_in(mut self, path: &str, builder: ManifestBuilder) -> Self { + self.files.push(TestFile { + path: path.to_string().into(), + mode: 0o644, + contents: builder.finish(&self.key).into(), + needs_proxy: false, + }); + self + } + + #[track_caller] + fn assert_verified(self, found: &[impl AsVerifiedPackage]) { + let mut expected = found + .iter() + .map(|vp| vp.as_verified_package()) + .collect::>(); + expected.sort(); + + self.permutations(|result| { + let mut result = result.unwrap(); + result.sort(); + + assert_eq!(result, expected); + }) + } + + #[track_caller] + fn assert_errors(self, matchers: &[(&str, ErrorMatcher)]) { + self.permutations(|result| { + let mut matchers = matchers.iter().map(Some).collect::>(); + + let result = result.unwrap_err(); + for error in &result { + let mut matched = false; + for matcher in &mut matchers { + if let Some((_, m)) = matcher { + if m(error) { + matched = true; + *matcher = None; + break; + } + } + } + if !matched { + panic!( + "\n\nreturned errors: {result:?}\n\ + -> found error not matching any pattern: {error:?}\n" + ); + } + } + + let unmatched = matchers + .into_iter() + .flatten() + .map(|m| m.0) + .collect::>(); + if !unmatched.is_empty() { + panic!( + "\n\nreturned errors: {result:?}\n\ + -> some matchers were not matched: {unmatched:?}\n" + ); + } + }) + } + + #[track_caller] + fn permutations(self, f: impl Fn(Result, Vec>)) { + self.files + .iter() + .permutations(self.files.len()) + .for_each(|files| { + println!( + "current permutation: {:?}", + files.iter().map(|f| &f.path).collect::>() + ); + + let mut verifier = IntegrityVerifier::new(self.env.keychain()); + verifier.allow_external_files(self.allow_external_files); + for file in files { + verifier.add(&file.path, file.mode, &file.contents); + } + f(verifier.verify()); + }) + } + } + + trait AsVerifiedPackage { 
+ fn as_verified_package(&self) -> VerifiedPackage; + } + + impl AsVerifiedPackage for (&str, &str) { + fn as_verified_package(&self) -> VerifiedPackage { + VerifiedPackage { + product: self.0.into(), + package: self.1.into(), + proxies_paths: BTreeMap::new(), + } + } + } + + impl AsVerifiedPackage for VerifiedPackage { + fn as_verified_package(&self) -> VerifiedPackage { + self.clone() + } + } +} diff --git a/crates/criticaltrust/src/keys/algorithms/ecdsa_p256_sha256_asn1_spki_der.rs b/crates/criticaltrust/src/keys/algorithms/ecdsa_p256_sha256_asn1_spki_der.rs new file mode 100644 index 00000000..c0e5aad9 --- /dev/null +++ b/crates/criticaltrust/src/keys/algorithms/ecdsa_p256_sha256_asn1_spki_der.rs @@ -0,0 +1,136 @@ +use crate::keys::algorithms::Algorithm; +use crate::keys::newtypes::{PayloadBytes, PrivateKeyBytes, PublicKeyBytes, SignatureBytes}; +use crate::Error; +use elliptic_curve::pkcs8::{DecodePrivateKey, DecodePublicKey, EncodePrivateKey, EncodePublicKey}; +use p256::ecdsa::signature::{Signer, Verifier}; +use p256::ecdsa::{Signature, SigningKey, VerifyingKey}; +use p256::{PublicKey, SecretKey}; + +pub(super) struct EcdsaP256Sha256Asn1SpkiDer; + +impl Algorithm for EcdsaP256Sha256Asn1SpkiDer { + fn sign( + &self, + private_key: &PrivateKeyBytes<'_>, + payload: &PayloadBytes<'_>, + ) -> Result, Error> { + let key = SigningKey::from( + SecretKey::from_pkcs8_der(private_key.as_bytes()) + .map_err(|e| Error::InvalidKey(e.to_string()))?, + ); + + let signature: Signature = key.sign(payload.as_bytes()); + Ok(SignatureBytes::owned( + signature.to_der().to_bytes().to_vec(), + )) + } + + fn verify( + &self, + public_key: &PublicKeyBytes<'_>, + payload: &PayloadBytes<'_>, + signature: &SignatureBytes<'_>, + ) -> Result<(), Error> { + let key = VerifyingKey::from( + PublicKey::from_public_key_der(public_key.as_bytes()) + .map_err(|e| Error::InvalidKey(e.to_string()))?, + ); + + let signature = + Signature::from_der(signature.as_bytes()).map_err(|_| 
Error::VerificationFailed)?; + key.verify(payload.as_bytes(), &signature) + .map_err(|_| Error::VerificationFailed) + } + + fn generate_private_key(&self) -> Result, Error> { + let key = SecretKey::random(&mut rand_core::OsRng); + Ok(PrivateKeyBytes::owned( + key.to_pkcs8_der() + .expect("generated private key cannot be encoded") + .to_bytes() + .to_vec(), + )) + } + + fn derive_public_key_from_private_key( + &self, + private_key: &PrivateKeyBytes<'_>, + ) -> Result, Error> { + let key = SecretKey::from_pkcs8_der(private_key.as_bytes()) + .map_err(|e| Error::InvalidKey(e.to_string()))?; + Ok(PublicKeyBytes::owned( + key.public_key() + .to_public_key_der() + .map_err(|e| Error::InvalidKey(e.to_string()))? + .to_vec(), + )) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::{base64_decode, base64_encode}; + + // Manually generated by invoking the methods. + const PRIVATE_KEY: &str = "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgneZv/FWLuK6glg1byqqauteGYe0VDoHmpK9jvt+yzuqhRANCAASi3D+Cfz/MWR26spM2VWBEmV+uhT5k9VdGFRIyuv1F6Rjjfma7EAWg+m3cU8L+BtYeYxx0hGmkQK591DUnLnnO"; + const PUBLIC_KEY: &str = "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEotw/gn8/zFkdurKTNlVgRJlfroU+ZPVXRhUSMrr9RekY435muxAFoPpt3FPC/gbWHmMcdIRppECufdQ1Jy55zg=="; + const SIGNATURE: &str = "MEQCIC6vI/lSbiusPBh7RSyw0N7en09FPXXF4AoP8va/j461AiAEfOAn6TSx6eAwxqm92luJZWu06R2JFVq1NA333s7njA=="; + const PLAINTEXT: PayloadBytes<'static> = PayloadBytes::borrowed(b"Hello world"); + + #[test] + fn test_generated_keys_are_not_equal() -> Result<(), Error> { + let key_a = EcdsaP256Sha256Asn1SpkiDer.generate_private_key()?; + let key_b = EcdsaP256Sha256Asn1SpkiDer.generate_private_key()?; + + assert_ne!(key_a, key_b); + Ok(()) + } + + #[test] + fn test_derive_public_key() -> Result<(), Error> { + assert_eq!( + PUBLIC_KEY, + base64_encode( + EcdsaP256Sha256Asn1SpkiDer + .derive_public_key_from_private_key(&PrivateKeyBytes::owned(b64(PRIVATE_KEY)))? 
+ .as_bytes() + ), + ); + Ok(()) + } + + #[test] + fn test_verify() { + assert!(EcdsaP256Sha256Asn1SpkiDer + .verify( + &PublicKeyBytes::owned(b64(PUBLIC_KEY)), + &PLAINTEXT, + &SignatureBytes::owned(b64(SIGNATURE)) + ) + .is_ok()); + + let mut broken_signature = b64(SIGNATURE); + broken_signature[0] = broken_signature[0].wrapping_add(1); + assert!(EcdsaP256Sha256Asn1SpkiDer + .verify( + &PublicKeyBytes::owned(b64(PUBLIC_KEY)), + &PLAINTEXT, + &SignatureBytes::owned(broken_signature) + ) + .is_err()); + } + + #[test] + fn test_sign() -> Result<(), Error> { + let signature = EcdsaP256Sha256Asn1SpkiDer + .sign(&PrivateKeyBytes::owned(b64(PRIVATE_KEY)), &PLAINTEXT)?; + assert_eq!(SIGNATURE, base64_encode(signature.as_bytes())); + + Ok(()) + } + + fn b64(encoded: &str) -> Vec { + base64_decode(encoded).unwrap() + } +} diff --git a/crates/criticaltrust/src/keys/algorithms/mod.rs b/crates/criticaltrust/src/keys/algorithms/mod.rs new file mode 100644 index 00000000..01897de0 --- /dev/null +++ b/crates/criticaltrust/src/keys/algorithms/mod.rs @@ -0,0 +1,78 @@ +mod ecdsa_p256_sha256_asn1_spki_der; + +use crate::keys::algorithms::ecdsa_p256_sha256_asn1_spki_der::EcdsaP256Sha256Asn1SpkiDer; +use crate::keys::newtypes::{PayloadBytes, PrivateKeyBytes, PublicKeyBytes, SignatureBytes}; +use crate::Error; +use serde::{Deserialize, Serialize}; + +/// Cryptographic algorithm used for signature verification. +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Copy, Clone)] +pub enum KeyAlgorithm { + /// ECDSA using P256 and SHA256, encoded as ASN.1, with the public key encoded as SPKI with + /// DER. 
+ #[serde(rename = "ecdsa-p256-sha256-asn1-spki-der")] + EcdsaP256Sha256Asn1SpkiDer, + #[serde(other)] + #[doc(hidden)] + Unknown, +} + +impl KeyAlgorithm { + pub(crate) fn methods(&self) -> &'static dyn Algorithm { + match self { + KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer => &EcdsaP256Sha256Asn1SpkiDer, + KeyAlgorithm::Unknown => &UnknownAlgorithm, + } + } +} + +pub(crate) trait Algorithm { + fn sign( + &self, + private_key: &PrivateKeyBytes<'_>, + payload: &PayloadBytes<'_>, + ) -> Result, Error>; + fn verify( + &self, + public_key: &PublicKeyBytes<'_>, + payload: &PayloadBytes<'_>, + signature: &SignatureBytes<'_>, + ) -> Result<(), Error>; + fn generate_private_key(&self) -> Result, Error>; + fn derive_public_key_from_private_key( + &self, + private_key: &PrivateKeyBytes<'_>, + ) -> Result, Error>; +} + +struct UnknownAlgorithm; + +impl Algorithm for UnknownAlgorithm { + fn sign( + &self, + _: &PrivateKeyBytes<'_>, + _: &PayloadBytes<'_>, + ) -> Result, Error> { + Err(Error::UnsupportedKey) + } + + fn verify( + &self, + _: &PublicKeyBytes<'_>, + _: &PayloadBytes<'_>, + _: &SignatureBytes<'_>, + ) -> Result<(), Error> { + Err(Error::UnsupportedKey) + } + + fn generate_private_key(&self) -> Result, Error> { + Err(Error::UnsupportedKey) + } + + fn derive_public_key_from_private_key( + &self, + _: &PrivateKeyBytes<'_>, + ) -> Result, Error> { + Err(Error::UnsupportedKey) + } +} diff --git a/crates/criticaltrust/src/keys/mod.rs b/crates/criticaltrust/src/keys/mod.rs new file mode 100644 index 00000000..06d8621c --- /dev/null +++ b/crates/criticaltrust/src/keys/mod.rs @@ -0,0 +1,20 @@ +//! Low-level keys and digital signature management. +//! +//! This module provides a safe and secure abstraction over the cryptographic primitives used by +//! criticaltrust, with the goal of preventing misuse through the type system. For a higher level +//! abstraction, check out the [`signatures`](crate::signatures) module. 
+ +mod algorithms; +pub mod newtypes; +mod pair; +#[cfg(feature = "aws-kms")] +mod pair_aws_kms; +mod pair_ephemeral; +mod public; + +pub use algorithms::KeyAlgorithm; +pub use pair::KeyPair; +#[cfg(feature = "aws-kms")] +pub use pair_aws_kms::AwsKmsKeyPair; +pub use pair_ephemeral::EphemeralKeyPair; +pub use public::{KeyId, KeyRole, PublicKey}; diff --git a/crates/criticaltrust/src/keys/newtypes.rs b/crates/criticaltrust/src/keys/newtypes.rs new file mode 100644 index 00000000..eee67ea7 --- /dev/null +++ b/crates/criticaltrust/src/keys/newtypes.rs @@ -0,0 +1,70 @@ +//! Newtypes used by the `keys` module to prevent errors at the type system level. +//! +//! APIs in the `keys` module require multiple byte slices as their input, and it would be easy to +//! accidentally pass a public key when a signature is required (for example). To prevent these +//! kinds of errors at compile time, this module defines newtypes used across criticaltrust. + +use crate::serde_base64::SerdeBase64; +use serde::{Deserialize, Serialize}; +use std::borrow::Cow; + +macro_rules! newtypes { + ( + $( + $(#[$meta:meta])* + $vis:vis struct $name:ident(..); + )* + ) => { + $( + $(#[$meta])* + #[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)] + #[serde(transparent)] + $vis struct $name<'a>(Cow<'a, [u8]>); + + #[allow(unused)] + impl<'a> $name<'a> { + /// Create a new instance using borrowed content. + $vis const fn borrowed(data: &'a [u8]) -> Self{ + Self(Cow::Borrowed(data)) + } + + /// Create a new instance using owned content. + $vis fn owned(data: Vec) -> Self { + Self(Cow::Owned(data)) + } + + /// Return the underlying bytes representation. + $vis fn as_bytes(&self) -> &[u8] { + &self.0 + } + } + + impl SerdeBase64 for $name<'static> { + fn from_bytes(bytes: Vec) -> Result { + Ok(Self(Cow::Owned(bytes))) + } + + fn to_bytes(&self) -> &[u8] { + &self.0 + } + } + )* + } +} + +newtypes! { + /// Contains the bytes representing a public key. 
+ /// + /// The format of the public key depends on the algorithm used. + pub struct PublicKeyBytes(..); + + /// Contains the bytes representing a payload to sign or verify. + pub struct PayloadBytes(..); + + /// Contains the bytes representing a signature. + /// + /// The format of the signature depends on the algorithm used. + pub struct SignatureBytes(..); + + pub(crate) struct PrivateKeyBytes(..); +} diff --git a/crates/criticaltrust/src/keys/pair.rs b/crates/criticaltrust/src/keys/pair.rs new file mode 100644 index 00000000..3f1017f1 --- /dev/null +++ b/crates/criticaltrust/src/keys/pair.rs @@ -0,0 +1,17 @@ +use crate::keys::newtypes::{PayloadBytes, SignatureBytes}; +use crate::keys::PublicKey; +use crate::Error; + +/// Pair of public and private keys capable of signing. +/// +/// A key pair is required whenever signing operations are performed, and the trait allows swapping +/// implementations depending on the use case. The private portion of the pair doesn't have to be +/// stored on the local system. For example, it's possible and recommended for implementations of +/// [`KeyPair`] to rely on a Hardware Security Module. +pub trait KeyPair { + /// Retrieve the [`PublicKey`] associated with this key pair. + fn public(&self) -> &PublicKey; + + /// Sign the provided data with this key pair, returning the bytes of the signature. 
+ fn sign(&self, data: &PayloadBytes<'_>) -> Result, Error>; +} diff --git a/crates/criticaltrust/src/keys/pair_aws_kms.rs b/crates/criticaltrust/src/keys/pair_aws_kms.rs new file mode 100644 index 00000000..c8216c19 --- /dev/null +++ b/crates/criticaltrust/src/keys/pair_aws_kms.rs @@ -0,0 +1,263 @@ +use super::newtypes::{PayloadBytes, SignatureBytes}; +use super::PublicKey; +use crate::keys::newtypes::PublicKeyBytes; +use crate::keys::{KeyAlgorithm, KeyPair, KeyRole}; +use crate::sha256::hash_sha256; +use crate::Error; +use aws_sdk_kms::primitives::Blob; +use aws_sdk_kms::types::{KeySpec, MessageType, SigningAlgorithmSpec}; +use aws_sdk_kms::Client; +use tokio::runtime::Handle; + +/// Pair of public and private keys stored in [AWS KMS](https://aws.amazon.com/kms/). +/// +/// The private key is exclusively stored inside of AWS KMS, and this struct makes network calls to +/// kMS for every signature request. The public key is downloaded locally when the struct is +/// instantiated, and signatures are verified without making network calls. +pub struct AwsKmsKeyPair { + handle: Handle, + kms: Client, + key_id: String, + public_key: PublicKey, +} + +impl AwsKmsKeyPair { + /// Load an AWS KMS asymmetric key. The key must exist, and must use one of the algorithms + /// supported by criticaltrust. 
+ pub fn new( + key_id: &str, + tokio_handle: Handle, + kms_client: Client, + role: KeyRole, + ) -> Result { + let public_key_response = + tokio_handle.block_on(kms_client.get_public_key().key_id(key_id).send())?; + + let public_key = match public_key_response.key_spec() { + Some(KeySpec::EccNistP256) + if public_key_response + .signing_algorithms() + .contains(&SigningAlgorithmSpec::EcdsaSha256) => + { + PublicKey { + role, + algorithm: KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, + expiry: None, + public: PublicKeyBytes::owned( + public_key_response + .public_key() + .unwrap() + .clone() + .into_inner(), + ), + } + } + _ => return Err(Error::UnsupportedKey), + }; + + Ok(Self { + handle: tokio_handle, + kms: kms_client, + key_id: key_id.into(), + public_key, + }) + } +} + +impl KeyPair for AwsKmsKeyPair { + fn public(&self) -> &PublicKey { + &self.public_key + } + + fn sign(&self, data: &PayloadBytes<'_>) -> Result, Error> { + let (digest, algorithm) = match self.public_key.algorithm { + KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer => ( + hash_sha256(data.as_bytes()), + SigningAlgorithmSpec::EcdsaSha256, + ), + KeyAlgorithm::Unknown => return Err(Error::UnsupportedKey), + }; + + let response = self.handle.block_on( + self.kms + .sign() + .key_id(&self.key_id) + .message(Blob::new(digest)) + .message_type(MessageType::Digest) + .signing_algorithm(algorithm) + .send(), + )?; + + Ok(SignatureBytes::owned( + response.signature().unwrap().clone().into_inner(), + )) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use aws_sdk_kms::config::Credentials; + use aws_sdk_kms::types::KeyUsageType; + use rand_core::{OsRng, RngCore}; + use std::process::Command; + use std::sync::Once; + use tokio::runtime::Runtime; + + // We want to have tests for all of criticaltrust, which makes testing the integration with AWS + // KMS quite tricky. To make it work, the tests for this module spawn a Docker container for + // "localstack", a local replica of AWS services meant for testing. 
+ + #[test] + fn test_roundtrip() { + let localstack = Localstack::init(); + let key = localstack.create_key(KeySpec::EccNistP256); + + let keypair = AwsKmsKeyPair::new( + &key, + localstack.runtime.handle().clone(), + localstack.client.clone(), + KeyRole::Root, + ) + .expect("failed to create key pair"); + + let payload = PayloadBytes::borrowed(b"Hello world"); + let signature = keypair.sign(&payload).expect("failed to sign"); + keypair + .public() + .verify(KeyRole::Root, &payload, &signature) + .expect("failed to verify"); + } + + #[test] + fn test_key_pair_with_unsupported_algorithm() { + let localstack = Localstack::init(); + let key = localstack.create_key(KeySpec::Rsa2048); + + let keypair = AwsKmsKeyPair::new( + &key, + localstack.runtime.handle().clone(), + localstack.client.clone(), + KeyRole::Root, + ); + assert!(matches!(keypair, Err(Error::UnsupportedKey))); + } + + struct Localstack { + runtime: Runtime, + client: Client, + container_name: String, + } + + impl Localstack { + fn init() -> Self { + let image = pull_localstack_docker_image(); + let container_name = format!("criticaltrust-localstack-{}", OsRng.next_u64()); + + run(Command::new("docker") + .arg("create") + .args(["--name", &container_name]) + .args(["-p", "4566"]) + .arg(image)); + run(Command::new("docker").args(["start", &container_name])); + + // localstack is bound to a random port to prevent conflicts between concurrent tests. + // We thus need to fetch the actual port number Docker assigned. 
+ let ports = run(Command::new("docker").args(["port", &container_name, "4566/tcp"])); + let port = std::str::from_utf8(&ports) + .expect("non-utf-8 output of docker port") + .split('\n') + .next() + .expect("empty output of docker port") + .rsplit_once(':') + .expect("invalid output of docker port") + .1; + + let runtime = Runtime::new().expect("failed to create tokio runtime"); + let aws_config = runtime.block_on( + aws_config::from_env() + // localstack doesn't validate IAM credentials, so we can configure a dummy + // secret key and region. + .credentials_provider(Credentials::new( + "aws_access_key_id", + "aws_secret_access_key", + None, + None, + "hardcoded", + )) + .region("us-east-1") + .load(), + ); + + let kms_config = aws_sdk_kms::config::Builder::from(&aws_config) + .endpoint_url(format!("http://localhost:{port}")) + .build(); + let client = aws_sdk_kms::Client::from_conf(kms_config); + + Self { + runtime, + client, + container_name, + } + } + + fn create_key(&self, spec: KeySpec) -> String { + self.runtime + .block_on( + self.client + .create_key() + .key_usage(KeyUsageType::SignVerify) + .key_spec(spec) + .send(), + ) + .expect("failed to create kms key") + .key_metadata() + .unwrap() + .key_id() + .into() + } + } + + impl Drop for Localstack { + fn drop(&mut self) { + run(Command::new("docker").args(["stop", &self.container_name, "-t", "10"])); + run(Command::new("docker").args(["rm", &self.container_name])); + } + } + + fn pull_localstack_docker_image() -> &'static str { + const IMAGE: &str = "localstack/localstack:2.2.0"; + static ONCE: Once = Once::new(); + + ONCE.call_once(|| { + run(Command::new("docker").args(["pull", IMAGE])); + }); + + IMAGE + } + + fn run(command: &mut Command) -> Vec { + let repr = format!("{command:?}"); + eprintln!("running {repr}"); + match command.output() { + Ok(output) if output.status.success() => { + eprintln!("finished running {repr}"); + output.stdout + } + Ok(output) => { + eprintln!("failed to run command: 
exited with {}", output.status); + eprintln!("command: {repr}"); + eprintln!( + "\nstdout:\n=====\n{}\n=====", + String::from_utf8_lossy(&output.stdout) + ); + eprintln!( + "\nstderr:\n=====\n{}\n=====", + String::from_utf8_lossy(&output.stderr) + ); + panic!(); + } + Err(err) => panic!("command failed to start ({err}: {repr})"), + } + } +} diff --git a/crates/criticaltrust/src/keys/pair_ephemeral.rs b/crates/criticaltrust/src/keys/pair_ephemeral.rs new file mode 100644 index 00000000..bf3a9058 --- /dev/null +++ b/crates/criticaltrust/src/keys/pair_ephemeral.rs @@ -0,0 +1,116 @@ +use crate::keys::newtypes::{PayloadBytes, PrivateKeyBytes, SignatureBytes}; +use crate::keys::{KeyAlgorithm, KeyPair, KeyRole, PublicKey}; +use crate::Error; +use time::OffsetDateTime; + +/// Pair of public and private keys generated at runtime and kept in memory. +/// +/// There is intentionally no way to persist the private key of ephemeral key pairs, as that's +/// considerably less secure than storing the key in a Hardware Security Module. Ephemeral key +/// pairs are primarily meant to be used during automated testing. +pub struct EphemeralKeyPair { + public: PublicKey, + private: PrivateKeyBytes<'static>, +} + +impl EphemeralKeyPair { + /// Generate a new key pair using the given algorithm and key role. 
+ pub fn generate( + algorithm: KeyAlgorithm, + role: KeyRole, + expiry: Option, + ) -> Result { + let private = algorithm.methods().generate_private_key()?; + + Ok(EphemeralKeyPair { + public: PublicKey { + role, + algorithm, + expiry, + public: algorithm + .methods() + .derive_public_key_from_private_key(&private)?, + }, + private, + }) + } +} + +impl KeyPair for EphemeralKeyPair { + fn public(&self) -> &PublicKey { + &self.public + } + + fn sign(&self, data: &PayloadBytes<'_>) -> Result, Error> { + self.public.algorithm.methods().sign(&self.private, data) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + const ALGORITHM: KeyAlgorithm = KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer; + + #[test] + fn test_unique_keys_are_generated() { + assert_ne!( + EphemeralKeyPair::generate(ALGORITHM, KeyRole::Root, None) + .unwrap() + .private, + EphemeralKeyPair::generate(ALGORITHM, KeyRole::Root, None) + .unwrap() + .private + ); + } + + #[test] + fn test_key_with_unknown_algorithm_is_not_generated() { + assert!(matches!( + EphemeralKeyPair::generate(KeyAlgorithm::Unknown, KeyRole::Root, None), + Err(Error::UnsupportedKey), + )); + } + + #[test] + fn test_public_key_is_expected() { + let key = EphemeralKeyPair::generate(ALGORITHM, KeyRole::Root, None).unwrap(); + + let public = key.public(); + assert_eq!(public.role, KeyRole::Root); + assert_eq!(public.algorithm, ALGORITHM); + assert_eq!( + public.public, + ALGORITHM + .methods() + .derive_public_key_from_private_key(&key.private) + .unwrap() + ); + } + + #[test] + fn test_signatures_are_valid() { + let key = EphemeralKeyPair::generate(ALGORITHM, KeyRole::Root, None).unwrap(); + let data = PayloadBytes::borrowed(b"Hello world"); + + // We can't verify the exact signature is what we expect, as each signature includes random + // data in it. Instead, we ensure it's correct. 
+ let signature = key.sign(&data).unwrap(); + + assert!(ALGORITHM + .methods() + .verify(&key.public.public, &data, &signature) + .is_ok()); + } + + #[test] + fn test_sign_with_unknown_algorithm_fails() { + let mut key = EphemeralKeyPair::generate(ALGORITHM, KeyRole::Root, None).unwrap(); + key.public.algorithm = KeyAlgorithm::Unknown; + + assert!(matches!( + key.sign(&PayloadBytes::borrowed(b"Hello world")), + Err(Error::UnsupportedKey) + )); + } +} diff --git a/crates/criticaltrust/src/keys/public.rs b/crates/criticaltrust/src/keys/public.rs new file mode 100644 index 00000000..8f5e9389 --- /dev/null +++ b/crates/criticaltrust/src/keys/public.rs @@ -0,0 +1,410 @@ +use super::newtypes::SignatureBytes; +use crate::keys::newtypes::{PayloadBytes, PublicKeyBytes}; +use crate::keys::KeyAlgorithm; +use crate::sha256::hash_sha256; +use crate::signatures::{PublicKeysRepository, Signable}; +use crate::Error; +use serde::{Deserialize, Serialize}; +use time::OffsetDateTime; + +/// Public key used for verification of signed payloads. +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)] +pub struct PublicKey { + pub role: KeyRole, + pub algorithm: KeyAlgorithm, + #[serde(with = "time::serde::rfc3339::option")] + pub expiry: Option, + #[serde(with = "crate::serde_base64")] + pub public: PublicKeyBytes<'static>, +} + +impl PublicKey { + /// Verify whether the provided payload matches the provided signature. Signature verification + /// could fail if: + /// + /// * The expected key role is different than the current key role. + /// * The current key expired. + /// * The signature doesn't match the payload. + /// * The signature wasn't performed by the current key. 
+ pub fn verify( + &self, + role: KeyRole, + payload: &PayloadBytes<'_>, + signature: &SignatureBytes<'_>, + ) -> Result<(), Error> { + if role != self.role || role == KeyRole::Unknown { + return Err(Error::VerificationFailed); + } + + if let Some(expiry) = self.expiry { + if OffsetDateTime::now_utc() > expiry { + return Err(Error::VerificationFailed); + } + } + + self.algorithm + .methods() + .verify(&self.public, payload, signature) + } + + /// Calculate and return the ID of this public key. This is a relatively expensive operation, + /// so it's better to cache or clone the resulting ID rather than recalculating it on the fly. + pub fn calculate_id(&self) -> KeyId { + KeyId(hash_sha256(self.public.as_bytes())) + } + + /// Checks whether this public key is supported by this version of CriticalUp. + pub fn is_supported(&self) -> bool { + self.role != KeyRole::Unknown && self.algorithm != KeyAlgorithm::Unknown + } +} + +impl PublicKeysRepository for PublicKey { + fn get<'a>(&'a self, id: &KeyId) -> Option<&'a PublicKey> { + if *id == self.calculate_id() { + Some(self) + } else { + None + } + } +} + +impl Signable for PublicKey { + const SIGNED_BY_ROLE: KeyRole = KeyRole::Root; +} + +/// Role of the key, used to determine which kinds of payloads the key is authorized to verify. +#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Copy, Clone)] +#[serde(rename_all = "kebab-case")] +pub enum KeyRole { + /// `releases` key role, used to sign releases. + Releases, + /// `packages` key role, used to sign packages. + Packages, + /// `redirects` key role, used to sign dynamic server redirects. + Redirects, + /// `root` key role, used to sign other keys. + Root, + #[serde(other)] + #[doc(hidden)] + Unknown, +} + +/// Opaque unique identifier for any given key. +/// +/// You can obtain it by calling [`PublicKey::calculate_id`]. 
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)] +#[serde(transparent)] +pub struct KeyId(#[serde(with = "crate::serde_base64")] Vec); + +#[cfg(test)] +mod tests { + use super::*; + use crate::keys::{EphemeralKeyPair, KeyPair}; + use crate::test_utils::base64_decode; + use time::Duration; + + const SAMPLE_PAYLOAD: PayloadBytes<'static> = PayloadBytes::borrowed(b"Hello world"); + const SAMPLE_KEY_ID: &str = "nvb7o7wel0FvL/hZ/P4yI3JJRfYYjTXZPpdV+xNQqTA="; + const SAMPLE_KEY: &str = "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEAGDPB8wZg17bAny3c0jPNg8wmnylcKtCLuPnX3GfwEQDf6ydkD1qnOPtMCZBh0P521Q5evvQ1e/rHsjrbBVPMQ=="; + const SAMPLE_SIGNATURE: &str = "MEYCIQC8MN8dk0jkZo1GIY8EZSaLpnDPUqR29E9eerKPjRyeJwIhAOd21m1VqpldE4kagUVZOUL0Pb/EZTQ0ry8ltbC446sh"; + + #[test] + fn test_verify_matches_with_no_expiration() { + let key = generate(KeyRole::Root, None); + let signature = key.sign(&SAMPLE_PAYLOAD).unwrap(); + + assert!(key + .public() + .verify(KeyRole::Root, &SAMPLE_PAYLOAD, &signature) + .is_ok()) + } + + #[test] + fn test_verify_matches_with_valid_expiration() { + let key = generate(KeyRole::Root, hours_diff(1)); + let signature = key.sign(&SAMPLE_PAYLOAD).unwrap(); + + assert!(key + .public() + .verify(KeyRole::Root, &SAMPLE_PAYLOAD, &signature) + .is_ok()); + } + + #[test] + fn test_verify_fails_with_different_role() { + let key = generate(KeyRole::Root, None); + let signature = key.sign(&SAMPLE_PAYLOAD).unwrap(); + + assert!(matches!( + key.public() + .verify(KeyRole::Packages, &SAMPLE_PAYLOAD, &signature), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_verify_fails_with_the_unknown_role() { + let key = generate(KeyRole::Unknown, None); + let signature = key.sign(&SAMPLE_PAYLOAD).unwrap(); + + assert!(matches!( + key.public() + .verify(KeyRole::Unknown, &SAMPLE_PAYLOAD, &signature), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_verify_fails_with_expired_key() { + let key = generate(KeyRole::Root, hours_diff(-1)); + let 
signature = key.sign(&SAMPLE_PAYLOAD).unwrap(); + + assert!(matches!( + key.public() + .verify(KeyRole::Root, &SAMPLE_PAYLOAD, &signature), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_verify_fails_with_incorrect_signature() { + let key = generate(KeyRole::Root, None); + let signature = key.sign(&SAMPLE_PAYLOAD).unwrap(); + + let mut bad_signature = signature.as_bytes().to_vec(); + *bad_signature.last_mut().unwrap() = bad_signature.last().unwrap().wrapping_add(1); + + assert!(matches!( + key.public().verify( + KeyRole::Root, + &SAMPLE_PAYLOAD, + &SignatureBytes::owned(bad_signature) + ), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_verify_fails_with_incorrect_payload() { + let key = generate(KeyRole::Root, None); + let signature = key.sign(&SAMPLE_PAYLOAD).unwrap(); + + assert!(matches!( + key.public().verify( + KeyRole::Root, + &PayloadBytes::borrowed("Hello world!".as_bytes()), + &signature + ), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_verify_fails_with_empty_signature() { + let key = generate(KeyRole::Root, None); + + assert!(matches!( + key.public().verify( + KeyRole::Root, + &SAMPLE_PAYLOAD, + &SignatureBytes::borrowed(&[]) + ), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_verify_fails_with_wrong_key() { + let key1 = generate(KeyRole::Root, None); + let key2 = generate(KeyRole::Root, None); + + let signature = key1.sign(&SAMPLE_PAYLOAD).unwrap(); + + assert!(matches!( + key2.public() + .verify(KeyRole::Root, &SAMPLE_PAYLOAD, &signature), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_verify_fails_with_unknown_algorithm() { + let key = generate(KeyRole::Root, None); + let signature = key.sign(&SAMPLE_PAYLOAD).unwrap(); + + let mut public = key.public().clone(); + public.algorithm = KeyAlgorithm::Unknown; + + assert!(matches!( + public.verify(KeyRole::Root, &SAMPLE_PAYLOAD, &signature), + Err(Error::UnsupportedKey) + )); + } + + #[test] + fn test_calculate_id() 
{ + let key = PublicKey { + role: KeyRole::Root, + algorithm: KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, + expiry: None, + public: PublicKeyBytes::owned(base64_decode(SAMPLE_KEY).unwrap()), + }; + assert_eq!( + key.calculate_id(), + // base64-encoded sha256 of the key above + KeyId(base64_decode(SAMPLE_KEY_ID).unwrap()) + ); + } + + #[test] + fn test_is_key_supported() { + let key = |role, algorithm| PublicKey { + role, + algorithm, + expiry: None, + public: PublicKeyBytes::owned(base64_decode(SAMPLE_KEY).unwrap()), + }; + + assert!(!key(KeyRole::Unknown, KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer).is_supported()); + assert!(!key(KeyRole::Root, KeyAlgorithm::Unknown).is_supported()); + assert!(!key(KeyRole::Unknown, KeyAlgorithm::Unknown).is_supported()); + + // Test just a few positive combinations + assert!(key(KeyRole::Root, KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer).is_supported()); + assert!(key(KeyRole::Packages, KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer).is_supported()); + } + + #[test] + fn test_verify_using_deserialized_key() { + let key: PublicKey = serde_json::from_str( + &r#"{ + "role": "root", + "algorithm": "ecdsa-p256-sha256-asn1-spki-der", + "expiry": null, + "public": "$$PUBLICKEY$$" + }"# + .replace("$$PUBLICKEY$$", SAMPLE_KEY), + ) + .unwrap(); + + // Ensure the key can verify messages signed with the corresponding private key. 
+ key.verify( + KeyRole::Root, + &SAMPLE_PAYLOAD, + &SignatureBytes::owned(base64_decode(SAMPLE_SIGNATURE).unwrap()), + ) + .unwrap(); + } + + #[test] + fn test_key_deserialization_without_expiry() { + let key: PublicKey = serde_json::from_str( + &r#"{ + "role": "root", + "algorithm": "ecdsa-p256-sha256-asn1-spki-der", + "expiry": null, + "public": "$$PUBLICKEY$$" + }"# + .replace("$$PUBLICKEY$$", SAMPLE_KEY), + ) + .unwrap(); + + assert_eq!(key.role, KeyRole::Root); + assert_eq!(key.algorithm, KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer); + assert_eq!(key.expiry, None); + assert_eq!(key.public.as_bytes(), base64_decode(SAMPLE_KEY).unwrap()); + } + + #[test] + fn test_key_deserialization_with_expiry() { + let key: PublicKey = serde_json::from_str( + &r#"{ + "role": "packages", + "algorithm": "ecdsa-p256-sha256-asn1-spki-der", + "expiry": "2022-03-18T12:04:00+01:00", + "public": "$$PUBLICKEY$$" + }"# + .replace("$$PUBLICKEY$$", SAMPLE_KEY), + ) + .unwrap(); + + assert_eq!(key.role, KeyRole::Packages); + assert_eq!(key.algorithm, KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer); + assert_eq!(key.expiry, Some(date("2022-03-18T12:04:00+01:00"))); + assert_eq!(key.public.as_bytes(), base64_decode(SAMPLE_KEY).unwrap()); + } + + #[test] + fn test_key_deserialization_with_unknown_algorithm() { + let key: PublicKey = serde_json::from_str( + &r#"{ + "role": "packages", + "algorithm": "morse-encoding", + "expiry": null, + "public": "$$PUBLICKEY$$" + }"# + .replace("$$PUBLICKEY$$", SAMPLE_KEY), + ) + .unwrap(); + + assert_eq!(key.role, KeyRole::Packages); + assert_eq!(key.algorithm, KeyAlgorithm::Unknown); + assert_eq!(key.expiry, None); + assert_eq!(key.public.as_bytes(), base64_decode(SAMPLE_KEY).unwrap()); + } + + #[test] + fn test_key_serialization_without_expiry() { + let key = PublicKey { + role: KeyRole::Root, + algorithm: KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, + expiry: None, + public: PublicKeyBytes::owned(base64_decode(SAMPLE_KEY).unwrap()), + }; + + assert_eq!( + r#"{ + 
"role": "root", + "algorithm": "ecdsa-p256-sha256-asn1-spki-der", + "expiry": null, + "public": "$$PUBLICKEY$$" +}"# + .replace("$$PUBLICKEY$$", SAMPLE_KEY), + serde_json::to_string_pretty(&key).unwrap() + ); + } + + #[test] + fn test_key_serialization_with_expiry() { + let key = PublicKey { + role: KeyRole::Root, + algorithm: KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, + expiry: Some(date("2022-03-18T12:04:00+01:00")), + public: PublicKeyBytes::owned(base64_decode(SAMPLE_KEY).unwrap()), + }; + + assert_eq!( + r#"{ + "role": "root", + "algorithm": "ecdsa-p256-sha256-asn1-spki-der", + "expiry": "2022-03-18T12:04:00+01:00", + "public": "$$PUBLICKEY$$" +}"# + .replace("$$PUBLICKEY$$", SAMPLE_KEY), + serde_json::to_string_pretty(&key).unwrap() + ); + } + + fn date(rfc3339: &str) -> OffsetDateTime { + OffsetDateTime::parse(rfc3339, &time::format_description::well_known::Rfc3339).unwrap() + } + + fn hours_diff(diff: i64) -> Option { + Some(OffsetDateTime::now_utc() + Duration::hours(diff)) + } + + fn generate(role: KeyRole, expiry: Option) -> EphemeralKeyPair { + EphemeralKeyPair::generate(KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, role, expiry).unwrap() + } +} diff --git a/crates/criticaltrust/src/lib.rs b/crates/criticaltrust/src/lib.rs new file mode 100644 index 00000000..d53f3c7f --- /dev/null +++ b/crates/criticaltrust/src/lib.rs @@ -0,0 +1,14 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] + +pub mod errors; +pub mod integrity; +pub mod keys; +pub mod manifests; +mod serde_base64; +mod sha256; +pub mod signatures; + +#[cfg(test)] +mod test_utils; + +pub use errors::Error; diff --git a/crates/criticaltrust/src/manifests.rs b/crates/criticaltrust/src/manifests.rs new file mode 100644 index 00000000..26f4fddf --- /dev/null +++ b/crates/criticaltrust/src/manifests.rs @@ -0,0 +1,195 @@ +//! Serializable and deserializable representation of criticaltrust manifests. 
+ +use crate::keys::{KeyRole, PublicKey}; +use crate::signatures::{Signable, SignedPayload}; +use serde::de::Error as _; +use serde::{Deserialize, Serialize}; + +/// Typed representation of a manifest version number. +/// +/// The version number is stored as a const generic rather than as a field of the struct. This is +/// done to: +/// +/// * Verify that the version number is correct as part of the deserialization process. +/// * Simplify constructing manifests: you don't have to specify the version number, type +/// inference will figure out the right one. +#[derive(Clone, PartialEq, Eq)] +pub struct ManifestVersion; + +impl std::fmt::Debug for ManifestVersion { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_tuple("ManifestVersion").field(&V).finish() + } +} + +impl Serialize for ManifestVersion { + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_u32(V) + } +} + +impl<'de, const V: u32> Deserialize<'de> for ManifestVersion { + fn deserialize>(deserializer: D) -> Result { + let raw = u32::deserialize(deserializer)?; + if raw != V { + Err(D::Error::custom(format!( + "expected version {V}, found version {raw}" + ))) + } else { + Ok(ManifestVersion) + } + } +} + +// Redirects + +#[derive(Debug, Serialize, Deserialize)] +pub struct RedirectManifest { + pub version: ManifestVersion<1>, + #[serde(flatten)] + pub payload: SignedPayload, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Redirect { + pub nonce: String, + pub to: String, +} + +impl Signable for Redirect { + const SIGNED_BY_ROLE: KeyRole = KeyRole::Redirects; +} + +// Releases + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ReleaseManifest { + pub version: ManifestVersion<1>, + #[serde(flatten)] + pub signed: SignedPayload, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Release { + pub product: String, + pub release: String, + pub commit: String, + pub packages: Vec, +} + +impl Signable for Release { + const 
SIGNED_BY_ROLE: KeyRole = KeyRole::Releases; +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ReleasePackage { + pub package: String, + pub artifacts: Vec, + pub dependencies: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ReleaseArtifact { + pub format: ReleaseArtifactFormat, + pub size: usize, + #[serde(with = "crate::serde_base64")] + pub sha256: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub enum ReleaseArtifactFormat { + #[serde(rename = "tar.zst")] + TarZst, + #[serde(rename = "tar.xz")] + TarXz, + #[serde(other)] + #[doc(hidden)] + Unknown, +} + +impl std::fmt::Display for ReleaseArtifactFormat { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let s = match self { + ReleaseArtifactFormat::TarZst => "tar.zst", + ReleaseArtifactFormat::TarXz => "tar.xz", + ReleaseArtifactFormat::Unknown => "unknown", + }; + write!(f, "{}", s) + } +} + +// Packages + +#[derive(Debug, Serialize, Deserialize)] +pub struct PackageManifest { + pub version: ManifestVersion<1>, + #[serde(flatten)] + pub signed: SignedPayload, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Package { + pub product: String, + pub package: String, + pub commit: String, + pub files: Vec, + pub managed_prefixes: Vec, +} + +impl Signable for Package { + const SIGNED_BY_ROLE: KeyRole = KeyRole::Packages; +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct PackageFile { + pub path: String, + pub posix_mode: u32, + #[serde(with = "crate::serde_base64")] + pub sha256: Vec, + pub needs_proxy: bool, +} + +// Keys + +#[derive(Debug, Serialize, Deserialize)] +pub struct KeysManifest { + pub version: ManifestVersion<1>, + pub keys: Vec>, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_manifest_version_debug() { + assert_eq!("ManifestVersion(1)", format!("{:?}", ManifestVersion::<1>)); + assert_eq!( + 
"ManifestVersion(42)", + format!("{:?}", ManifestVersion::<42>) + ); + } + + #[test] + fn test_manifest_version_serialize() { + assert_eq!("1", serde_json::to_string(&ManifestVersion::<1>).unwrap()); + assert_eq!("42", serde_json::to_string(&ManifestVersion::<42>).unwrap()); + } + + #[test] + fn test_manifest_version_deserialize() { + assert_eq!( + ManifestVersion, + serde_json::from_str::>("1").unwrap() + ); + assert_eq!( + ManifestVersion, + serde_json::from_str::>("42").unwrap() + ); + + assert!(serde_json::from_str::>("42").is_err()); + assert!(serde_json::from_str::>("1").is_err()); + } +} diff --git a/crates/criticaltrust/src/serde_base64.rs b/crates/criticaltrust/src/serde_base64.rs new file mode 100644 index 00000000..bae15219 --- /dev/null +++ b/crates/criticaltrust/src/serde_base64.rs @@ -0,0 +1,111 @@ +//! Wrapper functions to encode/decode a string as base64 during serialization and deserialization. +//! The module is supposed to be used by adding `#[serde(with = "crate::serde_base64")]` to each +//! field you want to encode/decode as base64. 
+ +use base64::engine::GeneralPurpose; +use base64::Engine; +use serde::de::Visitor; +use serde::{Deserializer, Serializer}; +use std::marker::PhantomData; + +const ENGINE: &GeneralPurpose = &base64::engine::general_purpose::STANDARD; + +pub(crate) fn serialize(value: &T, serializer: S) -> Result +where + T: SerdeBase64, + S: Serializer, +{ + serializer.serialize_str(&ENGINE.encode(value.to_bytes())) +} + +pub(crate) fn deserialize<'de, T, D>(deserializer: D) -> Result +where + T: SerdeBase64, + D: Deserializer<'de>, +{ + struct Base64Visitor(PhantomData); + + impl<'de, T: SerdeBase64> Visitor<'de> for Base64Visitor { + type Value = T; + + fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "a base64-encoded string") + } + + fn visit_str(self, v: &str) -> Result { + let decoded = ENGINE.decode(v).map_err(E::custom)?; + T::from_bytes(decoded).map_err(E::custom) + } + } + + deserializer.deserialize_str(Base64Visitor(PhantomData)) +} + +pub(crate) trait SerdeBase64: Sized { + fn from_bytes(bytes: Vec) -> Result; + fn to_bytes(&self) -> &[u8]; +} + +impl SerdeBase64 for Vec { + fn from_bytes(bytes: Vec) -> Result { + Ok(bytes) + } + + fn to_bytes(&self) -> &[u8] { + self.as_slice() + } +} + +impl SerdeBase64 for String { + fn from_bytes(bytes: Vec) -> Result { + String::from_utf8(bytes).map_err(|e| e.to_string()) + } + + fn to_bytes(&self) -> &[u8] { + self.as_bytes() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde::{Deserialize, Serialize}; + + #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)] + struct Foo { + plain: T, + #[serde(with = "crate::serde_base64")] + encoded: T, + } + + #[test] + fn test_serialize_deserialize_string() { + let initial = Foo { + plain: "Foo bar".into(), + encoded: "Hello world".into(), + }; + + let encoded = serde_json::to_string(&initial).unwrap(); + assert_eq!( + "{\"plain\":\"Foo bar\",\"encoded\":\"SGVsbG8gd29ybGQ=\"}", + encoded + ); + + let decoded: Foo = 
serde_json::from_str(&encoded).unwrap(); + assert_eq!(initial, decoded); + } + + #[test] + fn test_serialize_deserialize_vec_u8() { + let initial = Foo { + plain: vec![1, 2, 3, 4], + encoded: vec![5, 6, 7, 8], + }; + + let encoded = serde_json::to_string(&initial).unwrap(); + assert_eq!("{\"plain\":[1,2,3,4],\"encoded\":\"BQYHCA==\"}", encoded); + + let decoded: Foo> = serde_json::from_str(&encoded).unwrap(); + assert_eq!(initial, decoded); + } +} diff --git a/crates/criticaltrust/src/sha256.rs b/crates/criticaltrust/src/sha256.rs new file mode 100644 index 00000000..c219744f --- /dev/null +++ b/crates/criticaltrust/src/sha256.rs @@ -0,0 +1,28 @@ +use sha2::{Digest, Sha256}; + +/// Helper function to hash bytes with SHA256. It's a simple wrapper on top of the sha2 crate, +/// turning the three method calls into a function call. +pub(crate) fn hash_sha256(contents: &[u8]) -> Vec { + let mut hasher = Sha256::new(); + hasher.update(contents); + hasher.finalize().to_vec() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_hash_sha256() { + const PLAINTEXT: &str = "Hello world"; + const HASHED: &str = "64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c"; + + let hashed = hash_sha256(PLAINTEXT.as_bytes()); + + let mut hashed_hex = String::new(); + for byte in hashed { + hashed_hex.push_str(&format!("{byte:0>2x}")); + } + assert_eq!(HASHED, hashed_hex); + } +} diff --git a/crates/criticaltrust/src/signatures/keychain.rs b/crates/criticaltrust/src/signatures/keychain.rs new file mode 100644 index 00000000..598a9d74 --- /dev/null +++ b/crates/criticaltrust/src/signatures/keychain.rs @@ -0,0 +1,171 @@ +use crate::keys::{KeyId, KeyRole, PublicKey}; +use crate::signatures::{PublicKeysRepository, SignedPayload}; +use crate::Error; +use std::collections::HashMap; + +/// Collection of all trusted public keys. 
+pub struct Keychain { + keys: HashMap, +} + +impl Keychain { + /// Create a new keychain, using the provided public key as the root of trust. + /// + /// The root of trust has to have the `root` key role, and all future keys added to the + /// keychain will have to be signed by either the root of trust or another key signed by the + /// root of trust. + pub fn new(trust_root: &PublicKey) -> Result { + let mut keychain = Self { + keys: HashMap::new(), + }; + + if trust_root.role != KeyRole::Root { + return Err(Error::WrongKeyRoleForTrustRoot(trust_root.role)); + } + keychain.load_inner(trust_root)?; + + Ok(keychain) + } + + /// Add a new signed key to the keychain. + /// + /// The key has to be signed by either the root of trust or another key with the root role + /// already part of the keychain. + pub fn load(&mut self, key: &SignedPayload) -> Result { + let key = key.get_verified(self)?; + self.load_inner(&key) + } + + fn load_inner(&mut self, key: &PublicKey) -> Result { + if !key.is_supported() { + return Err(Error::UnsupportedKey); + } + let id = key.calculate_id(); + self.keys.insert(id.clone(), key.clone()); + Ok(id) + } +} + +impl PublicKeysRepository for Keychain { + fn get<'a>(&'a self, id: &KeyId) -> Option<&'a PublicKey> { + self.keys.get(id) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::keys::{EphemeralKeyPair, KeyAlgorithm, KeyPair}; + + #[test] + fn test_new_with_root_key_as_trust_root() { + let root = generate_key(KeyRole::Root); + + let keychain = Keychain::new(root.public()).unwrap(); + assert_eq!( + Some(root.public()), + keychain.get(&root.public().calculate_id()) + ); + } + + #[test] + fn test_new_with_non_root_key_as_trust_root() { + let non_root = generate_key(KeyRole::Packages); + + assert!(matches!( + Keychain::new(non_root.public()), + Err(Error::WrongKeyRoleForTrustRoot(KeyRole::Packages)) + )); + } + + #[test] + fn test_add_key_trusted_by_root() { + let root = generate_key(KeyRole::Root); + let mut keychain = 
Keychain::new(root.public()).unwrap(); + + let (key, public) = generate_trusted_key(KeyRole::Packages, &root); + keychain.load(&public).unwrap(); + + assert_eq!( + Some(key.public()), + keychain.get(&key.public().calculate_id()) + ); + } + + #[test] + fn test_add_key_trusted_by_root_key_trusted_by_root() { + let root = generate_key(KeyRole::Root); + let mut keychain = Keychain::new(root.public()).unwrap(); + + let (key1, public1) = generate_trusted_key(KeyRole::Root, &root); + keychain.load(&public1).unwrap(); + + let (key2, public2) = generate_trusted_key(KeyRole::Packages, &key1); + keychain.load(&public2).unwrap(); + + assert_eq!( + Some(key2.public()), + keychain.get(&key2.public().calculate_id()) + ); + } + + #[test] + fn test_add_key_trusted_by_non_root_key_trusted_by_root() { + let root = generate_key(KeyRole::Root); + let mut keychain = Keychain::new(root.public()).unwrap(); + + let (key1, public1) = generate_trusted_key(KeyRole::Packages, &root); + keychain.load(&public1).unwrap(); + + let (_, public2) = generate_trusted_key(KeyRole::Packages, &key1); + assert!(matches!( + keychain.load(&public2), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_add_key_trusted_by_nothing_else() { + let mut keychain = Keychain::new(generate_key(KeyRole::Root).public()).unwrap(); + + let another_root = generate_key(KeyRole::Root); + let (_, public) = generate_trusted_key(KeyRole::Packages, &another_root); + assert!(matches!( + keychain.load(&public), + Err(Error::VerificationFailed) + )); + } + + #[test] + fn test_add_key_with_unsupported_algorithm() { + let root = generate_key(KeyRole::Root); + let mut keychain = Keychain::new(root.public()).unwrap(); + + let mut other: SignedPayload = SignedPayload::new( + &serde_json::from_str( + r#"{"algorithm": "foo", "role": "root", "expiry": null, "public": "aGk="}"#, + ) + .unwrap(), + ) + .unwrap(); + other.add_signature(&root).unwrap(); + + assert!(matches!(keychain.load(&other), Err(Error::UnsupportedKey))); + } + 
+ // Utilities + + fn generate_key(role: KeyRole) -> EphemeralKeyPair { + EphemeralKeyPair::generate(KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, role, None).unwrap() + } + + fn generate_trusted_key( + role: KeyRole, + trusted_by: &dyn KeyPair, + ) -> (EphemeralKeyPair, SignedPayload) { + let key = generate_key(role); + let mut payload = SignedPayload::new(key.public()).unwrap(); + payload.add_signature(trusted_by).unwrap(); + (key, payload) + } +} diff --git a/crates/criticaltrust/src/signatures/mod.rs b/crates/criticaltrust/src/signatures/mod.rs new file mode 100644 index 00000000..99664843 --- /dev/null +++ b/crates/criticaltrust/src/signatures/mod.rs @@ -0,0 +1,11 @@ +//! High-level abstraction for key management and digital signature verification. +//! +//! This module provides [`SignedPayload`], a wrapper around digitally signed payloads that +//! enforces signatures are properly verified before the inner contents are accessible. +//! [`Keychain`] is also provided to establish a root of trust. + +mod keychain; +mod payload; + +pub use keychain::Keychain; +pub use payload::{PublicKeysRepository, Signable, SignedPayload}; diff --git a/crates/criticaltrust/src/signatures/payload.rs b/crates/criticaltrust/src/signatures/payload.rs new file mode 100644 index 00000000..febef7fc --- /dev/null +++ b/crates/criticaltrust/src/signatures/payload.rs @@ -0,0 +1,453 @@ +use crate::keys::newtypes::{PayloadBytes, SignatureBytes}; +use crate::keys::{KeyId, KeyPair, KeyRole, PublicKey}; +use crate::Error; +use serde::{Deserialize, Serialize}; +use std::cell::{Ref, RefCell}; + +/// Piece of data with signatures attached to it. +/// +/// To prevent misuses, there is no way to access the data inside the payload unless signatures are +/// verified. The signed payload can be freely serialized and deserialized. 
+#[derive(Serialize, Deserialize, Clone)] +#[serde(bound = "T: Signable")] +pub struct SignedPayload { + signatures: Vec, + signed: String, + #[serde(skip)] + verified_deserialized: RefCell>, +} + +impl std::fmt::Debug for SignedPayload { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("SignedPayload") + .field("signatures", &self.signatures) + .field("signed", &self.signed) + .finish_non_exhaustive() + } +} + +impl SignedPayload { + /// Create a new signed payload. Note that no signature is generated by this method call: + /// you'll also need to call [`add_signature`](Self::add_signature) with a valid [`KeyPair`] to + /// generate a valid signed payload. + pub fn new(to_sign: &T) -> Result { + Ok(Self { + signatures: Vec::new(), + signed: serde_json::to_string(to_sign) + .map_err(Error::SignedPayloadSerializationFailed)?, + verified_deserialized: RefCell::new(None), + }) + } + + /// Add a new signature to this signed payload, generated using the provided [`KeyPair`]. + pub fn add_signature(&mut self, keypair: &dyn KeyPair) -> Result<(), Error> { + self.signatures.push(Signature { + key_sha256: keypair.public().calculate_id(), + signature: keypair.sign(&PayloadBytes::borrowed(self.signed.as_bytes()))?, + }); + Ok(()) + } + + /// Verifies the signatures attached to the signed payload and returns the deserialized data + /// (if the signature matched). + /// + /// As signature verification and deserialization is expensive, it is only performed the first + /// time the method is called. The cached results from the initial call will be returned in the + /// rest of the cases.
+ pub fn get_verified(&self, keys: &dyn PublicKeysRepository) -> Result, Error> { + let borrow = self.verified_deserialized.borrow(); + + if borrow.is_none() { + let value = verify_signature( + keys, + &self.signatures, + PayloadBytes::borrowed(self.signed.as_bytes()), + )?; + + // In theory, `borrow_mut()` could panic if an immutable borrow was alive at the same + // time. In practice that won't happen, as we only populate the cache before returning + // any reference to the cached data. + drop(borrow); + *self.verified_deserialized.borrow_mut() = Some(value); + } + + Ok(Ref::map(self.verified_deserialized.borrow(), |b| { + b.as_ref().unwrap() + })) + } + + /// Consumes the signed payload and returns the deserialized payload. + /// + /// If the signature verification was already performed before (through the + /// [`get_verified`](Self::get_verified) method), the cached deserialized payload will be + /// returned. Otherwise, signature verification will be performed with the provided keychain + /// before deserializing. + pub fn into_verified(self, keys: &dyn PublicKeysRepository) -> Result { + if let Some(deserialized) = self.verified_deserialized.into_inner() { + Ok(deserialized) + } else { + verify_signature( + keys, + &self.signatures, + PayloadBytes::borrowed(self.signed.as_bytes()), + ) + } + } +} + +fn verify_signature( + keys: &dyn PublicKeysRepository, + signatures: &[Signature], + signed: PayloadBytes<'_>, +) -> Result { + for signature in signatures { + let key = match keys.get(&signature.key_sha256) { + Some(key) => key, + None => continue, + }; + + match key.verify(T::SIGNED_BY_ROLE, &signed, &signature.signature) { + Ok(()) => {} + Err(Error::VerificationFailed) => continue, + Err(other) => return Err(other), + } + + // Deserialization is performed after the signature is verified, to ensure we are not + // deserializing malicious data. 
+ return serde_json::from_slice(signed.as_bytes()).map_err(Error::DeserializationFailed); + } + + Err(Error::VerificationFailed) +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct Signature { + key_sha256: KeyId, + #[serde(with = "crate::serde_base64")] + signature: SignatureBytes<'static>, +} + +/// Trait representing contents that can be wrapped in a [`SignedPayload`]. +pub trait Signable: Serialize + for<'de> Deserialize<'de> { + /// Key role authorized to verify this type. + const SIGNED_BY_ROLE: KeyRole; +} + +/// Trait representing a collection of public keys that can be used to verify signatures. +/// +/// You likely want to use a [`Keychain`](crate::signatures::Keychain) as the public keys +/// repository, as it allows to establish a root of trust and supports multiple keys. For simple +/// cases or tests, individual [`PublicKey`]s also implement this trait. +pub trait PublicKeysRepository { + /// Retrieve a key by its ID. + fn get<'a>(&'a self, id: &KeyId) -> Option<&'a PublicKey>; +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::keys::{EphemeralKeyPair, PublicKey}; + use crate::signatures::Keychain; + use crate::test_utils::{base64_encode, TestEnvironment}; + + const SAMPLE_DATA: &str = r#"{"answer":42}"#; + + #[test] + fn tets_verify_no_signatures() { + let test_env = TestEnvironment::prepare(); + assert_verify_fail(&test_env, &[]); + } + + #[test] + fn test_verify_one_valid_signature() { + let mut test_env = TestEnvironment::prepare(); + + let key = test_env.create_key(KeyRole::Packages); + assert_verify_pass(&test_env, &[&key]); + } + + #[test] + fn test_verify_multiple_valid_signatures() { + let mut test_env = TestEnvironment::prepare(); + + let key1 = test_env.create_key(KeyRole::Packages); + let key2 = test_env.create_key(KeyRole::Packages); + + assert_verify_pass(&test_env, &[&key1, &key2]); + assert_verify_pass(&test_env, &[&key2, &key1]); + } + + // Key roles + + #[test] + fn test_verify_with_invalid_key_role() { + let mut 
test_env = TestEnvironment::prepare(); + + let key = test_env.create_key(KeyRole::Redirects); + assert_verify_fail(&test_env, &[&key]); + } + + #[test] + fn test_verify_with_invalid_and_valid_key_roles() { + let mut test_env = TestEnvironment::prepare(); + + let valid = test_env.create_key(KeyRole::Packages); + let invalid = test_env.create_key(KeyRole::Redirects); + assert_verify_pass(&test_env, &[&valid, &invalid]); + assert_verify_pass(&test_env, &[&invalid, &valid]); + } + + // Trusted/untrusted + #[test] + fn test_verify_with_untrusted_key() { + let test_env = TestEnvironment::prepare(); + + let untrusted = test_env.create_untrusted_key(KeyRole::Packages); + assert_verify_fail(&test_env, &[&untrusted]); + } + + #[test] + fn test_verify_with_trusted_and_untrusted_keys() { + let mut test_env = TestEnvironment::prepare(); + + let trusted = test_env.create_key(KeyRole::Packages); + let untrusted = test_env.create_untrusted_key(KeyRole::Packages); + + assert_verify_pass(&test_env, &[&trusted, &untrusted]); + assert_verify_pass(&test_env, &[&untrusted, &trusted]); + } + + #[test] + fn test_verify_with_subset_of_trusted_keys() { + let mut test_env = TestEnvironment::prepare(); + + let used_key = test_env.create_key(KeyRole::Packages); + let _other_trusted_key = test_env.create_key(KeyRole::Packages); + + assert_verify_pass(&test_env, &[&used_key]); + } + + // Expiry + + #[test] + fn test_verify_with_expired_key() { + let mut test_env = TestEnvironment::prepare(); + + let expired = test_env.create_key_with_expiry(KeyRole::Packages, -1); + assert_verify_fail(&test_env, &[&expired]); + } + + #[test] + fn test_verify_with_not_expired_key() { + let mut env = TestEnvironment::prepare(); + + let not_expired = env.create_key_with_expiry(KeyRole::Packages, 1); + assert_verify_pass(&env, &[¬_expired]); + } + + #[test] + fn test_verify_with_expired_and_not_expired_keys() { + let mut test_env = TestEnvironment::prepare(); + + let expired = 
test_env.create_key_with_expiry(KeyRole::Packages, -1); + let not_expired = test_env.create_key_with_expiry(KeyRole::Packages, 1); + + assert_verify_pass(&test_env, &[&expired, ¬_expired]); + assert_verify_pass(&test_env, &[¬_expired, &expired]); + } + + // Signature + + #[test] + fn test_verify_with_bad_signature() { + let mut test_env = TestEnvironment::prepare(); + + let bad = BadKeyPair(test_env.create_key(KeyRole::Packages)); + assert_verify_fail(&test_env, &[&bad]); + } + + #[test] + fn test_verify_with_bad_and_good_signature() { + let mut test_env = TestEnvironment::prepare(); + + let bad = BadKeyPair(test_env.create_key(KeyRole::Packages)); + let good = test_env.create_key(KeyRole::Packages); + assert_verify_pass(&test_env, &[&bad, &good]); + assert_verify_pass(&test_env, &[&good, &bad]); + } + + // Caching + + #[test] + fn test_caching() { + let mut test_env = TestEnvironment::prepare(); + + let key = test_env.create_key(KeyRole::Packages); + let payload = prepare_payload(&[&key], SAMPLE_DATA); + + assert_eq!( + 42, + payload.get_verified(test_env.keychain()).unwrap().answer + ); + + // If there was no caching, this method call would fail, as there is no valid key to + // perform verification in an empty keychain. Still, since there is a cache no signature + // verification is performed and the previous result is returned. 
+ assert_eq!( + 42, + payload + .get_verified(TestEnvironment::prepare().keychain()) + .unwrap() + .answer + ); + } + + // Misc tests + + #[test] + fn test_deserialization_failed() { + let mut test_env = TestEnvironment::prepare(); + let key = test_env.create_key(KeyRole::Packages); + + let payload = prepare_payload(&[&key], r#"{"answer": 42"#); + assert!(matches!( + payload.get_verified(test_env.keychain()), + Err(Error::DeserializationFailed(_)) + )); + + let payload = prepare_payload(&[&key], r#"{"answer": 42"#); + assert!(matches!( + payload.into_verified(test_env.keychain()), + Err(Error::DeserializationFailed(_)) + )); + } + + #[test] + fn test_verify_deserialized() { + let mut keychain = Keychain::new( + &serde_json::from_str( + r#"{ + "role": "root", + "algorithm": "ecdsa-p256-sha256-asn1-spki-der", + "expiry": null, + "public": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE+S7QgNLkBo2VEMdZXowZUFmvQJMm6qoQtC33hvDB95HpjPXd50eBEUnEuVRye5qC84K7ZHpoAXWf5BzmcFtvVg==" + }"#, + ) + .unwrap(), + ).unwrap(); + + keychain.load( + &serde_json::from_str( + r#"{ + "signatures": [ + { + "key_sha256": "oWLXbXl20A0Z5MNOcEC4vNjHxT3IHAo9ExDYMAyHatU=", + "signature": "MEUCIQDY3xkoVYowUQBSnHddpWVdlG9FufeucTasX9YJNOzPsQIgRj99gqJioVB6TLa9gdmPezFG68CC+tAkqGA9GwfVurs=" + } + ], + "signed": "{\"role\":\"packages\",\"algorithm\":\"ecdsa-p256-sha256-asn1-spki-der\",\"expiry\":null,\"public\":\"MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAExmWCqNu5ClVwVgoMYU/cRUTTohljVT5yJy5InJPzXaXRQS7zT5WaTUxzJQqfDc7+nUgEZ6Z6XbxzG72yffrckA==\"}" + }"#, + ) + .unwrap(), + ).unwrap(); + + let payload: SignedPayload = serde_json::from_str( + r#"{ + "signatures": [ + { + "key_sha256": "xzcGUBKHYDGbucyvirl6dhsDXPCxQR/4/PRKiL9Qz2A=", + "signature": "MEYCIQCToeOQpzoZxYSBaBcb1Ko+NFtr4/fmLwaTrrvuWagzQgIhAO8AvDZHk+osFj0Wag5MU9CzQeXgCi4Cr8FCk4KhKVX6" + } + ], + "signed": "{\"answer\":42}" + }"#, + ).unwrap(); + + assert_eq!(42, payload.get_verified(&keychain).unwrap().answer); + } + + // Utilities + + #[track_caller] + fn 
assert_verify_pass(test_env: &TestEnvironment, keys: &[&dyn KeyPair]) { + let get_payload = prepare_payload(keys, SAMPLE_DATA); + assert_eq!( + 42, + get_payload + .get_verified(test_env.keychain()) + .unwrap() + .answer + ); + + // Two separate payloads are used to avoid caching. + let into_payload = prepare_payload(keys, SAMPLE_DATA); + assert_eq!( + 42, + into_payload + .into_verified(test_env.keychain()) + .unwrap() + .answer + ); + } + + #[track_caller] + fn assert_verify_fail(test_env: &TestEnvironment, keys: &[&dyn KeyPair]) { + let get_payload = prepare_payload(keys, SAMPLE_DATA); + assert!(matches!( + get_payload.get_verified(test_env.keychain()).unwrap_err(), + Error::VerificationFailed + )); + + // Two separate payloads are used to avoid caching. + let into_payload = prepare_payload(keys, SAMPLE_DATA); + assert!(matches!( + into_payload.into_verified(test_env.keychain()).unwrap_err(), + Error::VerificationFailed + )); + } + + fn prepare_payload(keys: &[&dyn KeyPair], data: &str) -> SignedPayload { + serde_json::from_value(serde_json::json!({ + "signatures": keys + .iter() + .map(|key| { + serde_json::json!({ + "key_sha256": key.public().calculate_id(), + "signature": base64_encode(key.sign( + &PayloadBytes::borrowed(data.as_bytes()) + ).unwrap().as_bytes()), + }) + }) + .collect::>(), + "signed": data + })) + .unwrap() + } + + #[derive(Debug, Serialize, Deserialize)] + struct TestData { + answer: i32, + } + + impl Signable for TestData { + const SIGNED_BY_ROLE: KeyRole = KeyRole::Packages; + } + + struct BadKeyPair(EphemeralKeyPair); + + impl KeyPair for BadKeyPair { + fn public(&self) -> &PublicKey { + self.0.public() + } + + fn sign(&self, data: &PayloadBytes<'_>) -> Result, Error> { + let signature = self.0.sign(data)?; + let mut broken_signature = signature.as_bytes().to_vec(); + for byte in &mut broken_signature { + *byte = byte.wrapping_add(1); + } + + Ok(SignatureBytes::owned(broken_signature)) + } + } +} diff --git 
a/crates/criticaltrust/src/test_utils.rs b/crates/criticaltrust/src/test_utils.rs new file mode 100644 index 00000000..6e3d1538 --- /dev/null +++ b/crates/criticaltrust/src/test_utils.rs @@ -0,0 +1,60 @@ +use crate::keys::{EphemeralKeyPair, KeyAlgorithm, KeyPair, KeyRole, PublicKey}; +use crate::signatures::{Keychain, SignedPayload}; +use base64::Engine; +use time::{Duration, OffsetDateTime}; + +const ALGORITHM: KeyAlgorithm = KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer; + +pub(crate) struct TestEnvironment { + root: EphemeralKeyPair, + keychain: Keychain, +} + +impl TestEnvironment { + pub(crate) fn prepare() -> Self { + let root = EphemeralKeyPair::generate(ALGORITHM, KeyRole::Root, None).unwrap(); + let keychain = Keychain::new(root.public()).unwrap(); + + Self { root, keychain } + } + + pub(crate) fn keychain(&self) -> &Keychain { + &self.keychain + } + + pub(crate) fn create_untrusted_key(&self, role: KeyRole) -> EphemeralKeyPair { + EphemeralKeyPair::generate(ALGORITHM, role, None).unwrap() + } + + pub(crate) fn create_key(&mut self, role: KeyRole) -> EphemeralKeyPair { + let key = self.create_untrusted_key(role); + self.sign_and_add_key(key.public()); + key + } + + pub(crate) fn create_key_with_expiry( + &mut self, + role: KeyRole, + expiry_diff_hours: i64, + ) -> EphemeralKeyPair { + let expiry = OffsetDateTime::now_utc() + Duration::hours(expiry_diff_hours); + let key = EphemeralKeyPair::generate(ALGORITHM, role, Some(expiry)).unwrap(); + self.sign_and_add_key(key.public()); + key + } + + fn sign_and_add_key(&mut self, key: &PublicKey) { + let mut payload = SignedPayload::new(key).unwrap(); + payload.add_signature(&self.root).unwrap(); + + self.keychain.load(&payload).unwrap(); + } +} + +pub(crate) fn base64_encode(data: &[u8]) -> String { + base64::engine::general_purpose::STANDARD.encode(data) +} + +pub(crate) fn base64_decode(encoded: &str) -> Result, base64::DecodeError> { + base64::engine::general_purpose::STANDARD.decode(encoded) +} diff --git 
a/crates/criticalup-cli/Cargo.toml b/crates/criticalup-cli/Cargo.toml new file mode 100644 index 00000000..fe05eef3 --- /dev/null +++ b/crates/criticalup-cli/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "criticalup-cli" +version = "0.0.0" +edition = "2021" +repository = "https://github.com/ferrocene/criticalup" +homepage = "https://github.com/ferrocene/criticalup" +authors = ["The CriticalUp Developers"] +description = "CriticalUp is the official tool to download and install Ferrocene" + +[package.metadata.dist] +dist = false + +[[test]] +name = "cli" +path = "tests/cli/main.rs" + +[dependencies] +atty = "0.2.14" +clap = { version = "4.2.4", features = ["std", "derive", "help", "usage"] } +criticaltrust = { path = "../criticaltrust" } +criticalup-core = { path = "../criticalup-core" } +owo-colors = { version = "4.0.0", default-features = false, features = ["supports-colors"] } +serde_json = "1.0.79" +tar = "0.4.40" +thiserror = "1.0.30" +xz2 = "0.1.7" + +[dev-dependencies] +insta = { version = "1.12.0", features = ["filters"] } +mock-download-server = { path = "../mock-download-server" } +serde = { version = "1.0.136", features = ["derive"] } +tempfile = "3.3.0" +regex = "1.7.0" + +[target.x86_64-pc-windows-msvc.dependencies] +windows-sys = { version = "0.52.0", features = ["Win32_Foundation", "Win32_System_Console"] } +winapi = "0.3.9" diff --git a/crates/criticalup-cli/README.md b/crates/criticalup-cli/README.md new file mode 100644 index 00000000..a4cd2503 --- /dev/null +++ b/crates/criticalup-cli/README.md @@ -0,0 +1,15 @@ +`criticalup-cli` +---------------- + +A command line tool similar to `rustup` to manage installations of Ferrocene toolchains. + +> [!NOTE] +> The documentation here is primarily intended for developers of the `criticalup-cli` crate. +> +> Ferrocene users should refer to [the documentation][ferrocene-public-docs] for all their needs. + +`criticalup-cli` is a library used for *whitelabel-able* binaries, and should not be directly installed. 
+ +In general, developers and users will use either [`criticalup`](../criticalup/) or [`criticalup-dev`](../criticalup-dev/). + +This crate is *technically* installable, however the binary `criticalup-test` is only intended for the test suite and should not be used by developers or users. diff --git a/crates/criticalup-cli/src/bin/criticalup-test.rs b/crates/criticalup-cli/src/bin/criticalup-test.rs new file mode 100644 index 00000000..94b64f71 --- /dev/null +++ b/crates/criticalup-cli/src/bin/criticalup-test.rs @@ -0,0 +1,26 @@ +//! Variant of the criticalup binary with mocking support, used by the criticalup test suite to +//! perform tests without connecting to the production download servers. + +fn main() { + if std::env::var_os("CRITICALUP_TESTING_IN_PROGRESS").is_none() { + panic!("This is an internal test tool. Do not run manually."); + } + + let whitelabel = criticalup_cli::WhitelabelConfig { + name: "criticalup-test", + http_user_agent: concat!("criticalup-test/", env!("CARGO_PKG_VERSION")), + download_server_url: std::env::var("CRITICALUP_TEST_DOWNLOAD_SERVER_URL") + .expect("missing CRITICALUP_TEST_DOWNLOAD_SERVER_URL"), + customer_portal_url: std::env::var("CRITICALUP_TEST_CUSTOMER_PORTAL_URL") + .expect("missing CRITICALUP_TEST_CUSTOMER_PORTAL_URL"), + trust_root: serde_json::from_str( + &std::env::var("CRITICALUP_TEST_TRUST_ROOT") + .expect("missing CRITICALUP_TEST_TRUST_ROOT"), + ) + .expect("CRITICALUP_TEST_TRUST_ROOT should be a valid JSON encoded PublicKey object"), + test_mode: true, + }; + + let args = std::env::args_os().collect::>(); + std::process::exit(criticalup_cli::main(whitelabel, &args)); +} diff --git a/crates/criticalup-cli/src/binary_proxies.rs b/crates/criticalup-cli/src/binary_proxies.rs new file mode 100644 index 00000000..65a63137 --- /dev/null +++ b/crates/criticalup-cli/src/binary_proxies.rs @@ -0,0 +1,142 @@ +use crate::errors::Error; +use crate::spawn; +use criticalup_core::config::{Config, WhitelabelConfig}; +use 
criticalup_core::project_manifest::ProjectManifest; +use criticalup_core::state::State; +use std::env::JoinPathsError; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; + +pub(crate) fn proxy(whitelabel: WhitelabelConfig) -> Result<(), Error> { + let binary_name = arg0(&whitelabel)?; + let args: Vec<_> = std::env::args_os().skip(1).collect(); + + let config = Config::detect(whitelabel)?; + let state = State::load(&config)?; + + let manifest_path = ProjectManifest::discover_canonical_path( + std::env::var_os("CRITICALUP_CURRENT_PROJ_MANIFEST_CANONICAL_PATH") + .map(std::path::PathBuf::from) + .as_deref(), + )?; + + let project_manifest = ProjectManifest::load(manifest_path.as_path())?; + + let Some((installation_id, resolved_path)) = project_manifest + .products() + .iter() + .map(|p| p.installation_id()) + .filter_map(|id| { + state + .resolve_binary_proxy(&id, &binary_name) + .map(|p| (id, p)) + }) + .next() + else { + return Err(Error::BinaryNotInstalled(binary_name)); + }; + + let mut command = Command::new( + config + .paths + .installation_dir + .join(installation_id.clone()) + .join(resolved_path), + ); + + // In order to ensure, for example, our `cargo` invokes our `rustc` we + // append the proxy dir to the path. + // + // For some particularly niche use cases, users may find themselves wanting + // to override the `rustc` called, and they may want to do that by setting + // `PATH` themselves, but they: + // 1) Shouldn't do that, and + // 2) Can set `RUSTC` which `cargo` already supports. + let additional_bin_path = config.paths.proxies_dir.clone(); + // We need to also set the library path according to + // https://doc.rust-lang.org/cargo/reference/environment-variables.html#dynamic-library-paths + // Notably: On Windows this is the same as the binary path. 
+ let additional_lib_path = config + .paths + .installation_dir + .clone() + .join(installation_id) + .join("lib"); + + #[cfg(target_os = "macos")] + prepend_path_to_var_for_command( + &mut command, + "DYLD_FALLBACK_LIBRARY_PATH", + vec![additional_lib_path], + )?; + #[cfg(target_os = "linux")] + prepend_path_to_var_for_command(&mut command, "LD_LIBRARY_PATH", vec![additional_lib_path])?; + #[cfg(any(target_os = "linux", target_os = "macos"))] + prepend_path_to_var_for_command(&mut command, "PATH", vec![additional_bin_path])?; + + #[cfg(target_os = "windows")] + prepend_path_to_var_for_command( + &mut command, + "PATH", + vec![additional_bin_path, additional_lib_path], + )?; + + // CRITICALUP_CURRENT_PROJ_MANIFEST_CANONICAL_PATH is an environment variable set by CriticalUp + // to make sure that the canonical manifest path is available to CriticalUp when using cargo + // with project with dependencies. + // + // This is required because cargo changes the current directory to the project dependency + // location. The repercussion is that the criticalup.toml manifest will not be found if the user + // runs cargo commands due to this directory switching. + // + // Important: Users must never set this on their own! + command + .env( + "CRITICALUP_CURRENT_PROJ_MANIFEST_CANONICAL_PATH", + manifest_path, + ) + .args(args) + .stdin(Stdio::inherit()) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()); + + spawn::spawn_command(command) +} + +pub(crate) fn arg0(whitelabel: &WhitelabelConfig) -> Result { + let mut arg0 = std::env::args_os() + .next() + .expect("missing arg0, should never happen"); + + // Helper to allow us to simulate binary proxies in the test suite without having to copy or + // symlink files around. Due to being gated under test_mode it will not be available in + // production binaries. 
+ if whitelabel.test_mode { + if let Some(overridden) = std::env::var_os("CRITICALUP_TEST_OVERRIDE_ARG0") { + arg0 = overridden; + } + } + + let arg0 = Path::new(&arg0); + arg0.file_name() + .unwrap_or(arg0.as_os_str()) + .to_str() + .ok_or(Error::NonUtf8Arg0) + .map(|s| s.to_string()) +} + +fn prepend_path_to_var_for_command( + command: &mut Command, + env_var: &str, + new: Vec, +) -> Result<(), JoinPathsError> { + let mut existing_vals = if let Some(existing_vals) = std::env::var_os(env_var) { + std::env::split_paths(&existing_vals).collect::>() + } else { + vec![] + }; + let mut updated_val = new; + updated_val.append(&mut existing_vals); + command.env(env_var, std::env::join_paths(updated_val)?); + Ok(()) +} diff --git a/crates/criticalup-cli/src/commands/auth.rs b/crates/criticalup-cli/src/commands/auth.rs new file mode 100644 index 00000000..579a7d48 --- /dev/null +++ b/crates/criticalup-cli/src/commands/auth.rs @@ -0,0 +1,40 @@ +use crate::errors::{Error, LibError}; +use crate::Context; +use criticalup_core::download_server_client::DownloadServerClient; +use criticalup_core::errors::DownloadServerError; +use criticalup_core::state::State; + +pub(crate) fn run(ctx: &Context) -> Result<(), Error> { + let state = State::load(&ctx.config)?; + let download_server = DownloadServerClient::new(&ctx.config, &state); + + match download_server.get_current_token_data() { + Ok(data) => { + eprintln!("valid authentication token present"); + eprintln!(); + eprintln!("token name: {}", data.name); + eprintln!("organization name: {}", data.organization_name); + eprintln!( + "expires at: {}", + data.expires_at.as_deref().unwrap_or("none") + ); + + Ok(()) + } + Err(LibError::DownloadServerError { + kind: DownloadServerError::AuthenticationFailed, + .. 
+ }) => { + eprintln!("error: failed to authenticate with the download server"); + eprintln!(); + eprintln!("The authentication token could be missing, invalid or expired."); + eprintln!("You can set a new authentication token by running:"); + eprintln!(); + eprintln!(" criticalup auth set"); + eprintln!(); + + Err(Error::Exit(1)) + } + Err(err) => Err(err.into()), + } +} diff --git a/crates/criticalup-cli/src/commands/auth_remove.rs b/crates/criticalup-cli/src/commands/auth_remove.rs new file mode 100644 index 00000000..fd0cb57e --- /dev/null +++ b/crates/criticalup-cli/src/commands/auth_remove.rs @@ -0,0 +1,14 @@ +use crate::errors::Error; +use crate::Context; +use criticalup_core::state::State; + +pub(crate) fn run(ctx: &Context) -> Result<(), Error> { + let state = State::load(&ctx.config)?; + + if state.authentication_token(None).is_some() { + state.set_authentication_token(None); + state.persist()?; + } + + Ok(()) +} diff --git a/crates/criticalup-cli/src/commands/auth_set.rs b/crates/criticalup-cli/src/commands/auth_set.rs new file mode 100644 index 00000000..df92ed8d --- /dev/null +++ b/crates/criticalup-cli/src/commands/auth_set.rs @@ -0,0 +1,92 @@ +use crate::errors::{Error, LibError}; +use crate::Context; +use atty::Stream; +use criticalup_core::download_server_client::DownloadServerClient; +use criticalup_core::errors::DownloadServerError; +use criticalup_core::state::{AuthenticationToken, State}; +use std::io::Write; + +pub(crate) fn run(ctx: &Context, token: Option) -> Result<(), Error> { + let state = State::load(&ctx.config)?; + let download_server = DownloadServerClient::new(&ctx.config, &state); + + let token = if let Some(token) = token { + token + } else if is_tty(ctx, Stream::Stdin) && is_tty(ctx, Stream::Stderr) { + token_from_stdin_interactive(ctx).map_err(Error::CantReadTokenFromStdin)? + } else { + token_from_stdin_programmatic().map_err(Error::CantReadTokenFromStdin)? 
+ }; + + state.set_authentication_token(Some(AuthenticationToken::seal(&token))); + + match download_server.get_current_token_data() { + Ok(_) => Ok(state.persist()?), + + Err(LibError::DownloadServerError { + kind: DownloadServerError::AuthenticationFailed, + .. + }) => Err(Error::InvalidAuthenticationToken), + Err(err) => Err(err.into()), + } +} + +fn token_from_stdin_interactive(ctx: &Context) -> Result { + let mut stderr = std::io::stderr(); + let token_loc_message = format!( + "Visit {}/{} to create a new token, then enter it below.\n", + ctx.config.whitelabel.customer_portal_url, "users/tokens" + ); + stderr.write_all(token_loc_message.as_bytes())?; + stderr.write_all("enter the authentication token: ".as_bytes())?; + stderr.flush()?; + + let mut token = String::new(); + std::io::stdin().read_line(&mut token)?; + + // `.trim_end()` can trim more than just the last newline. + if token.ends_with('\n') { + token.pop(); + if token.ends_with('\r') { + token.pop(); + } + } else { + // Ensure a newline is printed even if the user terminated the line in another way (for + // example with an EOF / Ctrl+D) + stderr.write_all(b"\n")?; + } + + Ok(token) +} + +fn token_from_stdin_programmatic() -> Result { + let mut token = String::new(); + std::io::stdin().read_line(&mut token)?; + + // `.trim_end()` can trim more than just the last newline. 
+ if token.ends_with('\n') { + token.pop(); + if token.ends_with('\r') { + token.pop(); + } + } + + Ok(token) +} + +fn is_tty(ctx: &Context, stream: Stream) -> bool { + if ctx.config.whitelabel.test_mode { + // If the environment variable is set, pay attention to it + if let Some(var) = std::env::var_os("CRITICALUP_TEST_MOCK_TTY") { + if var == "1" { + return true; + } else if var == "0" { + return false; + } else { + panic!("CRITICALUP_TEST_MOCK_TTY should only ever be 0 or 1, or unset"); + } + } + } + // Ask libc if this stream is a TTY + atty::is(stream) +} diff --git a/crates/criticalup-cli/src/commands/clean.rs b/crates/criticalup-cli/src/commands/clean.rs new file mode 100644 index 00000000..10bdf029 --- /dev/null +++ b/crates/criticalup-cli/src/commands/clean.rs @@ -0,0 +1,109 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use owo_colors::OwoColorize; + +use criticalup_core::project_manifest::InstallationId; +use criticalup_core::state::State; + +use crate::errors::Error; +use crate::Context; + +pub(crate) fn run(ctx: &Context) -> Result<(), Error> { + let installations_dir = &ctx.config.paths.installation_dir; + let state = State::load(&ctx.config)?; + + delete_unused_installations(installations_dir, &state)?; + delete_untracked_installation_dirs(installations_dir, state)?; + + Ok(()) +} + +/// Deletes installation from `State` with `InstallationId`s that have empty manifest section, and +/// deletes the installation directory from the disk if present.
+fn delete_unused_installations(installations_dir: &Path, state: &State) -> Result<(), Error> { + let unused_installations: Vec = state + .installations() + .iter() + .filter(|item| item.1.manifests().is_empty()) + .map(|item| item.0.to_owned()) + .collect(); + + if unused_installations.is_empty() { + println!("{} no unused installations found", "info:".bold()); + return Ok(()); + } + + for installation in unused_installations { + println!( + "{} deleting unused installation {}", + "info:".bold(), + installation.0 + ); + + // Remove installation from the state. + state.remove_installation(&installation); + // The state will be saved onto the disk but the removal of the installation directory + // will be done after this which may not exist. + state.persist()?; + + // Remove installation directory from physical location. + let installation_dir_to_delete = installations_dir.join(&installation.0); + if installation_dir_to_delete.exists() { + println!( + "{} deleting unused installation directory {}", + "info:".bold(), + &installation_dir_to_delete.display() + ); + fs::remove_dir_all(&installation_dir_to_delete).map_err(|err| { + Error::DeletingUnusedInstallationDir { + path: installation_dir_to_delete, + kind: err, + } + })?; + } + } + Ok(()) +} + +/// Deletes the installation directories from the disk that do not exist in the State. +fn delete_untracked_installation_dirs( + installations_dir: &PathBuf, + state: State, +) -> Result<(), Error> { + let installations_in_state = state.installations(); + let mut are_untracked_installation_dirs_present = false; + + for item_in_installation_dir in fs::read_dir(installations_dir)? 
{ + let item = item_in_installation_dir?; + if item.file_type()?.is_dir() { + let installation_dir_name = item.file_name(); + if let Some(name) = installation_dir_name.to_str() { + if !installations_in_state.contains_key(&InstallationId(name.into())) { + are_untracked_installation_dirs_present = true; + println!( + "{} deleting untracked installation directory {}", + "info:".bold(), + item.path().to_path_buf().display() + ); + + fs::remove_dir_all(item.path()).map_err(|err| { + Error::DeletingUntrackedInstallationDir { + path: item.path().to_path_buf(), + kind: err, + } + })?; + } + } + } + } + + if !are_untracked_installation_dirs_present { + println!( + "{} no untracked installation directories found", + "info:".bold() + ); + } + + Ok(()) +} diff --git a/crates/criticalup-cli/src/commands/install.rs b/crates/criticalup-cli/src/commands/install.rs new file mode 100644 index 00000000..6584fe36 --- /dev/null +++ b/crates/criticalup-cli/src/commands/install.rs @@ -0,0 +1,215 @@ +use std::path::{Path, PathBuf}; + +use owo_colors::OwoColorize; + +use criticaltrust::integrity::IntegrityVerifier; +use criticaltrust::manifests::{Release, ReleaseArtifactFormat}; +use criticalup_core::download_server_client::DownloadServerClient; +use criticalup_core::project_manifest::{ProjectManifest, ProjectManifestProduct}; +use criticalup_core::state::State; + +use crate::errors::Error; +use crate::errors::Error::{IntegrityErrorsWhileInstallation, PackageDependenciesNotSupported}; +use crate::Context; + +pub const DEFAULT_RELEASE_ARTIFACT_FORMAT: ReleaseArtifactFormat = ReleaseArtifactFormat::TarXz; + +pub(crate) fn run(ctx: &Context, project: Option) -> Result<(), Error> { + // TODO: If `std::io::stdout().is_terminal() == true``, provide a nice, fancy progress bar using indicatif. + // Retain existing behavior to support non-TTY usage. 
+ + let state = State::load(&ctx.config)?; + + // Get manifest location if arg `project` is None + let manifest_path = ProjectManifest::discover_canonical_path(project.as_deref())?; + + // Parse and serialize the project manifest. + let manifest = ProjectManifest::get(project)?; + + let installation_dir = &ctx.config.paths.installation_dir; + + for product in manifest.products() { + let abs_installation_dir_path = installation_dir.join(product.installation_id()); + + if !abs_installation_dir_path.exists() { + install_product_afresh(ctx, &state, &manifest_path, product)?; + } else { + // Check if the state file has no mention of this installation. + let does_this_installation_exist_in_state = state + .installations() + .contains_key(&product.installation_id()); + if !does_this_installation_exist_in_state { + // If the installation directory exists, but the State has no installation of that + // InstallationId, then re-run the install command and go through installation. + install_product_afresh(ctx, &state, &manifest_path, product)?; + } else { + // If the installation directory exists AND there is an existing installation with + // that InstallationId, then merely update the installation in the State file to + // reflect this manifest/project. + state.update_installation_manifests(&product.installation_id(), &manifest_path)?; + println!("Skipping installation for product '{}' because it seems to be already installed.\n\ + If you want to reinstall it, please run 'criticalup remove' followed by 'criticalup install' command.", + product.name()); + } + } + // Even though we do not install the existing packages again, we still need to add + // the manifest to the state.json. 
+ state.persist()?; + } + + criticalup_core::binary_proxies::update(&ctx.config, &state, &std::env::current_exe()?)?; + + Ok(()) +} + +fn install_product_afresh( + ctx: &Context, + state: &State, + manifest_path: &Path, + product: &ProjectManifestProduct, +) -> Result<(), Error> { + let product_name = product.name(); + let release = product.release(); + let installation_dir = &ctx.config.paths.installation_dir; + let abs_installation_dir_path = installation_dir.join(product.installation_id()); + let client = DownloadServerClient::new(&ctx.config, state); + let keys = client.get_keys()?; + + // TODO: Add tracing to support log levels, structured logging. + println!( + "{} installing product '{product_name}' ({release})", + "info:".bold() + ); + + let mut integrity_verifier = IntegrityVerifier::new(&keys); + + // Get the release manifest for the product from the server and verify it. + let release_manifest_from_server = + client.get_product_release_manifest(product_name, product.release())?; + let verified_release_manifest = release_manifest_from_server.signed.into_verified(&keys)?; + + // criticalup 0.1, return error if any of package.dependencies is not empty. + // We have to use manifest's Release because the information about dependencies + // only lives in it and not in product's packages which is only a name/String. + check_for_package_dependencies(&verified_release_manifest)?; + + let release_name = verified_release_manifest.release.as_str(); + + product.create_product_dir(&ctx.config.paths.installation_dir)?; + + for package in product.packages() { + println!( + "{} downloading component '{package}' for '{product_name}' ({release})", + "info:".bold() + ); + + let response_file = client.download_package( + product_name, + release_name, + package, + DEFAULT_RELEASE_ARTIFACT_FORMAT, + )?; + + // Archive file path, path with the archive extension. 
+ let package_name_with_extension = + format!("{}.{}", package, DEFAULT_RELEASE_ARTIFACT_FORMAT); + let abs_artifact_compressed_file_path: PathBuf = + abs_installation_dir_path.join(&package_name_with_extension); + + // Save the downloaded package archive on disk. + std::fs::write(&abs_artifact_compressed_file_path, response_file.clone())?; + + println!( + "{} installing component '{package}' for '{product_name}' ({release})", + "info:".bold() + ); + + let decoder = xz2::read::XzDecoder::new(response_file.as_slice()); + let mut archive = tar::Archive::new(decoder); + archive.set_preserve_permissions(true); + archive.set_preserve_mtime(true); + archive.set_unpack_xattrs(true); + + let entries = archive.entries()?; + for each in entries { + let mut entry = each?; + + let p = entry.path()?.into_owned(); + let entry_path_on_disk = abs_installation_dir_path.join(p); + entry.unpack(&entry_path_on_disk)?; + + if entry_path_on_disk.is_file() { + integrity_verifier.add( + &entry_path_on_disk.display().to_string(), + entry.header().mode()?, + &std::fs::read(&entry_path_on_disk)?, + ); + } + } + + clean_archive_download(&abs_artifact_compressed_file_path)?; + } + + let verified_packages = integrity_verifier + .verify() + .map_err(IntegrityErrorsWhileInstallation)?; + + state.add_installation( + &product.installation_id(), + &verified_packages, + manifest_path, + &ctx.config, + )?; + Ok(()) +} + +fn check_for_package_dependencies(verified_release_manifest: &Release) -> Result<(), Error> { + for package in verified_release_manifest.packages.iter() { + if !package.dependencies.is_empty() { + return Err(PackageDependenciesNotSupported(package.package.clone())); + } + } + Ok(()) +} + +fn clean_archive_download(abs_artifact_compressed_file_path: &PathBuf) -> Result<(), Error> { + std::fs::remove_file(abs_artifact_compressed_file_path)?; + Ok(()) +} + +#[test] +fn dependencies_check() { + use criticaltrust::manifests::ReleasePackage; + + let dependencies = 
vec!["dependency_a".to_string()]; + + let good = Release { + product: "ferrocene".to_string(), + release: "nightly-2024-02-28".to_string(), + commit: "123".to_string(), + packages: vec![ReleasePackage { + package: "awesome".to_string(), + artifacts: vec![], + dependencies: vec![], + }], + }; + + assert!(check_for_package_dependencies(&good).is_ok()); + + let bad = Release { + product: "ferrocene".to_string(), + release: "nightly-2024-02-28".to_string(), + commit: "123".to_string(), + packages: vec![ReleasePackage { + package: "awesome".to_string(), + artifacts: vec![], + dependencies, + }], + }; + + assert!(check_for_package_dependencies(&bad).is_err()); + assert!(matches!( + check_for_package_dependencies(&bad), + Err(PackageDependenciesNotSupported(..)) + )); +} diff --git a/crates/criticalup-cli/src/commands/mod.rs b/crates/criticalup-cli/src/commands/mod.rs new file mode 100644 index 00000000..66d9f878 --- /dev/null +++ b/crates/criticalup-cli/src/commands/mod.rs @@ -0,0 +1,8 @@ +pub(crate) mod auth; +pub(crate) mod auth_remove; +pub(crate) mod auth_set; +pub(crate) mod clean; +pub(crate) mod install; +pub(crate) mod remove; +pub(crate) mod run; +pub(crate) mod which; diff --git a/crates/criticalup-cli/src/commands/remove.rs b/crates/criticalup-cli/src/commands/remove.rs new file mode 100644 index 00000000..153f2f6c --- /dev/null +++ b/crates/criticalup-cli/src/commands/remove.rs @@ -0,0 +1,38 @@ +use crate::errors::Error; +use crate::Context; +use criticalup_core::project_manifest::ProjectManifest; +use criticalup_core::state::State; +use owo_colors::OwoColorize; +use std::fs; +use std::path::PathBuf; + +pub(crate) fn run(ctx: &Context, project: Option) -> Result<(), Error> { + let state = State::load(&ctx.config)?; + let manifest_path = ProjectManifest::discover_canonical_path(project.as_deref())?; + let installation_dir = &ctx.config.paths.installation_dir; + + let installations_from_which_manifest_was_deleted = + 
state.remove_manifest_from_all_installations(&manifest_path)?; + state.persist()?; + + for installation_id in &installations_from_which_manifest_was_deleted { + println!( + "{} deleting installation {}", + "info:".bold(), + installation_id.0 + ); + let installation_path = installation_dir.join(installation_id.0.as_str()); + if installation_path.exists() { + fs::remove_dir_all(&installation_path)?; + } + } + + if installations_from_which_manifest_was_deleted.is_empty() { + println!( + "{} no existing installations found to be deleted", + "info:".bold() + ); + } + + Ok(()) +} diff --git a/crates/criticalup-cli/src/commands/run.rs b/crates/criticalup-cli/src/commands/run.rs new file mode 100644 index 00000000..878898b0 --- /dev/null +++ b/crates/criticalup-cli/src/commands/run.rs @@ -0,0 +1,52 @@ +use crate::errors::Error; +use crate::errors::Error::BinaryNotInstalled; +use crate::spawn::spawn_command; +use crate::Context; +use criticalup_core::project_manifest::ProjectManifest; +use std::path::PathBuf; +use std::process::{Command, Stdio}; + +pub(crate) fn run( + ctx: &Context, + command: Vec, + project: Option, +) -> Result<(), Error> { + // We try to fetch the manifest early on because it makes failing fast easy. Given that we need + // this variable to set the env var later for child process, it is important to try to get the + // canonical path first. + let manifest_path = ProjectManifest::discover_canonical_path(project.as_deref())?; + + // This dir has all the binaries that are proxied. 
+ let proxies_dir = &ctx.config.paths.proxies_dir; + + if let Some(binary_command) = command.first() { + let binary_path = proxies_dir.join(binary_command); + + if binary_path.exists() { + let args = command.get(1..).unwrap_or(&[]); + let mut cmd = Command::new(binary_path); + cmd.args(args) + .stdin(Stdio::inherit()) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()); + + // Set the manifest path env CRITICALUP_CURRENT_PROJ_MANIFEST_CANONICAL_PATH var which is used + // by the function `crates::criticalup-cli::binary_proxies::proxy` to find the correct project + // manifest. + // + // Important: This env var is strictly for internal use! + if manifest_path.exists() { + cmd.env( + "CRITICALUP_CURRENT_PROJ_MANIFEST_CANONICAL_PATH", + manifest_path.as_os_str(), + ); + } + + spawn_command(cmd)?; + } else { + return Err(BinaryNotInstalled(binary_command.into())); + } + } + + Ok(()) +} diff --git a/crates/criticalup-cli/src/commands/which.rs b/crates/criticalup-cli/src/commands/which.rs new file mode 100644 index 00000000..65432fa0 --- /dev/null +++ b/crates/criticalup-cli/src/commands/which.rs @@ -0,0 +1,24 @@ +use crate::errors::Error; +use crate::errors::Error::BinaryNotInstalled; +use crate::Context; +use criticalup_core::project_manifest::ProjectManifest; +use std::path::PathBuf; + +pub(crate) fn run(ctx: &Context, tool: String, project: Option) -> Result<(), Error> { + let manifest = ProjectManifest::get(project)?; + + let installation_dir = &ctx.config.paths.installation_dir; + + for product in manifest.products() { + let abs_installation_dir_path = installation_dir.join(product.installation_id()); + let tools_bin_path = abs_installation_dir_path.join(format!("bin/{}", tool)); + + if tools_bin_path.exists() { + println!("{}\n", tools_bin_path.display()); + } else { + return Err(BinaryNotInstalled(tool)); + } + } + + Ok(()) +} diff --git a/crates/criticalup-cli/src/errors.rs b/crates/criticalup-cli/src/errors.rs new file mode 100644 index 00000000..c7c1308a
--- /dev/null
+++ b/crates/criticalup-cli/src/errors.rs
@@ -0,0 +1,82 @@
+use criticaltrust::integrity::IntegrityError;
+pub(crate) use criticaltrust::Error as TrustError;
+pub(crate) use criticalup_core::errors::BinaryProxyUpdateError;
+pub(crate) use criticalup_core::errors::Error as LibError;
+use std::path::PathBuf;
+use std::string::FromUtf8Error as Utf8Error;
+
+#[derive(Debug, thiserror::Error)]
+pub(crate) enum Error {
+    #[error(transparent)]
+    Lib(#[from] LibError),
+    #[error(transparent)]
+    BinaryProxyUpdate(#[from] BinaryProxyUpdateError),
+    #[error(transparent)]
+    Trust(#[from] TrustError),
+    #[error(transparent)]
+    Utf8(#[from] Utf8Error),
+
+    #[error(transparent)]
+    Io(#[from] std::io::Error),
+
+    #[error(transparent)]
+    JoinPaths(#[from] std::env::JoinPathsError),
+
+    #[error("exiting with code {0}")]
+    Exit(i32),
+    #[error("failed to parse command line arguments")]
+    CliArgumentParsing(#[source] clap::Error),
+
+    #[error("failed to read the token from stdin")]
+    CantReadTokenFromStdin(#[source] std::io::Error),
+    #[error("invalid authentication token provided")]
+    InvalidAuthenticationToken,
+
+    #[error("some files did not pass the integrity checks after the download\n \
+             please clean your installation directory and re-install the project again\n \
+             the following errors were found:\n\n{}",
+            .0.iter().map(|err| { err.to_string() }).collect::<Vec<String>>().join("\n")
+    )]
+    IntegrityErrorsWhileInstallation(Vec<IntegrityError>),
+
+    #[error("arg0 is not encoded in UTF-8")]
+    NonUtf8Arg0,
+    #[error("failed to invoke proxied command {}", .0.display())]
+    FailedToInvokeProxiedCommand(PathBuf, #[source] std::io::Error),
+    #[error(
+        "'{0}' is not installed for this project.\n\n\
+         Please make sure that the correct package for '{0}' is listed in the packages section of your \
+         project's criticalup.toml and run 'criticalup install' command again.\n"
+    )]
+    BinaryNotInstalled(String),
+
+    // This is not *technically* needed, but it provides useful insights when an error happens when
+    // invoking a binary proxy. Otherwise people could think the error comes from rustc/cargo/etc.
+    #[error("criticalup could not invoke the binary you requested")]
+    BinaryProxyInvocationFailed(#[source] Box<Error>),
+
+    #[error(
+        "dependencies are not supported in the current criticalup release.\n \
+         found package {0} with dependencies in the manifest.\n \
+         please update criticalup to the latest version to resolve this error."
+    )]
+    PackageDependenciesNotSupported(String),
+
+    #[error("there was an error while trying to delete the unused installation directory at {}", path.display())]
+    DeletingUnusedInstallationDir {
+        path: PathBuf,
+        #[source]
+        kind: std::io::Error,
+    },
+
+    #[error("there was an error while trying to delete the untracked installation directory at {}", path.display())]
+    DeletingUntrackedInstallationDir {
+        path: PathBuf,
+        #[source]
+        kind: std::io::Error,
+    },
+
+    #[cfg(windows)]
+    #[error("Could not set Ctrl-C handler.")]
+    CtrlHandler,
+}
diff --git a/crates/criticalup-cli/src/lib.rs b/crates/criticalup-cli/src/lib.rs
new file mode 100644
index 00000000..6a753fde
--- /dev/null
+++ b/crates/criticalup-cli/src/lib.rs
@@ -0,0 +1,165 @@
+mod binary_proxies;
+mod commands;
+mod errors;
+mod spawn;
+
+use crate::errors::Error;
+use clap::{command, Command, CommandFactory, FromArgMatches, Parser, Subcommand};
+use criticalup_core::config::Config;
+pub use criticalup_core::config::WhitelabelConfig;
+use std::ffi::OsString;
+use std::path::PathBuf;
+
+/// Use a custom help template to solve some issues with Clap's default one, namely the
+/// command-subcommand-sub-subcommand at the top of each heading.
+///
+/// The syntax is available in the documentation for [`clap::Command::help_template`].
+const HELP_TEMPLATE: &str = "{about}\n\n{usage-heading}\n{tab}{usage}\n\n{all-args}"; + +fn main_inner(whitelabel: WhitelabelConfig, args: &[OsString]) -> Result<(), Error> { + let arg0 = binary_proxies::arg0(&whitelabel)?; + #[cfg(windows)] + let arg0 = arg0 + .strip_suffix(".exe") + .map(|v| v.to_string()) + .unwrap_or(arg0); + + if arg0 != whitelabel.name { + return binary_proxies::proxy(whitelabel) + .map_err(|e| Error::BinaryProxyInvocationFailed(Box::new(e))); + } + + let mut command = Cli::command().name(whitelabel.name); + override_help_template(&mut command); + + let matches = command + .try_get_matches_from(args) + .map_err(Error::CliArgumentParsing)?; + let cli = Cli::from_arg_matches(&matches).map_err(Error::CliArgumentParsing)?; + + let config = Config::detect(whitelabel)?; + let ctx = Context { config }; + + match cli.commands { + Commands::Auth { commands } => match commands { + Some(AuthCommands::Set { token }) => commands::auth_set::run(&ctx, token)?, + Some(AuthCommands::Remove) => commands::auth_remove::run(&ctx)?, + None => commands::auth::run(&ctx)?, + }, + Commands::Install { project } => commands::install::run(&ctx, project)?, + Commands::Clean => commands::clean::run(&ctx)?, + Commands::Remove { project } => commands::remove::run(&ctx, project)?, + Commands::Run { command, project } => commands::run::run(&ctx, command, project)?, + Commands::Which { + binary: tool, + project, + } => commands::which::run(&ctx, tool, project)?, + } + + Ok(()) +} + +pub fn main(whitelabel: WhitelabelConfig, args: &[OsString]) -> i32 { + match main_inner(whitelabel, args) { + Ok(()) => 0, + Err(Error::Exit(code)) => code, + Err(Error::CliArgumentParsing(err)) => { + eprint!("{err}"); + match err.kind() { + clap::error::ErrorKind::DisplayHelp => 0, + clap::error::ErrorKind::DisplayVersion => 0, + _ => 1, + } + } + Err(err) => { + eprintln!("error: {err}"); + + let mut err = &err as &dyn std::error::Error; + while let Some(source) = err.source() { + err = source; 
+                eprintln!(" caused by: {source}");
+            }
+
+            1
+        }
+    }
+}
+
+/// There is no Clap option to set the global help template, it has to be set for each individual
+/// command and subcommand. Since that's error-prone this function updates all subcommands after
+/// the fact to set the correct template.
+fn override_help_template(command: &mut Command) {
+    *command = command.clone().help_template(HELP_TEMPLATE);
+    for subcommand in command.get_subcommands_mut() {
+        override_help_template(subcommand);
+    }
+}
+
+struct Context {
+    config: Config,
+}
+
+/// CriticalUp is the official tool to download and install Ferrocene.
+#[derive(Parser, Debug)]
+#[command(name = "criticalup-cli")]
+#[command(author, version, about, long_about = None, disable_help_subcommand = true)]
+struct Cli {
+    #[command(subcommand)]
+    commands: Commands,
+}
+
+#[derive(Debug, Subcommand, Clone)]
+enum Commands {
+    /// Show and change authentication with the download server
+    Auth {
+        #[command(subcommand)]
+        commands: Option<AuthCommands>,
+    },
+    /// Install the toolchain for the given project based on the manifest `criticalup.toml`
+    Install {
+        /// Path to the manifest `criticalup.toml`
+        #[arg(long)]
+        project: Option<PathBuf>,
+    },
+
+    /// Delete all unused and untracked installations
+    Clean,
+
+    /// Run a command for a given toolchain
+    Run {
+        /// Command with possible args to run
+        #[clap(trailing_var_arg = true, required = true)]
+        command: Vec<String>,
+
+        /// Path to the manifest `criticalup.toml`
+        #[arg(long)]
+        project: Option<PathBuf>,
+    },
+
+    /// Delete all the products specified in the manifest `criticalup.toml`
+    Remove {
+        /// Path to the manifest `criticalup.toml`
+        #[arg(long)]
+        project: Option<PathBuf>,
+    },
+
+    /// Display which binary will be run for a given command
+    Which {
+        /// Name of the binary to find the absolute path of
+        binary: String,
+        /// Path to the manifest `criticalup.toml`
+        #[arg(long)]
+        project: Option<PathBuf>,
+    },
+}
+
+#[derive(Debug, Subcommand, Clone)]
+enum AuthCommands {
+    /// Remove the authentication token used to interact with the download server
+    Remove,
+    /// Set the authentication token used to interact with the download server
+    Set {
+        /// Authentication token to use; if not provided, it will be read from stdin
+        token: Option<String>,
+    },
+}
diff --git a/crates/criticalup-cli/src/spawn.rs b/crates/criticalup-cli/src/spawn.rs
new file mode 100644
index 00000000..1843dbad
--- /dev/null
+++ b/crates/criticalup-cli/src/spawn.rs
@@ -0,0 +1,52 @@
+use crate::errors::Error;
+use std::process::Command;
+
+/// Utility function to spawn a process and have it replace the current one.
+///
+/// This is for Unix based systems. For Windows based systems, please see the function below.
+#[cfg(unix)]
+pub(crate) fn spawn_command(mut command: Command) -> Result<(), Error> {
+    use std::os::unix::process::CommandExt;
+    use std::path::PathBuf;
+
+    let path = PathBuf::from(command.get_program());
+
+    // exec() replaces the current process with the process we're about to invoke. Thus if it
+    // returns at all it means the invocation failed.
+    Err(Error::FailedToInvokeProxiedCommand(path, command.exec()))
+}
+
+/// Utility function to spawn a child process, for Windows based systems.
+///
+/// Windows does not have an `execvp` equivalent.
+///
+/// We **cannot** replace our current process.
+///
+/// Instead, we use the strategy `cargo` and `rustup` use:
+/// https://github.com/rust-lang/cargo/blob/403fbe2b490d6cbb715ed768462bb7f977a6d514/crates/cargo-util/src/process_builder.rs#L609-L626
+/// https://github.com/rust-lang/rustup/blob/a7c0c45b2daaa149ac9a8e14a7270c855cd2b334/src/command.rs#L37-L56
+#[cfg(windows)]
+pub(crate) fn spawn_command(mut command: Command) -> Result<(), Error> {
+    use std::path::PathBuf;
+    use windows_sys::Win32::Foundation::{BOOL, FALSE, TRUE};
+    use windows_sys::Win32::System::Console::SetConsoleCtrlHandler;
+
+    unsafe extern "system" fn ctrlc_handler(_: u32) -> BOOL {
+        // Do nothing; let the child process handle it.
+ TRUE + } + + unsafe { + if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE { + return Err(Error::CtrlHandler); + } + } + + let path = PathBuf::from(command.get_program()); + + // Success or failure is irrelevant, we simply want to run the task then exit. + let exit = command + .status() + .map_err(|e| Error::FailedToInvokeProxiedCommand(path, e))?; + std::process::exit(exit.code().unwrap_or(1)); +} diff --git a/crates/criticalup-cli/tests/cli/auth.rs b/crates/criticalup-cli/tests/cli/auth.rs new file mode 100644 index 00000000..c48a8e80 --- /dev/null +++ b/crates/criticalup-cli/tests/cli/auth.rs @@ -0,0 +1,54 @@ +use crate::assert_output; +use crate::utils::{TestEnvironment, MOCK_AUTH_TOKENS}; + +#[test] +fn help_message() { + let test_env = TestEnvironment::prepare(); + assert_output!(test_env.cmd().args(["auth", "--help"])); +} + +#[test] +fn no_token() { + let test_env = TestEnvironment::prepare(); + + assert_output!(test_env.cmd().arg("auth")); + assert_eq!(0, test_env.requests_served_by_mock_download_server()); +} + +#[test] +fn invalid_token() { + let test_env = TestEnvironment::prepare(); + set_token(&test_env, MOCK_AUTH_TOKENS[2].0); + test_env.revoke_token(MOCK_AUTH_TOKENS[2].0); + + assert_output!(test_env.cmd().arg("auth")); + assert_eq!(2, test_env.requests_served_by_mock_download_server()); +} + +#[test] +fn token_without_expiry() { + let test_env = TestEnvironment::prepare(); + set_token(&test_env, MOCK_AUTH_TOKENS[0].0); + + assert_output!(test_env.cmd().arg("auth")); + assert_eq!(2, test_env.requests_served_by_mock_download_server()); +} + +#[test] +fn token_with_expiry() { + let test_env = TestEnvironment::prepare(); + set_token(&test_env, MOCK_AUTH_TOKENS[1].0); + + assert_output!(test_env.cmd().arg("auth")); + assert_eq!(2, test_env.requests_served_by_mock_download_server()); +} + +fn set_token(test_env: &TestEnvironment, token: &str) { + assert!(test_env + .cmd() + .args(["auth", "set", token]) + .output() + .expect("failed to set 
the token")
+        .status
+        .success());
+}
diff --git a/crates/criticalup-cli/tests/cli/auth_remove.rs b/crates/criticalup-cli/tests/cli/auth_remove.rs
new file mode 100644
index 00000000..25a86e5f
--- /dev/null
+++ b/crates/criticalup-cli/tests/cli/auth_remove.rs
@@ -0,0 +1,45 @@
+use crate::assert_output;
+use crate::utils::{TestEnvironment, MOCK_AUTH_TOKENS};
+use serde::Deserialize;
+
+#[test]
+fn help_message() {
+    let test_env = TestEnvironment::prepare();
+    assert_output!(test_env.cmd().args(["auth", "remove", "--help"]));
+}
+
+#[test]
+fn token_missing() {
+    let test_env = TestEnvironment::prepare();
+
+    assert_output!(test_env.cmd().args(["auth", "remove"]));
+    assert_eq!(0, test_env.requests_served_by_mock_download_server());
+
+    // Ensure no state file was created by just running remove.
+    assert!(!test_env.root().join("state.json").exists());
+}
+
+#[test]
+fn token_present() {
+    #[derive(Deserialize)]
+    struct State {
+        authentication_token: Option<String>,
+    }
+
+    let test_env = TestEnvironment::prepare();
+    assert!(test_env
+        .cmd()
+        .args(["auth", "set", MOCK_AUTH_TOKENS[0].0])
+        .output()
+        .expect("failed to set token")
+        .status
+        .success());
+
+    assert_output!(test_env.cmd().args(["auth", "remove"]));
+    assert_eq!(1, test_env.requests_served_by_mock_download_server());
+
+    let state: State =
+        serde_json::from_slice(&std::fs::read(test_env.root().join("state.json")).unwrap())
+            .unwrap();
+    assert!(state.authentication_token.is_none());
+}
diff --git a/crates/criticalup-cli/tests/cli/auth_set.rs b/crates/criticalup-cli/tests/cli/auth_set.rs
new file mode 100644
index 00000000..c3eaa3c5
--- /dev/null
+++ b/crates/criticalup-cli/tests/cli/auth_set.rs
@@ -0,0 +1,208 @@
+use crate::assert_output;
+use crate::utils::{stdin, TestEnvironment, MOCK_AUTH_TOKENS};
+use regex::Regex;
+use serde::Deserialize;
+use std::process::Command;
+
+const TOKEN_A: &str = MOCK_AUTH_TOKENS[0].0;
+const TOKEN_B: &str = MOCK_AUTH_TOKENS[1].0;
+const TOKEN_INVALID: &str =
"criticalup_token_invalid"; + +#[test] +fn help_message() { + let test_env = TestEnvironment::prepare(); + assert_output!(test_env.cmd().args(["auth", "set", "--help"])); +} + +#[test] +fn byte_zero_via_stdin() { + let test_env = TestEnvironment::prepare(); + + // Byte zero is not allowed in HTTP headers: we should get a proper error message instead of a + // panic, and no requests should be made to the server. + assert_output!(test_env.cmd().args(["auth", "set"]).stdin(stdin("\0"))); + assert_eq!(0, test_env.requests_served_by_mock_download_server()); +} + +// This is a macro instead of a function because otherwise insta detects the name of the helper +// function as the name of the test. +macro_rules! run_cmd { + ($expected:ident, $env:ident, $variant:ident, $token:ident) => { + let out = build_command(&$env, $variant, $token) + .output() + .expect("failed to execute command"); + match &$expected { + Some(expected) => { + // this regex replacement dance is required because this nested macro tests + // set is instantiating the test server twice which means each run gives + // a different local port. we replace with a stable port just for this test. + let re = Regex::new(r"127.0.0.1:\d+").expect("regex creation failed."); + let left_str = String::from_utf8(out.stderr.clone()) + .expect("string creation from bytes failed."); + let right_str = String::from_utf8(expected.stderr.clone()) + .expect("string creation from bytes failed."); + let left = re.replace_all(left_str.as_str(), "127.0.0.1:1312"); + let right = re.replace_all(right_str.as_str(), "127.0.0.1:1312"); + assert_eq!(left, right); + } + None => { + assert_output!(out.clone()); + $expected = Some(out); + } + } + }; +} + +macro_rules! 
test_matrix {
+    ($($module:ident => [$($variant:expr,)*],)*) => {
+        $(mod $module {
+            use std::process::Output;
+            use super::*;
+
+            #[test]
+            fn set_valid_token() {
+                let mut expected: Option<Output> = None;
+                for variant in [$($variant,)*] {
+                    let test_env = TestEnvironment::prepare();
+
+                    assert_token(&test_env, None);
+                    run_cmd!(expected, test_env, variant, TOKEN_A);
+                    assert_token(&test_env, Some(TOKEN_A));
+
+                    // The download server was called to validate the token.
+                    assert_eq!(1, test_env.requests_served_by_mock_download_server());
+                }
+            }
+
+            #[test]
+            fn set_valid_token_with_existing_token() {
+                let mut expected: Option<Output> = None;
+                for variant in [$($variant,)*] {
+                    let test_env = TestEnvironment::prepare();
+                    set_token(&test_env, TOKEN_A);
+
+                    run_cmd!(expected, test_env, variant, TOKEN_B);
+                    assert_token(&test_env, Some(TOKEN_B));
+
+                    // The download server was called by both the `set_token` function and what we want
+                    // to test (to validate the token).
+                    assert_eq!(2, test_env.requests_served_by_mock_download_server());
+                }
+            }
+
+            #[test]
+            fn set_invalid_token() {
+                let mut expected: Option<Output> = None;
+                for variant in [$($variant,)*] {
+                    let test_env = TestEnvironment::prepare();
+
+                    assert_token(&test_env, None);
+                    run_cmd!(expected, test_env, variant, TOKEN_INVALID);
+                    assert_token(&test_env, None);
+
+                    // The download server was called to validate the token.
+                    assert_eq!(1, test_env.requests_served_by_mock_download_server());
+                }
+            }
+
+            #[test]
+            fn set_invalid_token_with_existing_token() {
+                let mut expected: Option<Output> = None;
+                for variant in [$($variant,)*] {
+                    let test_env = TestEnvironment::prepare();
+
+                    set_token(&test_env, TOKEN_A);
+                    run_cmd!(expected, test_env, variant, TOKEN_INVALID);
+                    assert_token(&test_env, Some(TOKEN_A));
+
+                    // The download server was called by both the `set_token` function and what we want
+                    // to test (to validate the token).
+ assert_eq!(2, test_env.requests_served_by_mock_download_server()); + } + } + })* + }; +} + +test_matrix! { + via_args => [ + Variant::Args, + ], + via_stdin => [ + Variant::Stdin { newline: None, tty: false }, + Variant::Stdin { newline: Some("\n"), tty: false }, + Variant::Stdin { newline: Some("\r\n"), tty: false }, + ], + via_tty_eod => [ + Variant::Stdin { newline: None, tty: true }, + ], + // In these tests, the output might seem incorrect at a glance, because there's no newline + // between the prompt and the following line. That's actually correct though, because the + // newline will be part of stdin, as the user writes it. + via_tty_nl => [ + Variant::Stdin { newline: Some("\n"), tty: true }, + Variant::Stdin { newline: Some("\r\n"), tty: true }, + ], +} + +enum Variant { + Args, + Stdin { + newline: Option<&'static str>, + tty: bool, + }, +} + +fn build_command(test_env: &TestEnvironment, variant: Variant, token: &str) -> Command { + let mut cmd = test_env.cmd(); + match variant { + Variant::Args => { + cmd.args(["auth", "set", token]); + } + Variant::Stdin { newline, tty } => { + cmd.args(["auth", "set"]); + cmd.stdin(stdin(&match newline { + Some(nl) => format!("{token}{nl}"), + None => token.into(), + })); + if tty { + cmd.env("CRITICALUP_TEST_MOCK_TTY", "1"); + } + } + }; + cmd +} + +#[track_caller] +fn assert_token(test_env: &TestEnvironment, expected: Option<&str>) { + #[derive(Deserialize)] + struct State { + authentication_token: Option, + } + + let actual = match std::fs::read(test_env.root().join("state.json")) { + Ok(contents) => { + serde_json::from_slice::(&contents) + .unwrap() + .authentication_token + } + Err(err) if err.kind() == std::io::ErrorKind::NotFound => None, + Err(err) => panic!("failed to get state file: {err}"), + }; + + assert_eq!(expected, actual.as_deref()); +} + +fn set_token(test_env: &TestEnvironment, token: &str) { + assert_token(test_env, None); + + // We shouldn't write directly to state.json, as in the test we don't 
know which other params + // are required. Let `auth set` initialize the state instead, and ensure it worked. + let out = test_env + .cmd() + .args(["auth", "set", token]) + .output() + .unwrap(); + assert!(out.status.success()); + assert_token(test_env, Some(token)); +} diff --git a/crates/criticalup-cli/tests/cli/binary_proxies.rs b/crates/criticalup-cli/tests/cli/binary_proxies.rs new file mode 100644 index 00000000..93f646fe --- /dev/null +++ b/crates/criticalup-cli/tests/cli/binary_proxies.rs @@ -0,0 +1,105 @@ +use crate::assert_output; +use crate::utils::TestEnvironment; +use std::io::Write; +use std::path::Path; +use std::process::{Command, Stdio}; +use tempfile::tempdir; + +const PROJECT_MANIFEST: &str = " +manifest-version = 1 + +[products.ferrocene] +release = \"nightly\" +packages = [\"rustc\"] +"; +// This is specific to the ferrocene product defined in the manifest above. +const INSTALLATION_ID: &str = "1f67f84fa2c0e3d1b99bf72f971b7a10eef29d91b50d9d9f82371c659eff2f0a"; + +#[test] +fn invoking_outside_of_project() { + let test_env = TestEnvironment::prepare(); + assert_output!(test_env.binary_proxy("rustc")); +} + +#[test] +fn invoking_inside_of_project_with_no_installed_proxy() { + let test_env = TestEnvironment::prepare(); + + let current_dir = tempdir().unwrap(); + std::fs::write( + current_dir.path().join("criticalup.toml"), + PROJECT_MANIFEST.as_bytes(), + ) + .unwrap(); + + assert_output!(test_env + .binary_proxy("sample") + .current_dir(current_dir.path())); +} + +#[test] +fn invoking_inside_of_installed_project() { + let test_env = TestEnvironment::prepare(); + + let current_dir = tempdir().unwrap(); + std::fs::write( + current_dir.path().join("criticalup.toml"), + PROJECT_MANIFEST.as_bytes(), + ) + .unwrap(); + + // Create a sample state file referencing the binary proxy. 
+ std::fs::write( + test_env.root().join("state.json"), + serde_json::json!({ + "version": 1, + "installations": { + INSTALLATION_ID: { + "manifests": ["/path/to/manifest/a", "/path/to/manifest/b"], + "binary_proxies": { + "sample": "bin/sample", + }, + }, + }, + }) + .to_string() + .as_bytes(), + ) + .unwrap(); + + // Create a sample binary. + compile_to( + &test_env + .root() + .join("toolchains") + .join(INSTALLATION_ID) + .join("bin") + .join("sample"), + r#"fn main() { println!("proxies work!"); }"#, + ); + + assert_output!(test_env + .binary_proxy("sample") + .current_dir(current_dir.path())); +} + +pub(crate) fn compile_to(dest: &Path, source: &str) { + if let Some(parent) = dest.parent() { + std::fs::create_dir_all(parent).unwrap(); + } + + let mut rustc = Command::new("rustc") + .arg("-") + .arg("-o") + .arg(dest) + .stdin(Stdio::piped()) + .spawn() + .unwrap(); + + let mut stdin = rustc.stdin.take().unwrap(); + stdin.write_all(source.as_bytes()).unwrap(); + drop(stdin); + + let status = rustc.wait().unwrap(); + assert!(status.success()); +} diff --git a/crates/criticalup-cli/tests/cli/clean.rs b/crates/criticalup-cli/tests/cli/clean.rs new file mode 100644 index 00000000..152a62a1 --- /dev/null +++ b/crates/criticalup-cli/tests/cli/clean.rs @@ -0,0 +1,254 @@ +use crate::assert_output; +use crate::utils::TestEnvironment; +use serde_json::{json, Value}; +use std::fs; +use std::fs::File; +use std::io::{BufReader, Write}; + +#[test] +fn help_message() { + let env = TestEnvironment::prepare(); + assert_output!(env.cmd().args(["clean", "--help"])); +} + +#[test] +fn clean_deletes_only_unused_installations() { + let test_env = TestEnvironment::prepare(); + + let installation_id_1 = "installation_id_1"; + let installation_id_2 = "installation_id_2"; + let installation_id_3 = "installation_id_3"; + + let root = test_env.root().join("state.json"); + let mut state_file = std::fs::File::create(&root).unwrap(); + let content = json!({ + "version": 1, + 
"authentication_token": "criticalup_token_45_hahaha", + "installations": { + installation_id_1: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [ + "/path/to/proj/1/criticalup.toml", + "/path/to/proj/2/criticalup.toml" + ] + }, + installation_id_2: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [] + }, + installation_id_3: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/rustc" + }, + "manifests": [] + } + } + }) + .to_string(); + state_file.write_all(content.as_bytes()).unwrap(); + + assert_output!(test_env.cmd().args(["clean"])); + + let state_file_actual: Value = + serde_json::from_reader(BufReader::new(File::open(&root).unwrap())).unwrap(); + // "installation_id_2" is not present. + assert_eq!( + state_file_actual.pointer(format!("/installations/{}", installation_id_2).as_str()), + None + ); + // "installation_id_3" is not present. + assert_eq!( + state_file_actual.pointer(format!("/installations/{}", installation_id_3).as_str()), + None + ); + // "installation_id_1" is still present with correct values. 
+ assert_eq!( + state_file_actual + .pointer(format!("/installations/{}", installation_id_1).as_str()) + .unwrap(), + &json!({ + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [ + "/path/to/proj/1/criticalup.toml", + "/path/to/proj/2/criticalup.toml" + ] + }) + ); +} + +#[test] +fn clean_deletes_only_unused_installations_also_from_disk() { + let test_env = TestEnvironment::prepare(); + let root = test_env.root(); + let toolchains_dir = root.join("toolchains"); + fs::create_dir_all(&toolchains_dir).unwrap(); + + let installation_id_1 = "installation_id_1"; + let installation_id_2 = "installation_id_2"; + let installation_id_3 = "installation_id_3"; + + let state_file_in_root = root.join("state.json"); + let mut state_file = std::fs::File::create(&state_file_in_root).unwrap(); + let content = json!({ + "version": 1, + "authentication_token": "criticalup_token_45_hahaha", + "installations": { + installation_id_1: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [ + "/path/to/proj/1/criticalup.toml", + "/path/to/proj/2/criticalup.toml" + ] + }, + installation_id_2: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [] + }, + installation_id_3: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/rustc" + }, + "manifests": [] + } + } + }) + .to_string(); + state_file.write_all(content.as_bytes()).unwrap(); + + // Create the corresponding physical directories of installations. + // TODO: We have to generate these by running `install` command, once tests for those are setup. 
+ fs::create_dir_all(toolchains_dir.join(installation_id_1)).unwrap(); + fs::create_dir_all(toolchains_dir.join(installation_id_2)).unwrap(); + fs::create_dir_all(toolchains_dir.join(installation_id_3)).unwrap(); + + assert!(toolchains_dir.join(installation_id_1).exists()); + assert!(toolchains_dir.join(installation_id_2).exists()); + assert!(toolchains_dir.join(installation_id_3).exists()); + + // Run the `clean` command. + assert_output!(test_env.cmd().args(["clean"])); + + // Test the actual values. + let state_file_actual: Value = + serde_json::from_reader(BufReader::new(File::open(&state_file_in_root).unwrap())).unwrap(); + + // "installation_id_2" is not present. + assert_eq!( + state_file_actual.pointer("/installations/installation_id_2"), + None + ); + // "installation_id_3" is not present. + assert_eq!( + state_file_actual.pointer("/installations/installation_id_3"), + None + ); + // "installation_id_1" is still present with correct values. + assert_eq!( + state_file_actual + .pointer("/installations/installation_id_1") + .unwrap(), + &json!({ + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [ + "/path/to/proj/1/criticalup.toml", + "/path/to/proj/2/criticalup.toml" + ] + }) + ); + + assert!(toolchains_dir.join(installation_id_1).exists()); + assert!(!toolchains_dir.join(installation_id_2).exists()); // Does not exist. + assert!(!toolchains_dir.join(installation_id_3).exists()); // Does not exist. 
+} + +#[test] +fn removes_unused_installations_from_disk_that_do_not_have_state() { + let test_env = TestEnvironment::prepare(); + let root = test_env.root(); + let toolchains_dir = root.join("toolchains"); + fs::create_dir_all(&toolchains_dir).unwrap(); + + let installation_id_1 = "installation_id_1"; + let installation_id_2 = "installation_id_2"; + let installation_id_3 = "installation_id_3"; // No State, only directory + + let state_file_in_root = root.join("state.json"); + let mut state_file = std::fs::File::create(&state_file_in_root).unwrap(); + let content = json!({ + "version": 1, + "authentication_token": "criticalup_token_45_hahaha", + "installations": { + installation_id_1: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [ + "/path/to/proj/1/criticalup.toml", + "/path/to/proj/2/criticalup.toml" + ] + }, + installation_id_2: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [] + } + } + }) + .to_string(); + state_file.write_all(content.as_bytes()).unwrap(); + + // Create the corresponding physical directories of installations. + // TODO: We have to generate these by running `install` command, once tests for those are setup. + fs::create_dir_all(toolchains_dir.join(installation_id_1)).unwrap(); + fs::create_dir_all(toolchains_dir.join(installation_id_2)).unwrap(); + fs::create_dir_all(toolchains_dir.join(installation_id_3)).unwrap(); + + assert!(toolchains_dir.join(installation_id_1).exists()); + assert!(toolchains_dir.join(installation_id_2).exists()); + assert!(toolchains_dir.join(installation_id_3).exists()); + + // Run the `clean` command. + assert_output!(test_env.cmd().args(["clean"])); + + // Test the actual values. + let state_file_actual: Value = + serde_json::from_reader(BufReader::new(File::open(&state_file_in_root).unwrap())).unwrap(); + + // "installation_id_2" is not present. 
+ assert_eq!( + state_file_actual.pointer("/installations/installation_id_2"), + None + ); + // "installation_id_1" is still present with correct values. + assert_eq!( + state_file_actual + .pointer("/installations/installation_id_1") + .unwrap(), + &json!({ + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [ + "/path/to/proj/1/criticalup.toml", + "/path/to/proj/2/criticalup.toml" + ] + }) + ); + + assert!(toolchains_dir.join(installation_id_1).exists()); + assert!(!toolchains_dir.join(installation_id_2).exists()); // Does not exist. + assert!(!toolchains_dir.join(installation_id_3).exists()); // Does not exist. +} diff --git a/crates/criticalup-cli/tests/cli/install.rs b/crates/criticalup-cli/tests/cli/install.rs new file mode 100644 index 00000000..6d228c56 --- /dev/null +++ b/crates/criticalup-cli/tests/cli/install.rs @@ -0,0 +1,125 @@ +use crate::assert_output; +use crate::utils::{auth_set_with_valid_token, construct_toolchains_product_path, TestEnvironment}; +use serde_json::json; +use std::io::Write; + +#[test] +fn help_message() { + let test_env = TestEnvironment::prepare(); + assert_output!(test_env.cmd().args(["install", "--help"])); +} + +#[test] +fn already_installed_toolchain_should_not_throw_error() { + let test_env = TestEnvironment::prepare(); + + let mut current_dir = std::env::current_dir().unwrap(); + current_dir.push("tests/resources/criticalup.toml"); + let manifest_path = current_dir.to_str().unwrap(); + + // Generate the manifest object so we can get the installation id hash. + let manifest = + criticalup_core::project_manifest::ProjectManifest::load(current_dir.as_path()).unwrap(); + let installation_id = manifest.products()[0].installation_id(); + + // Generate and write state.json file because our handy functions like + // state::update_installation_manifests() check for the state file as well as + // existing installation directories. 
+ // + // This is brittle on subject to criticalup.toml changes in the tests/resource but right now + // `TestEnvironment` in this crate does not support constructing environment with State. + // So, we are resorting to creating this state.json by hand. Once the two environments for the + // test utils are merged, we can use the State API. + let root = test_env.root().join("state.json"); + let mut state_file = std::fs::File::create(root).unwrap(); + // 6bb4fe4c8205d18a8eaf0b852c3b29f65805fd80e528af74cf2f1463a911e40e is the hash of the + // current criticalup.toml's product contents which we use here to create state.json by + // dynamically calculating it from the criticalup.toml. + let content = json!( + { "version": 1, + "authentication_token": "criticalup_token_45_hahaha", + "installations": { + &installation_id.0: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [ + "/path/to/criticalup.toml" + ] + } + } + } + ) + .to_string(); + + state_file.write_all(content.as_bytes()).unwrap(); + + // Manually create the toolchain directory which allows us to skip installation. + let product_toolchain_dir = + construct_toolchains_product_path(&test_env, installation_id.0.as_str()); + std::fs::create_dir_all(product_toolchain_dir).unwrap(); + + // Running install command should skip installation. + // See the `filter()` used in utils::assert_output macro for this test. 
+ assert_output!(test_env.cmd().args(["install", "--project", manifest_path])) +} + +/// Sample test to run the command in test environment without any other computation +#[test] +#[ignore = "Testing `install` subcommand will be enabled at a later date"] +fn run_install() { + let test_env = TestEnvironment::prepare(); + + let mut current_dir = std::env::current_dir().unwrap(); + current_dir.push("tests/resources/criticalup.toml"); + let manifest_path = current_dir.to_str().unwrap(); + + run_install_cmd(&test_env, manifest_path); +} + +#[test] +#[ignore = "Testing `install` subcommand will be enabled at a later date"] +fn product_dirs_are_created() { + let test_env = TestEnvironment::prepare(); + + let mut current_dir = + std::env::current_dir().expect("Could not read current directory in the test."); + current_dir.push("tests/resources/criticalup.toml"); + let manifest_path = current_dir.to_str().expect("conversion to str failed"); + + run_install_cmd(&test_env, manifest_path); + + let ex1 = construct_toolchains_product_path( + &test_env, + "791180e94af037a98410323424f9bfda82d82fdbc991a9cd8da30a091459f5f5", + ); + assert!(ex1.exists()); + + let ex2 = construct_toolchains_product_path( + &test_env, + "ceac76fcf73a702d9349a7064679606f90c4d8db09a763c9fd4d5acd9059544d", + ); + assert!(ex2.exists()); + + let ex3 = construct_toolchains_product_path( + &test_env, + "723bbd3fb691ce24dc6d59afc5f9d4caabce6b359ac512784c057bef7025b095", + ); + assert!(ex3.exists()); +} + +fn run_install_cmd(test_env: &TestEnvironment, manifest_path: &str) { + auth_set_with_valid_token(test_env); // we need auth set before install command + + let output = test_env + .cmd() + .args(["install", "--project", manifest_path]) + .output() + .unwrap(); + + assert!( + output.status.success(), + "{}", + String::from_utf8_lossy(&output.stderr) + ); +} diff --git a/crates/criticalup-cli/tests/cli/main.rs b/crates/criticalup-cli/tests/cli/main.rs new file mode 100644 index 00000000..57253fe7 --- /dev/null 
+++ b/crates/criticalup-cli/tests/cli/main.rs @@ -0,0 +1,13 @@ +#![allow(clippy::wrong_self_convention)] + +mod auth; +mod auth_remove; +mod auth_set; +mod binary_proxies; +mod clean; +mod install; +mod remove; +mod root; +mod run; +mod utils; +mod which; diff --git a/crates/criticalup-cli/tests/cli/remove.rs b/crates/criticalup-cli/tests/cli/remove.rs new file mode 100644 index 00000000..ec06f610 --- /dev/null +++ b/crates/criticalup-cli/tests/cli/remove.rs @@ -0,0 +1,70 @@ +use crate::assert_output; +use crate::utils::{construct_toolchains_product_path, TestEnvironment}; +use serde_json::{json, Value}; +use std::fs::File; +use std::io::{BufReader, Write}; + +#[test] +fn help_message() { + let test_env = TestEnvironment::prepare(); + assert_output!(test_env.cmd().args(["remove", "--help"])); +} + +#[test] +fn remove_deletes_only_manifest_from_list_and_dir() { + let test_env = TestEnvironment::prepare(); + let mut current_dir = std::env::current_dir().unwrap(); + current_dir.push("tests/resources/criticalup.toml"); + let manifest_path = current_dir.canonicalize().unwrap(); + + // Generate the manifest object so we can get the installation id hash. + let manifest = + criticalup_core::project_manifest::ProjectManifest::load(current_dir.as_path()).unwrap(); + let installation_id = manifest.products()[0].installation_id(); + + let state_file_path = test_env.root().join("state.json"); + let mut state_file = File::create(&state_file_path).unwrap(); + // 6bb4fe4c8205d18a8eaf0b852c3b29f65805fd80e528af74cf2f1463a911e40e is the hash of the + // current criticalup.toml's product contents which we use here to create state.json by + // dynamically calculating it from the criticalup.toml. 
+ let content = json!( + { "version": 1, + "authentication_token": "criticalup_token_45_hahaha", + "installations": { + &installation_id.0: { + "binary_proxies": { + "cargo": "/path/toolchains/bin/cargo" + }, + "manifests": [ + manifest_path + ] + } + } + } + ) + .to_string(); + + state_file.write_all(content.as_bytes()).unwrap(); + // Manually create the toolchain directory which allows us to skip installation. + let product_toolchain_dir = + construct_toolchains_product_path(&test_env, installation_id.0.as_str()); + std::fs::create_dir_all(&product_toolchain_dir).unwrap(); + + assert_output!(test_env + .cmd() + .args(["remove", "--project", manifest_path.to_str().unwrap()])); + + let state_file_actual: Value = + serde_json::from_reader(BufReader::new(File::open(&state_file_path).unwrap())).unwrap(); + + // Installation's manifest is an empty array because the manifest path was removed. + assert_eq!( + state_file_actual + .pointer(format!("/installations/{}/manifests", installation_id.0).as_str()) + .unwrap(), + &json!([]) + ); + + // Directory is gone. 
+ assert!(!product_toolchain_dir.exists()); +} diff --git a/crates/criticalup-cli/tests/cli/root.rs b/crates/criticalup-cli/tests/cli/root.rs new file mode 100644 index 00000000..e92b8042 --- /dev/null +++ b/crates/criticalup-cli/tests/cli/root.rs @@ -0,0 +1,37 @@ +use crate::assert_output; +use crate::utils::TestEnvironment; + +#[test] +fn no_args() { + let test_env = TestEnvironment::prepare(); + assert_output!(test_env.cmd()); +} + +#[test] +fn help_flags() { + let test_env = TestEnvironment::prepare(); + + let no_args = test_env.cmd().output().unwrap(); + let help_short = test_env.cmd().arg("-h").output().unwrap(); + let help_long = test_env.cmd().arg("--help").output().unwrap(); + + assert_eq!(&no_args.stdout, &help_short.stdout); + assert_eq!(&no_args.stdout, &help_long.stdout); + + assert_eq!(&no_args.stderr, &help_short.stderr); + assert_eq!(&no_args.stderr, &help_long.stderr); + + assert!(help_short.status.success()); + assert!(help_long.status.success()); +} + +#[test] +fn version_flags() { + let test_env = TestEnvironment::prepare(); + + let version_short = test_env.cmd().arg("-V").output().unwrap(); + let version_long = test_env.cmd().arg("--version").output().unwrap(); + assert_eq!(version_long, version_short); + + assert_output!(test_env.cmd().arg("--version")); +} diff --git a/crates/criticalup-cli/tests/cli/run.rs b/crates/criticalup-cli/tests/cli/run.rs new file mode 100644 index 00000000..2bdb4ece --- /dev/null +++ b/crates/criticalup-cli/tests/cli/run.rs @@ -0,0 +1,116 @@ +use crate::assert_output; +use crate::utils::TestEnvironment; +use criticalup_core::project_manifest::ProjectManifest; +use std::io::Write; +use tempfile::tempdir; + +#[test] +fn help_message() { + let env = TestEnvironment::prepare(); + assert_output!(env.cmd().args(["run", "--help"])); +} + +#[test] +fn simple_run_command_manifest_not_found() { + // Manifest does not exist. 
+ let test_env = TestEnvironment::prepare(); + assert_output!(test_env + .cmd() + .args(["run", "--project", "/path/to/criticalup.toml", "rustc"])); +} + +#[test] +fn simple_run_command_missing_package() { + // Make sure the project manifest exists, but the package 'rustc' does not. + let test_env = TestEnvironment::prepare(); + let current_dir = tempdir().unwrap(); + let manifest = current_dir.path().join("criticalup.toml"); + + let project_manifest = " + manifest-version = 1 + [products.ferrocene] + release = \"nightly\" + packages = [\"sample\"] + "; + std::fs::write(&manifest, project_manifest.as_bytes()).unwrap(); + assert_output!(test_env + .cmd() + .args(["run", "--project", manifest.to_str().unwrap(), "rustc"])); +} + +#[test] +#[ignore = "This test will be improved upon at a later date"] +fn simple_run_command_existing_package() { + let test_env = TestEnvironment::prepare(); + let current_dir = tempdir().unwrap(); + std::fs::create_dir_all(test_env.root().join("bin")).unwrap(); + + let project_manifest = " + manifest-version = 1 + [products.ferrocene] + release = \"nightly\" + packages = [\"sample\"] + "; + std::fs::write( + current_dir.path().join("criticalup.toml"), + project_manifest.as_bytes(), + ) + .unwrap(); + + let installation_id = + ProjectManifest::load(current_dir.path().join("criticalup.toml").as_path()) + .unwrap() + .products() + .first() + .unwrap() + .installation_id() + .0; + // Create a sample state file referencing the binary proxy. + std::fs::write( + test_env.root().join("state.json"), + serde_json::json!({ + "version": 1, + "installations": { + &installation_id: { + "manifests": ["/path/to/manifest/a", "/path/to/manifest/b"], + "binary_proxies": { + "sample": "bin/sample", + }, + }, + }, + }) + .to_string() + .as_bytes(), + ) + .unwrap(); + + // Create a sample binary. 
+ crate::binary_proxies::compile_to( + &test_env + .root() + .join("toolchains") + .join(&installation_id) + .join("bin") + .join("sample"), + r#"fn main() { println!("success: sample binary was called via run command"); }"#, + ); + let mut f = std::fs::File::create(test_env.root().join("bin/sample")).unwrap(); + f.write_all(b"").unwrap(); + + let mut c = std::process::Command::new( + test_env + .root() + .join("toolchains") + .join(&installation_id) + .join("bin") + .join("sample") + .as_os_str() + .to_str() + .unwrap(), + ); + std::io::stdout() + .write_all(&c.output().unwrap().stdout) + .unwrap(); + + // assert_output!(test_env.cmd().args(["run", "sample"])); +} diff --git a/crates/criticalup-cli/tests/cli/utils.rs b/crates/criticalup-cli/tests/cli/utils.rs new file mode 100644 index 00000000..d2c609ed --- /dev/null +++ b/crates/criticalup-cli/tests/cli/utils.rs @@ -0,0 +1,249 @@ +use criticaltrust::keys::{EphemeralKeyPair, KeyAlgorithm, KeyPair, KeyRole, PublicKey}; +use criticaltrust::manifests::{Release, ReleaseManifest}; +use criticaltrust::signatures::SignedPayload; +use mock_download_server::{AuthenticationToken, MockServer}; +use std::borrow::Cow; +use std::io::{Seek, Write}; +use std::path::{Path, PathBuf}; +use std::process::{Command, Output, Stdio}; +use tempfile::TempDir; + +pub(crate) const MOCK_AUTH_TOKENS: &[(&str, AuthenticationToken)] = &[ + ( + "criticalup_token_000000000", + AuthenticationToken { + name: Cow::Borrowed("dummy token 1"), + organization_name: Cow::Borrowed("internal"), + expires_at: None, + }, + ), + ( + "criticalup_token_111111111", + AuthenticationToken { + name: Cow::Borrowed("dummy token 2"), + organization_name: Cow::Borrowed("ferrous-systems"), + expires_at: Some(Cow::Borrowed("2022-01-01T00:00:00+00:00")), + }, + ), + ( + "criticalup_token_222222222", + AuthenticationToken { + name: Cow::Borrowed("dummy token 3"), + organization_name: Cow::Borrowed("ferrous-systems"), + expires_at: 
Some(Cow::Borrowed("2022-01-01T00:00:00+00:00")), + }, + ), +]; + +// This can't be a const since we call `new()` +pub(crate) fn mock_release_manifests() -> Vec<(&'static str, &'static str, ReleaseManifest)> { + vec![( + "ferrocene", + "dev", + ReleaseManifest { + version: criticaltrust::manifests::ManifestVersion, + signed: SignedPayload::new(&Release { + product: "ferrocene".into(), + release: "dev".into(), + commit: "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad".into(), + packages: vec![], + }) + .unwrap(), + }, + )] +} + +pub(crate) struct TestEnvironment { + root: TempDir, + trust_root: PublicKey, + server: MockServer, + customer_portal_url: String, +} + +impl TestEnvironment { + pub(crate) fn prepare() -> Self { + let keypair = EphemeralKeyPair::generate( + KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, + KeyRole::Root, + None, + ) + .unwrap(); + + let root = TempDir::new_in(std::env::current_dir().unwrap()).unwrap(); + + TestEnvironment { + root, + trust_root: keypair.public().clone(), + server: setup_mock_server(&keypair), + customer_portal_url: "https://customers-test.ferrocene.dev".into(), + } + } + + pub(crate) fn root(&self) -> &Path { + self.root.path() + } + + pub(crate) fn cmd(&self) -> Command { + let mut command = Command::new(env!("CARGO_BIN_EXE_criticalup-test")); + command.env("CRITICALUP_ROOT", self.root.path()); + command.env("CRITICALUP_TEST_DOWNLOAD_SERVER_URL", self.server.url()); + command.env( + "CRITICALUP_TEST_CUSTOMER_PORTAL_URL", + &self.customer_portal_url, + ); + command.env( + "CRITICALUP_TEST_TRUST_ROOT", + serde_json::to_string(&self.trust_root).unwrap(), + ); + command.env("CRITICALUP_TESTING_IN_PROGRESS", "1"); + command + } + + pub(crate) fn binary_proxy(&self, name: &str) -> Command { + let mut command = self.cmd(); + command.env("CRITICALUP_TEST_OVERRIDE_ARG0", name); + command + } + + pub(crate) fn requests_served_by_mock_download_server(&self) -> usize { + self.server.served_requests_count() + } + + 
pub(crate) fn revoke_token(&self, token: &str) { + self.server.edit_data(|data| { + data.tokens.remove(token); + }); + } +} + +pub(crate) fn stdin(content: &str) -> Stdio { + let mut file = tempfile::tempfile().expect("failed to create temporary file"); + file.write_all(content.as_bytes()) + .expect("failed to write stdin"); + file.rewind().unwrap(); + file.into() +} + +fn setup_mock_server(keypair: &dyn KeyPair) -> MockServer { + let mut server = mock_download_server::new(); + for (token, data) in MOCK_AUTH_TOKENS { + server = server.add_token(token, data.clone()); + } + for (product, release, mut manifest) in mock_release_manifests() { + manifest.signed.add_signature(keypair).unwrap(); + server = + server.add_release_manifest(product.to_string(), release.to_string(), manifest.clone()); + } + server.start() +} + +pub(crate) trait IntoOutput { + fn into_output(&mut self) -> Output; +} + +impl IntoOutput for Command { + fn into_output(&mut self) -> Output { + self.output().expect("failed to execute command") + } +} + +impl IntoOutput for Output { + fn into_output(&mut self) -> Output { + self.clone() + } +} + +#[macro_export] +macro_rules! 
assert_output { + ($out:expr) => {{ + use $crate::utils::IntoOutput; + + let repr = $crate::utils::output_repr(&$out.into_output()); + let mut settings = insta::Settings::clone_current(); + settings.set_snapshot_path("../snapshots"); + + // using tempfile in tests changes the output tmp dir on every run + // so, this is to normalize the data first + #[cfg(target_os = "linux")] + settings.add_filter( + "/.*tmp.*/toolchains/(?[_a-zA-Z0-9]+)/?", + "/path/to/toolchain/installation/$ins_id/", + ); + #[cfg(target_os = "macos")] + settings.add_filter( + "/.*/toolchains/(?[_a-zA-Z0-9]+)/?", + "/path/to/toolchain/installation/$ins_id/", + ); + #[cfg(target_os = "windows")] + settings.add_filter( + r"[a-zA-Z]:\\.*\\toolchains\\(?[_a-zA-Z0-9]+)\\?", + "/path/to/toolchain/installation/$ins_id/", + ); + + #[cfg(windows)] + settings.add_filter( + r"error: The system cannot find the path specified\. \(os error 3\)", + "error: No such file or directory (os error 2)", + ); + + #[cfg(windows)] + settings.add_filter( + r"caused by: The system cannot find the path specified\. 
\(os error 3\)", + "caused by: No such file or directory (os error 2)", + ); + + #[cfg(windows)] + settings.add_filter("exit code: ", "exit status: "); + #[cfg(windows)] + settings.add_filter("criticalup-test.exe", "criticalup-test"); + settings.bind(|| { + insta::assert_snapshot!(repr); + }); + }}; +} + +pub(crate) fn output_repr(output: &Output) -> String { + let mut snapshot = String::new(); + snapshot.push_str(&format!("exit: {}\n", output.status)); + + snapshot.push('\n'); + if output.stdout.is_empty() { + snapshot.push_str("empty stdout\n"); + } else { + snapshot.push_str("stdout\n------\n"); + snapshot.push_str(std::str::from_utf8(&output.stdout).expect("non-utf-8 stdout")); + snapshot.push_str("------\n"); + } + + snapshot.push('\n'); + if output.stderr.is_empty() { + snapshot.push_str("empty stderr\n"); + } else { + snapshot.push_str("stderr\n------\n"); + snapshot.push_str(std::str::from_utf8(&output.stderr).expect("non-utf-8 stderr")); + snapshot.push_str("------\n"); + } + + snapshot +} + +pub(crate) fn construct_toolchains_product_path(env: &TestEnvironment, sha: &str) -> PathBuf { + let toolchains_dir = "toolchains"; + let product_dir_name = sha; + let mut root = env.root().to_path_buf(); + root.push(toolchains_dir); + root.push(product_dir_name); + root +} + +pub(crate) fn auth_set_with_valid_token(env: &TestEnvironment) { + let second_token = MOCK_AUTH_TOKENS[0].0; + + assert!(env + .cmd() + .args(["auth", "set", second_token]) + .output() + .expect("sssss") + .status + .success()); +} diff --git a/crates/criticalup-cli/tests/cli/which.rs b/crates/criticalup-cli/tests/cli/which.rs new file mode 100644 index 00000000..01292fff --- /dev/null +++ b/crates/criticalup-cli/tests/cli/which.rs @@ -0,0 +1,64 @@ +use crate::assert_output; +use crate::utils::{construct_toolchains_product_path, TestEnvironment}; +use criticalup_core::project_manifest::ProjectManifest; +use std::fs::File; + +#[test] +fn help_message() { + let test_env = 
TestEnvironment::prepare(); + assert_output!(test_env.cmd().args(["which", "--help"])); +} + +#[test] +fn which_run_binary_exists() { + let test_env = TestEnvironment::prepare(); + + let mut current_dir = + std::env::current_dir().expect("could not read current directory in the test."); + current_dir.push("tests/resources/criticalup-which.toml"); + + let manifest_path = current_dir.to_str().expect("conversion to str failed"); + + // generate the manifest object so we can get the installation id hash + let p = ProjectManifest::load(current_dir.as_path()).expect("could not load project manifest"); + let id_hash = p.products()[0].installation_id().0; + + // manually create the toolchain directory which allows us to skip installation + // TODO: when tests for `install` command are up, use that instead of manual creation + let product_toolchain_dir = construct_toolchains_product_path(&test_env, id_hash.as_str()); + let product_toolchain_bin_dir = product_toolchain_dir.join("bin"); + std::fs::create_dir_all(&product_toolchain_bin_dir) + .expect("could not create product directory"); + + // create a file "rustc" in the toolchain/.../bin + let _ = File::create(product_toolchain_bin_dir.join("rustc")).unwrap(); + + assert_output!(test_env + .cmd() + .args(["which", "rustc", "--project", manifest_path])); +} + +#[test] +fn which_run_binary_does_not_exists() { + let test_env = TestEnvironment::prepare(); + + let mut current_dir = + std::env::current_dir().expect("could not read current directory in the test."); + current_dir.push("tests/resources/criticalup-which.toml"); + + let manifest_path = current_dir.to_str().expect("conversion to str failed"); + + // generate the manifest object so we can get the installation id hash + let p = ProjectManifest::load(current_dir.as_path()).expect("could not load project manifest"); + let id_hash = p.products()[0].installation_id().0; + + // manually create the toolchain directory which allows us to skip installation + // TODO: when tests 
for `install` command are up, use that instead of manual creation + let product_toolchain_dir = construct_toolchains_product_path(&test_env, id_hash.as_str()); + let product_toolchain_bin_dir = product_toolchain_dir.join("bin"); + std::fs::create_dir_all(product_toolchain_bin_dir).expect("could not create product directory"); + + assert_output!(test_env + .cmd() + .args(["which", "rustc", "--project", manifest_path])); +} diff --git a/crates/criticalup-cli/tests/resources/criticalup-which.toml b/crates/criticalup-cli/tests/resources/criticalup-which.toml new file mode 100644 index 00000000..b97f5605 --- /dev/null +++ b/crates/criticalup-cli/tests/resources/criticalup-which.toml @@ -0,0 +1,8 @@ +# Manifest for test +manifest-version = 1 + +[products.ferrocene] +release = "nightly-2024-02-28" +packages = [ + "rustc-x86_64-unknown-linux-gnu", +] diff --git a/crates/criticalup-cli/tests/resources/criticalup.toml b/crates/criticalup-cli/tests/resources/criticalup.toml new file mode 100644 index 00000000..a0568fc8 --- /dev/null +++ b/crates/criticalup-cli/tests/resources/criticalup.toml @@ -0,0 +1,8 @@ +# Manifest for test +manifest-version = 1 + +[products.ferrocene] +release = "nightly-2024-02-28" +packages = [ + "ferrocene-self-test-x86_64-unknown-linux-gnu", +] diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__auth__help_message.snap new file mode 100644 index 00000000..06b1ef59 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth__help_message.snap @@ -0,0 +1,23 @@ +--- +source: crates/criticalup-cli/tests/cli/auth.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Show and change authentication with the download server + +Usage: + criticalup-test auth [COMMAND] + +Commands: + remove Remove the authentication token used to interact with the download server + set Set the authentication token used to interact with the download server + +Options: + 
-h, --help Print help +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth__invalid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth__invalid_token.snap new file mode 100644 index 00000000..f2685e8d --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth__invalid_token.snap @@ -0,0 +1,21 @@ +--- +source: crates/criticalup-cli/tests/cli/auth.rs +assertion_line: 24 +expression: repr + +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: failed to authenticate with the download server + +The authentication token could be missing, invalid or expired. +You can set a new authentication token by running: + + criticalup auth set + +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth__no_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth__no_token.snap new file mode 100644 index 00000000..defb0bef --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth__no_token.snap @@ -0,0 +1,21 @@ +--- +source: crates/criticalup-cli/tests/cli/auth.rs +assertion_line: 14 +expression: repr + +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: failed to authenticate with the download server + +The authentication token could be missing, invalid or expired. 
+You can set a new authentication token by running: + + criticalup auth set + +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth__token_with_expiry.snap b/crates/criticalup-cli/tests/snapshots/cli__auth__token_with_expiry.snap new file mode 100644 index 00000000..5f1f38d0 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth__token_with_expiry.snap @@ -0,0 +1,16 @@ +--- +source: crates/criticalup-cli/tests/cli/auth.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +valid authentication token present + +token name: dummy token 2 +organization name: ferrous-systems +expires at: 2022-01-01T00:00:00+00:00 +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth__token_without_expiry.snap b/crates/criticalup-cli/tests/snapshots/cli__auth__token_without_expiry.snap new file mode 100644 index 00000000..e84cf973 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth__token_without_expiry.snap @@ -0,0 +1,16 @@ +--- +source: crates/criticalup-cli/tests/cli/auth.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +valid authentication token present + +token name: dummy token 1 +organization name: internal +expires at: none +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_remove__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_remove__help_message.snap new file mode 100644 index 00000000..1dea80b5 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_remove__help_message.snap @@ -0,0 +1,19 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_remove.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Remove the authentication token used to interact with the download server + +Usage: + criticalup-test auth remove + +Options: + -h, --help Print help +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_remove__token_missing.snap 
b/crates/criticalup-cli/tests/snapshots/cli__auth_remove__token_missing.snap new file mode 100644 index 00000000..de251d3e --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_remove__token_missing.snap @@ -0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_remove.rs +assertion_line: 15 +expression: repr + +--- +exit: exit status: 0 + +empty stdout + +empty stderr + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_remove__token_present.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_remove__token_present.snap new file mode 100644 index 00000000..5b119c19 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_remove__token_present.snap @@ -0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_remove.rs +assertion_line: 38 +expression: repr + +--- +exit: exit status: 0 + +empty stdout + +empty stderr + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__byte_zero_via_stdin.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__byte_zero_via_stdin.snap new file mode 100644 index 00000000..7c465664 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__byte_zero_via_stdin.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 22 +expression: repr + +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: invalid authentication token provided +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__help_message.snap new file mode 100644 index 00000000..9afc6b8d --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__help_message.snap @@ -0,0 +1,22 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Set the authentication token used to interact with the download server + +Usage: + criticalup-test auth set [TOKEN] + 
+Arguments: + [TOKEN] Authentication token to use; if not provided, it will be read from stdin + +Options: + -h, --help Print help +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_invalid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_invalid_token.snap new file mode 100644 index 00000000..513cd97c --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_invalid_token.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 50 +expression: repr + +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: invalid authentication token provided +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_invalid_token_with_existing_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_invalid_token_with_existing_token.snap new file mode 100644 index 00000000..513cd97c --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_invalid_token_with_existing_token.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 50 +expression: repr + +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: invalid authentication token provided +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_valid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_valid_token.snap new file mode 100644 index 00000000..5737cb5b --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_valid_token.snap @@ -0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 50 +expression: repr + +--- +exit: exit status: 0 + +empty stdout + +empty stderr + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_valid_token_with_existing_token.snap 
b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_valid_token_with_existing_token.snap new file mode 100644 index 00000000..5737cb5b --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_args__set_valid_token_with_existing_token.snap @@ -0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 50 +expression: repr + +--- +exit: exit status: 0 + +empty stdout + +empty stderr + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_invalid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_invalid_token.snap new file mode 100644 index 00000000..513cd97c --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_invalid_token.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 50 +expression: repr + +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: invalid authentication token provided +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_invalid_token_with_existing_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_invalid_token_with_existing_token.snap new file mode 100644 index 00000000..513cd97c --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_invalid_token_with_existing_token.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 50 +expression: repr + +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: invalid authentication token provided +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_valid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_valid_token.snap new file mode 100644 index 00000000..5737cb5b --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_valid_token.snap @@ 
-0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 50 +expression: repr + +--- +exit: exit status: 0 + +empty stdout + +empty stderr + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_valid_token_with_existing_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_valid_token_with_existing_token.snap new file mode 100644 index 00000000..5737cb5b --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_stdin__set_valid_token_with_existing_token.snap @@ -0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +assertion_line: 50 +expression: repr + +--- +exit: exit status: 0 + +empty stdout + +empty stderr + diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_invalid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_invalid_token.snap new file mode 100644 index 00000000..611bc484 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_invalid_token.snap @@ -0,0 +1,14 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +Visit https://customers-test.ferrocene.dev/users/tokens to create a new token, then enter it below. 
+enter the authentication token: +error: invalid authentication token provided +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_invalid_token_with_existing_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_invalid_token_with_existing_token.snap new file mode 100644 index 00000000..611bc484 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_invalid_token_with_existing_token.snap @@ -0,0 +1,14 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +Visit https://customers-test.ferrocene.dev/users/tokens to create a new token, then enter it below. +enter the authentication token: +error: invalid authentication token provided +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_valid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_valid_token.snap new file mode 100644 index 00000000..bb04c644 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_valid_token.snap @@ -0,0 +1,13 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Visit https://customers-test.ferrocene.dev/users/tokens to create a new token, then enter it below. 
+enter the authentication token: +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_valid_token_with_existing_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_valid_token_with_existing_token.snap new file mode 100644 index 00000000..bb04c644 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_eod__set_valid_token_with_existing_token.snap @@ -0,0 +1,13 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Visit https://customers-test.ferrocene.dev/users/tokens to create a new token, then enter it below. +enter the authentication token: +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_invalid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_invalid_token.snap new file mode 100644 index 00000000..21e58524 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_invalid_token.snap @@ -0,0 +1,13 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +Visit https://customers-test.ferrocene.dev/users/tokens to create a new token, then enter it below. 
+enter the authentication token: error: invalid authentication token provided +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_invalid_token_with_existing_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_invalid_token_with_existing_token.snap new file mode 100644 index 00000000..21e58524 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_invalid_token_with_existing_token.snap @@ -0,0 +1,13 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +Visit https://customers-test.ferrocene.dev/users/tokens to create a new token, then enter it below. +enter the authentication token: error: invalid authentication token provided +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_valid_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_valid_token.snap new file mode 100644 index 00000000..6aba5a88 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_valid_token.snap @@ -0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Visit https://customers-test.ferrocene.dev/users/tokens to create a new token, then enter it below. 
+enter the authentication token: ------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_valid_token_with_existing_token.snap b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_valid_token_with_existing_token.snap new file mode 100644 index 00000000..6aba5a88 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__auth_set__via_tty_nl__set_valid_token_with_existing_token.snap @@ -0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/auth_set.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Visit https://customers-test.ferrocene.dev/users/tokens to create a new token, then enter it below. +enter the authentication token: ------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_inside_of_installed_project.snap b/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_inside_of_installed_project.snap new file mode 100644 index 00000000..0cca32b7 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_inside_of_installed_project.snap @@ -0,0 +1,14 @@ +--- +source: crates/criticalup-cli/tests/cli/binary_proxies.rs +assertion_line: 77 +expression: repr +--- +exit: exit status: 0 + +stdout +------ +proxies work! 
+------ + +empty stderr + diff --git a/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_inside_of_project_with_no_installed_proxy.snap b/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_inside_of_project_with_no_installed_proxy.snap new file mode 100644 index 00000000..f3d8b540 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_inside_of_project_with_no_installed_proxy.snap @@ -0,0 +1,16 @@ +--- +source: crates/criticalup-cli/tests/cli/binary_proxies.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: criticalup could not invoke the binary you requested + caused by: 'sample' is not installed for this project. + +Please make sure that the correct package for 'sample' is listed in the packages section of your project's criticalup.toml and run 'criticalup install' command again. + +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_outside_of_project.snap b/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_outside_of_project.snap new file mode 100644 index 00000000..b257e654 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__binary_proxies__invoking_outside_of_project.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/binary_proxies.rs +assertion_line: 8 +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: criticalup could not invoke the binary you requested + caused by: could not find a project manifest in the current or parent directories +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__clean__clean_deletes_only_unused_installations.snap b/crates/criticalup-cli/tests/snapshots/cli__clean__clean_deletes_only_unused_installations.snap new file mode 100644 index 00000000..d3f5da9b --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__clean__clean_deletes_only_unused_installations.snap @@ -0,0 +1,16 @@ +--- +source: 
crates/criticalup-cli/tests/cli/clean.rs +expression: repr +--- +exit: exit status: 1 + +stdout +------ +info: deleting unused installation installation_id_2 +info: deleting unused installation installation_id_3 +------ + +stderr +------ +error: No such file or directory (os error 2) +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__clean__clean_deletes_only_unused_installations_also_from_disk.snap b/crates/criticalup-cli/tests/snapshots/cli__clean__clean_deletes_only_unused_installations_also_from_disk.snap new file mode 100644 index 00000000..6373bca4 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__clean__clean_deletes_only_unused_installations_also_from_disk.snap @@ -0,0 +1,16 @@ +--- +source: crates/criticalup-cli/tests/cli/clean.rs +expression: repr +--- +exit: exit status: 0 + +stdout +------ +info: deleting unused installation installation_id_2 +info: deleting unused installation directory /path/to/toolchain/installation/installation_id_2/ +info: deleting unused installation installation_id_3 +info: deleting unused installation directory /path/to/toolchain/installation/installation_id_3/ +info: no untracked installation directories found +------ + +empty stderr diff --git a/crates/criticalup-cli/tests/snapshots/cli__clean__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__clean__help_message.snap new file mode 100644 index 00000000..b045cd63 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__clean__help_message.snap @@ -0,0 +1,18 @@ +--- +source: crates/criticalup-cli/tests/cli/clean.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Delete all unused and untracked installations + +Usage: + criticalup-test clean + +Options: + -h, --help Print help +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__clean__removes_unused_installations_from_disk_that_do_not_have_state.snap 
b/crates/criticalup-cli/tests/snapshots/cli__clean__removes_unused_installations_from_disk_that_do_not_have_state.snap new file mode 100644 index 00000000..ae001920 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__clean__removes_unused_installations_from_disk_that_do_not_have_state.snap @@ -0,0 +1,14 @@ +--- +source: crates/criticalup-cli/tests/cli/clean.rs +expression: repr +--- +exit: exit status: 0 + +stdout +------ +info: deleting unused installation installation_id_2 +info: deleting unused installation directory /path/to/toolchain/installation/installation_id_2/ +info: deleting untracked installation directory /path/to/toolchain/installation/installation_id_3/ +------ + +empty stderr diff --git a/crates/criticalup-cli/tests/snapshots/cli__install__already_installed_toolchain_should_not_throw_error.snap b/crates/criticalup-cli/tests/snapshots/cli__install__already_installed_toolchain_should_not_throw_error.snap new file mode 100644 index 00000000..b51986e0 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__install__already_installed_toolchain_should_not_throw_error.snap @@ -0,0 +1,13 @@ +--- +source: crates/criticalup-cli/tests/cli/install.rs +expression: repr +--- +exit: exit status: 0 + +stdout +------ +Skipping installation for product 'ferrocene' because it seems to be already installed. +If you want to reinstall it, please run 'criticalup remove' followed by 'criticalup install' command. 
+------ + +empty stderr diff --git a/crates/criticalup-cli/tests/snapshots/cli__install__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__install__help_message.snap new file mode 100644 index 00000000..c9b3d143 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__install__help_message.snap @@ -0,0 +1,19 @@ +--- +source: crates/criticalup-cli/tests/cli/install.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Install the toolchain for the given project based on the manifest `criticalup.toml` + +Usage: + criticalup-test install [OPTIONS] + +Options: + --project Path to the manifest `criticalup.toml` + -h, --help Print help +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__remove__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__remove__help_message.snap new file mode 100644 index 00000000..493281b9 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__remove__help_message.snap @@ -0,0 +1,19 @@ +--- +source: crates/criticalup-cli/tests/cli/remove.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Delete all the products specified in the manifest `criticalup.toml` + +Usage: + criticalup-test remove [OPTIONS] + +Options: + --project Path to the manifest `criticalup.toml` + -h, --help Print help +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__remove__remove_deletes_only_manifest_from_list_and_dir.snap b/crates/criticalup-cli/tests/snapshots/cli__remove__remove_deletes_only_manifest_from_list_and_dir.snap new file mode 100644 index 00000000..6e0ff13f --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__remove__remove_deletes_only_manifest_from_list_and_dir.snap @@ -0,0 +1,12 @@ +--- +source: crates/criticalup-cli/tests/cli/remove.rs +expression: repr +--- +exit: exit status: 0 + +stdout +------ +info: deleting installation 6bb4fe4c8205d18a8eaf0b852c3b29f65805fd80e528af74cf2f1463a911e40e +------ + +empty stderr diff --git 
a/crates/criticalup-cli/tests/snapshots/cli__root__no_args.snap b/crates/criticalup-cli/tests/snapshots/cli__root__no_args.snap new file mode 100644 index 00000000..842c8d13 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__root__no_args.snap @@ -0,0 +1,27 @@ +--- +source: crates/criticalup-cli/tests/cli/root.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +CriticalUp is the official tool to download and install Ferrocene + +Usage: + criticalup-test + +Commands: + auth Show and change authentication with the download server + install Install the toolchain for the given project based on the manifest `criticalup.toml` + clean Delete all unused and untracked installations + run Run a command for a given toolchain + remove Delete all the products specified in the manifest `criticalup.toml` + which Display which binary will be run for a given command + +Options: + -h, --help Print help + -V, --version Print version +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__root__version_flags.snap b/crates/criticalup-cli/tests/snapshots/cli__root__version_flags.snap new file mode 100644 index 00000000..4bbd84ef --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__root__version_flags.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/root.rs +assertion_line: 36 +expression: repr + +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +criticalup-test 0.0.0 +------ + diff --git a/crates/criticalup-cli/tests/snapshots/cli__run__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__run__help_message.snap new file mode 100644 index 00000000..b2a6eeef --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__run__help_message.snap @@ -0,0 +1,22 @@ +--- +source: crates/criticalup-cli/tests/cli/run.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Run a command for a given toolchain + +Usage: + criticalup-test run [OPTIONS] ... + +Arguments: + ... 
Command with possible args to run + +Options: + --project Path to the manifest `criticalup.toml` + -h, --help Print help +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__run__simple_run_command_manifest_not_found.snap b/crates/criticalup-cli/tests/snapshots/cli__run__simple_run_command_manifest_not_found.snap new file mode 100644 index 00000000..9986054f --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__run__simple_run_command_manifest_not_found.snap @@ -0,0 +1,13 @@ +--- +source: crates/criticalup-cli/tests/cli/run.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: failed to find canonical path for /path/to/criticalup.toml + caused by: No such file or directory (os error 2) +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__run__simple_run_command_missing_package.snap b/crates/criticalup-cli/tests/snapshots/cli__run__simple_run_command_missing_package.snap new file mode 100644 index 00000000..96c35652 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__run__simple_run_command_missing_package.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/run.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: 'rustc' is not installed for this project. + +Please make sure that the correct package for 'rustc' is listed in the packages section of your project's criticalup.toml and run 'criticalup install' command again. 
+ +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__which__help_message.snap b/crates/criticalup-cli/tests/snapshots/cli__which__help_message.snap new file mode 100644 index 00000000..d954d1c0 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__which__help_message.snap @@ -0,0 +1,22 @@ +--- +source: crates/criticalup-cli/tests/cli/which.rs +expression: repr +--- +exit: exit status: 0 + +empty stdout + +stderr +------ +Display which binary will be run for a given command + +Usage: + criticalup-test which [OPTIONS] + +Arguments: + Name of the binary to find the absolute path of + +Options: + --project Path to the manifest `criticalup.toml` + -h, --help Print help +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__which__which_run_binary_does_not_exists.snap b/crates/criticalup-cli/tests/snapshots/cli__which__which_run_binary_does_not_exists.snap new file mode 100644 index 00000000..7c490f71 --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__which__which_run_binary_does_not_exists.snap @@ -0,0 +1,15 @@ +--- +source: crates/criticalup-cli/tests/cli/which.rs +expression: repr +--- +exit: exit status: 1 + +empty stdout + +stderr +------ +error: 'rustc' is not installed for this project. + +Please make sure that the correct package for 'rustc' is listed in the packages section of your project's criticalup.toml and run 'criticalup install' command again. 
+ +------ diff --git a/crates/criticalup-cli/tests/snapshots/cli__which__which_run_binary_exists.snap b/crates/criticalup-cli/tests/snapshots/cli__which__which_run_binary_exists.snap new file mode 100644 index 00000000..9b7ea61c --- /dev/null +++ b/crates/criticalup-cli/tests/snapshots/cli__which__which_run_binary_exists.snap @@ -0,0 +1,13 @@ +--- +source: crates/criticalup-cli/tests/cli/which.rs +expression: repr +--- +exit: exit status: 0 + +stdout +------ +/path/to/toolchain/installation/eee0c78b0f09ca88069fa1b14c35a6e70ffbfcce50f6ffb1e567f26a76ff7e89/bin/rustc + +------ + +empty stderr diff --git a/crates/criticalup-core/Cargo.toml b/crates/criticalup-core/Cargo.toml new file mode 100644 index 00000000..375f7ca2 --- /dev/null +++ b/crates/criticalup-core/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "criticalup-core" +version = "0.0.0" +edition = "2021" + +build = "build.rs" + +[dependencies] +criticaltrust = { path = "../criticaltrust" } +log = "0.4.14" +reqwest = { version = "0.11", default-features = false, features = ["blocking", "json", "rustls-tls", "rustls-tls-native-roots"] } +serde = { version = "1.0.136", features = ["derive"] } +serde_json = "1.0.79" +thiserror = "1.0.30" +toml_edit = { version = "0.13.4", features = ["serde"] } +sha2 = { version = "0.10" } +dirs = { version = "5.0.1", default-features = false } + +[target.'cfg(target_os = "linux")'.dependencies] +nix = "0.23.1" + +[dev-dependencies] +mock-download-server = { path = "../mock-download-server" } +tempfile = "3.3.0" diff --git a/crates/criticalup-core/build.rs b/crates/criticalup-core/build.rs new file mode 100644 index 00000000..84f35814 --- /dev/null +++ b/crates/criticalup-core/build.rs @@ -0,0 +1,9 @@ +fn main() { + println!( + "cargo:rustc-env=TARGET={}", + std::env::var("TARGET").unwrap() + ); + + // Only re-execute the build script when the build script itself changes. 
+ println!("cargo:rerun-if-changed=build.rs"); +} diff --git a/crates/criticalup-core/src/binary_proxies.rs b/crates/criticalup-core/src/binary_proxies.rs new file mode 100644 index 00000000..1f20ea74 --- /dev/null +++ b/crates/criticalup-core/src/binary_proxies.rs @@ -0,0 +1,347 @@ +//! Binary proxies are binaries named after the tools included in Ferrocene (like rustc, rustdoc, +//! cargo, etc...), that check which criticalup installation to use before executing the actual +//! binary inside of the chosen criticalup installation. + +use std::collections::HashSet; +use std::path::Path; + +use crate::config::Config; +use crate::errors::BinaryProxyUpdateError; +use crate::state::State; + +/// Update the set of binary proxies to reflect the current state of things. This will: +/// +/// * Add any new binary proxy added to the state since the last update. +/// +/// * Remove any binary proxy not referenced in the state anymore. +/// +/// * Replace all binary proxy binaries with new copies if they point to a different +/// `proxy_binary`, to ensure they all point to the latest available version. This is likely to +/// occur after the user updates criticalup. +/// +pub fn update( + config: &Config, + state: &State, + proxy_binary: &Path, +) -> Result<(), BinaryProxyUpdateError> { + let mut expected_proxies = state + .all_binary_proxy_names() + .into_iter() + .collect::>(); + + let dir = &config.paths.proxies_dir; + let list_dir_error = |e| BinaryProxyUpdateError::ListDirectoryFailed(dir.into(), e); + match dir.read_dir() { + Ok(iter) => { + for entry in iter { + let entry = entry.map_err(list_dir_error)?; + + let entry_name = match entry.file_name().to_str().map(|s| s.to_string()) { + Some(name) => name, + None => { + // No binary proxy will have a non-UTF-8 name. 
+ remove_unexpected(&entry.path())?; + continue; + } + }; + + if expected_proxies.remove(&*entry_name) { + ensure_link(proxy_binary, &entry.path())?; + } else { + remove_unexpected(&entry.path())?; + } + } + } + // If the directory is missing we can skip trying to update its contents, as the next loop + // will then create all the proxies. + Err(err) if err.kind() == std::io::ErrorKind::NotFound => {} + Err(err) => return Err(list_dir_error(err)), + } + + for proxy in expected_proxies { + ensure_link(proxy_binary, &config.paths.proxies_dir.join(&proxy))?; + } + + Ok(()) +} + +#[cfg(unix)] +fn ensure_link(proxy_binary: &Path, target: &Path) -> Result<(), BinaryProxyUpdateError> { + let canonicalize = |path: &Path| { + std::fs::canonicalize(path) + .map_err(|e| BinaryProxyUpdateError::InspectFailed(path.into(), e)) + }; + + let should_create = match target.read_link() { + Ok(target_dest) => { + if canonicalize(proxy_binary)? == canonicalize(&target_dest)? { + false + } else { + remove_unexpected(target)?; + true + } + } + Err(err) if err.kind() == std::io::ErrorKind::InvalidInput => { + remove_unexpected(target)?; + true + } + Err(err) if err.kind() == std::io::ErrorKind::NotFound => true, + Err(err) => return Err(BinaryProxyUpdateError::InspectFailed(target.into(), err)), + }; + + if should_create { + if let Some(parent) = target.parent() { + std::fs::create_dir_all(parent).map_err(|e| { + BinaryProxyUpdateError::ParentDirectoryCreationFailed(parent.into(), e) + })?; + } + std::os::unix::fs::symlink(proxy_binary, target).map_err(|e| { + BinaryProxyUpdateError::SymlinkFailed { + source: proxy_binary.into(), + dest: target.into(), + inner: e, + } + })?; + } + + Ok(()) +} + +#[cfg(windows)] +fn ensure_link(proxy_binary: &Path, target: &Path) -> Result<(), BinaryProxyUpdateError> { + // We cannot use `canonicalize` safely here since it basically doesn't work on Windows. 
+ // For example, on even a relatively uncomplicated dev machine attempting to canonicalize a link + // between two files in the same folder on the same disk fails with + // + // ``` + // The file or directory is not a reparse point. + // ``` + // + // So, instead of checking to see if the link exists and is correct, we just blindly rewrite it. + if target.exists() { + remove_unexpected(target)?; + }; + + if let Some(parent) = target.parent() { + std::fs::create_dir_all(parent) + .map_err(|e| BinaryProxyUpdateError::ParentDirectoryCreationFailed(parent.into(), e))?; + } + + // We opt against symlinks on Windows. Many of our users are not on Windows 11 which does + // support unprivileged symlinks. + // + // On Windows 10, symlinks can be done by privileged users, or users with "Developer Mode" + // enabled, but not all of our users have that. + std::fs::hard_link(proxy_binary, target).map_err(|e| { + BinaryProxyUpdateError::SymlinkFailed { + source: proxy_binary.into(), + dest: target.into(), + inner: e, + } + })?; + + Ok(()) + } + + fn remove_unexpected(path: &Path) -> Result<(), BinaryProxyUpdateError> { + let result = if path.is_dir() { + std::fs::remove_dir_all(path) + } else { + std::fs::remove_file(path) + }; + match result { + Ok(()) => Ok(()), + Err(err) => Err(BinaryProxyUpdateError::UnexpectedPathRemovalFailed( + path.into(), + err, + )), + } + } + + #[cfg(test)] + mod tests { + use std::collections::BTreeMap; + use std::io::Write; + + use tempfile::{tempdir, NamedTempFile}; + + use criticaltrust::integrity::VerifiedPackage; + + use crate::project_manifest::InstallationId; + use crate::test_utils::TestEnvironment; + + use super::*; + + #[test] + fn test_update() { + let test_env = TestEnvironment::with().state().prepare(); + let root = test_env.root(); + let installation_dir = &test_env.config().paths.installation_dir; + let state = test_env.state(); + + // Installation 1, with only one project manifest.
+ let inst1 = InstallationId("1".into()); + std::fs::create_dir_all(installation_dir.clone().join("1")).unwrap(); + let inst1_first_manifest_path = root.join("proj/1/manifest"); + std::fs::create_dir_all(&inst1_first_manifest_path).unwrap(); + + // Installation 2, with two project manifests in different locations. + let inst2 = InstallationId("2".into()); + std::fs::create_dir_all(installation_dir.clone().join("2")).unwrap(); + let inst2_first_manifest_path = root.join("proj/2/manifest-1"); + std::fs::create_dir_all(&inst2_first_manifest_path).unwrap(); + // Another manifest for the same project. + let inst2_second_manifest_path = root.join("project/2/manifest-2"); + std::fs::create_dir_all(&inst2_second_manifest_path).unwrap(); + + let mut proxy1 = NamedTempFile::new_in(test_env.root()).unwrap(); + proxy1.write_all(b"proxied binary 1").unwrap(); + let proxy1 = proxy1.path(); + + let mut proxy2 = NamedTempFile::new_in(test_env.root()).unwrap(); + proxy2.write_all(b"proxied binary 2").unwrap(); + let proxy2 = proxy2.path(); + + // Add a first installation with a few binaries with manifest1. + state + .add_installation( + &inst1, + &verified_packages(&["bin1", "bin2"]), + &inst1_first_manifest_path, + test_env.config(), + ) + .unwrap(); + update(test_env.config(), state, proxy1).unwrap(); + assert_proxies(test_env.config(), proxy1, &["bin1", "bin2"]); + + // Add a second installation, ensure the new binary is added. + state + .add_installation( + &inst2, + &verified_packages(&["bin3"]), + &inst2_first_manifest_path, + test_env.config(), + ) + .unwrap(); + update(test_env.config(), state, proxy1).unwrap(); + assert_proxies(test_env.config(), proxy1, &["bin1", "bin2", "bin3"]); + + // Same installation but a different location of a manifest, which means that another + // project with the same manifest content. 
+ state + .add_installation( + &inst2, + &verified_packages(&[]), + &inst2_second_manifest_path, + test_env.config(), + ) + .unwrap(); + update(test_env.config(), state, proxy1).unwrap(); + assert_proxies(test_env.config(), proxy1, &["bin1", "bin2", "bin3"]); + + // Remove the first installation *and* change the path of the proxy binary (to simulate a + // new criticalup binary after an update). + state.remove_installation(&inst1); + update(test_env.config(), state, proxy2).unwrap(); + assert_proxies(test_env.config(), proxy2, &["bin3"]); + + // Remove the last installation to ensure all proxies are removed. + state.remove_installation(&inst2); + update(test_env.config(), state, proxy2).unwrap(); + assert_proxies(test_env.config(), proxy2, &[]); + + fn verified_packages(proxies: &[&str]) -> Vec { + let mut proxies_paths = BTreeMap::new(); + for proxy in proxies { + proxies_paths.insert(proxy.to_string(), format!("bin/{proxy}")); + } + + vec![VerifiedPackage { + product: String::new(), + package: String::new(), + proxies_paths, + }] + } + + #[track_caller] + fn assert_proxies(config: &Config, expected_proxy: &Path, expected: &[&str]) { + let expected_proxy_content = std::fs::read(expected_proxy).unwrap(); + + let mut found_proxies = Vec::new(); + for file in config.paths.proxies_dir.read_dir().unwrap() { + let file = file.unwrap().path(); + found_proxies.push(file.file_name().unwrap().to_str().unwrap().to_string()); + + // To check whether the proxy links to the right binary we read the content and + // compare it. We do this instead of (for example) checking the target of the + // symlink to make this test resilient to changes in how we create links.
+ let proxy_content = std::fs::read(&file).unwrap(); + assert_eq!( + expected_proxy_content, + proxy_content, + "wrong content for {}", + file.display() + ); + } + found_proxies.sort(); + + let mut expected_proxies = expected.to_vec(); + expected_proxies.sort(); + + assert_eq!(expected_proxies, found_proxies); + } + } + + #[test] + fn test_ensure_link() { + let dir = tempdir().unwrap(); + assert!(dir.path().is_absolute()); + + let create_file = |name: &str| { + let path = dir.path().join(name); + std::fs::write(&path, name.as_bytes()).unwrap(); + path + }; + + let source1 = create_file("source1"); + let source2 = create_file("source2"); + + // Test creating the link when no existing link was present. + let link1 = dir.path().join("link1"); + ensure_link(&source1, &link1).unwrap(); + assert_link(&source1, &link1); + + // Test calling the function again with the same inputs. + ensure_link(&source1, &link1).unwrap(); + assert_link(&source1, &link1); + + // Test replacing the link with a new target. + ensure_link(&source2, &link1).unwrap(); + assert_link(&source2, &link1); + + // Test creating a link when a non-link file exists in its place. + let link2 = create_file("link2"); + ensure_link(&source1, &link2).unwrap(); + assert_link(&source1, &link2); + + // Test creating a link when a directory with contents exists in its place. 
+ let link3 = dir.path().join("link3"); + std::fs::create_dir(&link3).unwrap(); + std::fs::write(link3.join("file"), b"").unwrap(); + ensure_link(&source1, &link3).unwrap(); + assert_link(&source1, &link3); + + #[track_caller] + fn assert_link(source: &Path, link: &Path) { + let source_content = std::fs::read(source).unwrap(); + let link_content = std::fs::read(link).unwrap(); + assert_eq!( + source_content, + link_content, + "{} doesn't link to {}", + link.display(), + source.display() + ); + } + } +} diff --git a/crates/criticalup-core/src/config/mod.rs b/crates/criticalup-core/src/config/mod.rs new file mode 100644 index 00000000..a31fa289 --- /dev/null +++ b/crates/criticalup-core/src/config/mod.rs @@ -0,0 +1,86 @@ +mod paths; + +use self::paths::Paths; +use crate::errors::Error; +use criticaltrust::keys::PublicKey; + +/// The `Config` struct holds all the configuration of criticalup. It's meant to be created early +/// and passed around the rest of the code. +pub struct Config { + /// Details about the binary. See [`WhitelabelConfig`] for more information. + pub whitelabel: WhitelabelConfig, + /// File system paths criticalup should access. The rest of the code should use the paths + /// provided by the struct instead of constructing their own. This is for `criticalup` + /// binary itself, and not for other tools outside this crate. + pub paths: Paths, +} + +impl Config { + /// Detect and load the criticalup configuration from the execution environment. 
+ pub fn detect(whitelabel: WhitelabelConfig) -> Result { + Self::detect_inner(whitelabel, None) + } + + fn detect_inner( + whitelabel: WhitelabelConfig, + root: Option, + ) -> Result { + let paths = Paths::detect(&whitelabel, root)?; + Ok(Self { whitelabel, paths }) + } + + #[cfg(test)] + pub(crate) fn test(root: std::path::PathBuf) -> Result { + Self::detect_inner(WhitelabelConfig::test(), Some(root)) + } +} + +/// CriticalUp supports the creation of multiple "whitelabeled" binaries, each with their own +/// configuration. Binaries are expected to configure their own details in this struct, and pass +/// it to the library. The configuration is not supposed to be dynamically set at runtime. +pub struct WhitelabelConfig { + /// Name of the program. This influences both the way the binary expects to be called, and the + /// name of the data directory on disk. + pub name: &'static str, + + /// User agent to use when making HTTP/HTTPS requests. + pub http_user_agent: &'static str, + /// URL of the download server criticalup should use. + pub download_server_url: String, + /// URL of the customer portal that user's of criticalup need to set tokens etc. + pub customer_portal_url: String, + + /// Public key used to verify all other public keys imported from the download server. + pub trust_root: PublicKey, + + /// Whether test mocking functionality should be enabled for this binary. Must be `false` on + /// all production criticalup builds, as it's supposed to be used only during tests. + pub test_mode: bool, +} + +impl WhitelabelConfig { + #[cfg(test)] + fn test() -> Self { + use criticaltrust::keys::newtypes::PublicKeyBytes; + use criticaltrust::keys::{KeyAlgorithm, KeyRole}; + + WhitelabelConfig { + name: "criticalup", + + http_user_agent: "criticalup test suite (https://github.com/ferrocene/criticalup)", + download_server_url: "http://0.0.0.0:0".into(), + customer_portal_url: "https://customers-dev.ferrocene.dev".into(), + + // Intentionally broken public key. 
If a test wants to use a real trust root it needs + // to override the key with a real one (ideally through TestEnvironment). + trust_root: PublicKey { + role: KeyRole::Root, + algorithm: KeyAlgorithm::Unknown, + expiry: None, + public: PublicKeyBytes::borrowed(&[]), + }, + + test_mode: true, + } + } +} diff --git a/crates/criticalup-core/src/config/paths.rs b/crates/criticalup-core/src/config/paths.rs new file mode 100644 index 00000000..58a0fb95 --- /dev/null +++ b/crates/criticalup-core/src/config/paths.rs @@ -0,0 +1,173 @@ +use crate::config::WhitelabelConfig; +use crate::errors::Error; +use std::env; +use std::path::{Path, PathBuf}; + +const DEFAULT_INSTALLATION_DIR_NAME: &str = "toolchains"; + +#[cfg_attr(test, derive(Debug, PartialEq))] +pub struct Paths { + pub(crate) state_file: PathBuf, + + pub proxies_dir: PathBuf, + pub installation_dir: PathBuf, + + #[cfg(test)] + pub(crate) root: PathBuf, +} + +impl Paths { + pub(super) fn detect( + whitelabel: &WhitelabelConfig, + root: Option, + ) -> Result { + let root = if let Some(root) = root { + if root != Path::new("") { + root + } else { + find_root(whitelabel).ok_or(Error::CouldNotDetectRootDirectory)? + } + } else { + find_root(whitelabel).ok_or(Error::CouldNotDetectRootDirectory)? 
+ }; + + Ok(Paths { + state_file: root.join("state.json"), + proxies_dir: root.join("bin"), + installation_dir: root.join(DEFAULT_INSTALLATION_DIR_NAME), + #[cfg(test)] + root, + }) + } +} + +fn find_root(whitelabel: &WhitelabelConfig) -> Option { + match env::var_os("CRITICALUP_ROOT") { + Some(val) if val.is_empty() => platform_specific_root(whitelabel), + Some(val) => Some(PathBuf::from(val)), + None => platform_specific_root(whitelabel), + } +} + +fn platform_specific_root(whitelabel: &WhitelabelConfig) -> Option { + dirs::data_dir().map(|v| v.join(whitelabel.name)) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::Path; + + fn not_empty(var: &Option) -> Option<&PathBuf> { + var.as_ref().filter(|path| !path.as_os_str().is_empty()) + } + + #[test] + fn test_calculated_paths() { + assert_eq!( + Paths { + state_file: "/opt/criticalup/state.json".into(), + proxies_dir: "/opt/criticalup/bin".into(), + installation_dir: "/opt/criticalup/toolchains".into(), + root: "/opt/criticalup".into() + }, + Paths::detect(&WhitelabelConfig::test(), Some("/opt/criticalup".into()),).unwrap() + ); + } + + #[test] + fn test_with_explicit_criticalup_home() { + let whitelabel1 = WhitelabelConfig::test(); + + let mut whitelabel2 = WhitelabelConfig::test(); + whitelabel2.name = "test-name"; + + // The value of the CRITICALUP_ROOT environment variable is respected regardless of the + // whitelabel configuration. + for whitelabel in &[whitelabel1, whitelabel2] { + assert_root_is( + "/opt/criticalup", + whitelabel, + Some("/opt/criticalup".into()), + ); + + assert_root_is("/foo/bar", whitelabel, Some("/foo/bar".into())); + + assert_root_is("foo", whitelabel, Some("foo".into())); + + // When the environment variable is empty we're not using it, so the rest of the + // detection code is used, only works on Linux currently. 
+ assert_root_is_not("", whitelabel, Some("".into())); + } + } + + #[test] + + fn test_with_explicit_root() { + let mut wl1 = WhitelabelConfig::test(); + wl1.name = "foo"; + + let mut wl2 = WhitelabelConfig::test(); + wl2.name = "bar"; + + assert_root_is( + "/usr/local/share/foo", + &wl1, + Some("/usr/local/share/foo".into()), + ); + assert_root_is( + "/usr/local/share/bar", + &wl2, + Some("/usr/local/share/bar".into()), + ); + assert_root_is("data/foo", &wl1, Some("data/foo".into())); + assert_root_is("data/bar", &wl2, Some("data/bar".into())); + assert_root_is( + "/home/user/.local/share/foo", + &wl1, + Some("/home/user/.local/share/foo".into()), + ); + assert_root_is( + "/home/pietro/.local/share/bar", + &wl2, + Some("/home/pietro/.local/share/bar".into()), + ); + + // When the environment variable is empty we're not using it, so the rest of the + // detection code is used. + assert_root_is_not("foo", &wl1, Some("bar".into())); + assert_root_is_not("bar", &wl2, Some("foo".into())); + } + + #[test] + fn test_not_empty() { + assert_eq!( + Some(&PathBuf::from("foo")), + not_empty(&Some(PathBuf::from("foo"))) + ); + assert_eq!(None, not_empty(&Some(PathBuf::from("")))); + assert_eq!(None, not_empty(&None)); + } + + fn assert_root_is( + expected: impl AsRef, + whitelabel: &WhitelabelConfig, + root: Option, + ) { + assert_eq!( + expected.as_ref(), + Paths::detect(whitelabel, root).unwrap().root + ); + } + + fn assert_root_is_not( + expected: impl AsRef, + whitelabel: &WhitelabelConfig, + root: Option, + ) { + match Paths::detect(whitelabel, root) { + Ok(paths) => assert_ne!(expected.as_ref(), paths.root), + Err(err) => assert!(matches!(err, Error::CouldNotDetectRootDirectory)), + } + } +} diff --git a/crates/criticalup-core/src/download_server_client.rs b/crates/criticalup-core/src/download_server_client.rs new file mode 100644 index 00000000..5c937362 --- /dev/null +++ b/crates/criticalup-core/src/download_server_client.rs @@ -0,0 +1,283 @@ +use 
crate::config::Config; +use crate::errors::{DownloadServerError, Error}; +use crate::state::State; +use criticaltrust::keys::PublicKey; +use criticaltrust::manifests::ReleaseManifest; +use criticaltrust::manifests::{KeysManifest, ReleaseArtifactFormat}; +use criticaltrust::signatures::Keychain; +use reqwest::blocking::{Client, RequestBuilder, Response}; +use reqwest::header::{HeaderValue, AUTHORIZATION}; +use reqwest::StatusCode; +use serde::Deserialize; + +pub struct DownloadServerClient { + base_url: String, + client: Client, + state: State, + trust_root: PublicKey, +} + +impl DownloadServerClient { + pub fn new(config: &Config, state: &State) -> Self { + let client = Client::builder() + .user_agent(config.whitelabel.http_user_agent) + .build() + .expect("failed to configure http client"); + + DownloadServerClient { + base_url: config.whitelabel.download_server_url.clone(), + client, + state: state.clone(), + trust_root: config.whitelabel.trust_root.clone(), + } + } + + pub fn get_current_token_data(&self) -> Result { + self.json(self.send_with_auth(self.client.get(self.url("/v1/tokens/current")))?) + } + + pub fn get_keys(&self) -> Result { + let mut keychain = Keychain::new(&self.trust_root).map_err(Error::KeychainInitFailed)?; + + let resp: KeysManifest = self.json(self.send(self.client.get(self.url("/v1/keys")))?)?; + for key in &resp.keys { + // Invalid keys are silently ignored, as they might be signed by a different root key + // used by a different release of criticalup, or they might be using an algorithm not + // supported by the current version of criticaltrust. + let _ = keychain.load(key); + } + + Ok(keychain) + } + + pub fn get_product_release_manifest( + &self, + product: &str, + release: &str, + ) -> Result { + let p = format!("/v1/releases/{product}/{release}"); + self.json(self.send_with_auth(self.client.get(self.url(p.as_str())))?) 
+ } + + pub fn download_package( + &self, + product: &str, + release: &str, + package: &str, + format: ReleaseArtifactFormat, + ) -> Result, Error> { + let artifact_format = format.to_string(); + + let download_url = + format!("/v1/releases/{product}/{release}/download/{package}/{artifact_format}"); + + let response = self.send_with_auth(self.client.get(self.url(download_url.as_str())))?; + let resp_body = response.bytes()?.to_vec(); + Ok(resp_body) + } + + fn url(&self, path: &str) -> String { + format!("{}{path}", self.base_url) + } + + fn send_with_auth(&self, builder: RequestBuilder) -> Result { + // We're constructing the `HeaderValue` manually instead of using the `bearer_token` method + // of `RequestBuilder` as the latter panics when it receives a token not representable + // inside HTTP headers (for example containing the `\r` byte). + // + // If the token contains such chars treat the authentication as failed due to an invalid + // token, as the server wouldn't be able to validate it either anyway. 
+ + // set path to token file for docker + let path_to_token_file = if std::path::Path::new("/.dockerenv").exists() { + Some("/run/secrets/CRITICALUP_TOKEN") + } else { + None + }; + + let header = self + .state + .authentication_token(path_to_token_file) + .as_ref() + .and_then(|token| HeaderValue::from_str(&format!("Bearer {}", token.unseal())).ok()); + + match header { + Some(header) => self.send(builder.header(AUTHORIZATION, header)), + None => Err(self.err_from_request(builder, DownloadServerError::AuthenticationFailed)), + } + } + + fn send(&self, builder: RequestBuilder) -> Result { + let req = builder.build().expect("failed to prepare the http request"); + let url = req.url().to_string(); + let response = self + .client + .execute(req) + .map_err(|e| Error::DownloadServerError { + kind: DownloadServerError::Network(e), + url, + })?; + + Err(self.err_from_response( + &response, + match response.status() { + StatusCode::OK => return Ok(response), + + StatusCode::BAD_REQUEST => DownloadServerError::BadRequest, + StatusCode::FORBIDDEN => DownloadServerError::AuthenticationFailed, + StatusCode::NOT_FOUND => DownloadServerError::NotFound, + StatusCode::TOO_MANY_REQUESTS => DownloadServerError::RateLimited, + + s if s.is_server_error() => DownloadServerError::InternalServerError(s), + s => DownloadServerError::UnexpectedResponseStatus(s), + }, + )) + } + + fn json Deserialize<'de>>(&self, mut response: Response) -> Result { + let mut body = Vec::new(); + response + .copy_to(&mut body) + .map_err(|e| self.err_from_response(&response, DownloadServerError::Network(e)))?; + + serde_json::from_slice(&body).map_err(|e| { + self.err_from_response(&response, DownloadServerError::UnexpectedResponseData(e)) + }) + } + + fn err_from_request(&self, builder: RequestBuilder, kind: DownloadServerError) -> Error { + Error::DownloadServerError { + kind, + url: builder + .build() + .expect("failed to prepare the http request") + .url() + .to_string(), + } + } + + fn 
err_from_response(&self, response: &Response, kind: DownloadServerError) -> Error { + Error::DownloadServerError { + kind, + url: response.url().to_string(), + } + } +} + +#[derive(Deserialize)] +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] +#[serde(rename_all = "kebab-case")] +pub struct CurrentTokenData { + pub name: String, + pub organization_name: String, + pub expires_at: Option, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::state::AuthenticationToken; + use crate::test_utils::{ + TestEnvironment, SAMPLE_AUTH_TOKEN_CUSTOMER, SAMPLE_AUTH_TOKEN_EXPIRY, + SAMPLE_AUTH_TOKEN_NAME, + }; + use criticaltrust::keys::KeyPair; + use criticaltrust::signatures::PublicKeysRepository; + + #[test] + fn test_get_current_token_while_authenticated() { + let test_env = TestEnvironment::with().download_server().prepare(); + + assert_eq!( + CurrentTokenData { + name: SAMPLE_AUTH_TOKEN_NAME.into(), + organization_name: SAMPLE_AUTH_TOKEN_CUSTOMER.into(), + expires_at: Some(SAMPLE_AUTH_TOKEN_EXPIRY.into()), + }, + test_env.download_server().get_current_token_data().unwrap(), + ); + assert_eq!(1, test_env.requests_served_by_mock_download_server()); + } + + #[test] + fn test_get_current_token_with_unrepresentable_token() { + let test_env = TestEnvironment::with().download_server().prepare(); + test_env + .state() + .set_authentication_token(Some(AuthenticationToken::seal("wrong\0"))); + assert_auth_failed(&test_env); + + // No request was actually made since the authentication token can't be represented in + // HTTP headers. 
+ assert_eq!(0, test_env.requests_served_by_mock_download_server()); + } + + #[test] + fn test_get_current_token_with_wrong_token() { + let test_env = TestEnvironment::with().download_server().prepare(); + test_env + .state() + .set_authentication_token(Some(AuthenticationToken::seal("wrong"))); + assert_auth_failed(&test_env); + + assert_eq!(1, test_env.requests_served_by_mock_download_server()); + } + + #[test] + fn test_get_current_token_with_no_token() { + let test_env = TestEnvironment::with().download_server().prepare(); + test_env.state().set_authentication_token(None); + assert_auth_failed(&test_env); + + // No token was configured, so no request could've been made. + assert_eq!(0, test_env.requests_served_by_mock_download_server()); + } + + #[test] + fn test_get_keys() { + let test_env = TestEnvironment::with().download_server().prepare(); + test_env.state().set_authentication_token(None); // The endpoint requires no authentication. + + let keys = test_env.keys(); + let keychain = test_env.download_server().get_keys().unwrap(); + + for expected_present in &[ + // Trust root included from the whitelabel config + &keys.trust_root, + // Retrieved from the download server + &keys.root, + &keys.packages, + &keys.releases, + &keys.redirects, + ] { + assert!(keychain + .get(&expected_present.public().calculate_id()) + .is_some()); + } + + for expected_missing in &[ + // Not served or provided anywhere + &keys.alternate_trust_root, + // Retrieved from the download server + &keys.alternate_root, + &keys.alternate_packages, + ] { + assert!(keychain + .get(&expected_missing.public().calculate_id()) + .is_none()); + } + } + + fn assert_auth_failed(test_env: &TestEnvironment) { + assert!(matches!( + test_env + .download_server() + .get_current_token_data() + .unwrap_err(), + Error::DownloadServerError { + kind: DownloadServerError::AuthenticationFailed, + .. 
+ }, + )); + } +} diff --git a/crates/criticalup-core/src/errors.rs b/crates/criticalup-core/src/errors.rs new file mode 100644 index 00000000..14947fde --- /dev/null +++ b/crates/criticalup-core/src/errors.rs @@ -0,0 +1,154 @@ +use criticaltrust::Error as TrustError; +use reqwest::Error as ReqError; +use reqwest::StatusCode; +use std::path::PathBuf; + +/// We're using a custom error enum instead of `Box` or one of the crates providing a +/// `Box` wrapper because we need to know all the possible errors criticalup could +/// encounter. Using `Box` makes it too easy to accidentally bubble up a library error +/// without wrapping it into a criticalup-specific error. +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("could not detect the criticalup root directory")] + CouldNotDetectRootDirectory, + + #[error("failed to download {url}")] + DownloadServerError { + url: String, + #[source] + kind: DownloadServerError, + }, + + #[error("state file at {} is not supported by this release (state format version {1})", .0.display())] + UnsupportedStateFileVersion(PathBuf, u32), + #[error("failed to read the criticalup state file at {}", .0.display())] + CantReadStateFile(PathBuf, #[source] std::io::Error), + #[error("failed to write the criticalup state file to {}", .0.display())] + CantWriteStateFile(PathBuf, #[source] WriteFileError), + #[error("failed to parse the criticalup state file at {}, is it corrupt?", .0.display())] + CorruptStateFile(PathBuf, #[source] serde_json::Error), + + #[error("could not find a project manifest in the current or parent directories")] + ProjectManifestDetectionFailed, + #[error("failed to load the project manifest at {} ", .path.display(),)] + ProjectManifestLoadingFailed { + path: PathBuf, + #[source] + kind: ProjectManifestLoadingError, + }, + #[error("failed to create product directory for product {} at {}", .product, .path.display())] + ProjectManifestProductDirCreationFailed { + path: PathBuf, + product: String, + #[source] 
+ source: std::io::Error, + }, + #[error("installation {} does not exist; please run `criticalup install` again", .0)] + InstallationDoesNotExist(String), + + #[error("failed to read the project directory; maybe it is missing?")] + FailedToReadDirectory(#[source] std::io::Error), + + #[error("failed to initialize the keychain used to verify signatures")] + KeychainInitFailed(#[source] TrustError), + + #[error("unknown variable substitution: ${{{0}}}")] + UnknownVariableSubstitution(String), + #[error("unterminated variable")] + UnterminatedVariable, + + #[error(transparent)] + Reqwest(#[from] ReqError), + + #[error("failed to find canonical path for {}", path.display())] + FailedToFindCanonicalPath { + path: PathBuf, + #[source] + kind: std::io::Error, + }, +} + +#[derive(Debug, thiserror::Error)] +pub enum WriteFileError { + #[error(transparent)] + Io(std::io::Error), + #[error("failed to create the parent directory")] + CantCreateParentDirectory(#[source] std::io::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum DownloadServerError { + #[error("failed to authenticate (missing or wrong authentication token)")] + AuthenticationFailed, + #[error("resource not found")] + NotFound, + #[error("invalid request sent to the server")] + BadRequest, + #[error("too many requests, please try later (rate limited)")] + RateLimited, + #[error("an internal error occured on the download server (status code {0})")] + InternalServerError(StatusCode), + #[error("the response from the download server was not expected (status code {0})")] + UnexpectedResponseStatus(StatusCode), + #[error("the contents in the response from the download server were not expected")] + UnexpectedResponseData(#[source] serde_json::Error), + #[error("failed to send the network request")] + Network(#[source] reqwest::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum ProjectManifestLoadingError { + #[error("failed to read the file")] + FailedToRead(#[source] std::io::Error), + #[error("failed 
to parse")] + FailedToParse(#[source] toml_edit::de::Error), + + #[error( + "current version of criticalup does not support multiple products. found {0} products." + )] + MultipleProductsNotSupportedInProjectManifest(usize), + + #[error("the `manifest-version` in your project manifest \ + is smaller than what this release of criticalup supports\n \ + please change the `manifest-version` to {}\n \ + your project manifest version: {}", + .default_supported_version, + .user_version, + )] + ManifestVersionTooSmall { + user_version: u32, + default_supported_version: u32, + }, + + #[error("the `manifest-version` in your project manifest \ + is greater than what this release of criticalup supports\n \ + please update criticalup to the latest version\n \ + your project manifest version: {}", + .user_version, + )] + ManifestVersionTooBig { user_version: u32 }, + + #[error("unknown substitution variable: ${{{0}}}")] + UnknownVariableInSubstitution(String), + #[error("unterminated substitution")] + UnterminatedVariableInSubstitution, +} + +#[derive(Debug, thiserror::Error)] +pub enum BinaryProxyUpdateError { + #[error("failed to list the {} directory", .0.display())] + ListDirectoryFailed(PathBuf, #[source] std::io::Error), + #[error("failed to inspect {}", .0.display())] + InspectFailed(PathBuf, #[source] std::io::Error), + #[error("failed to remove unexpected path {}", .0.display())] + UnexpectedPathRemovalFailed(PathBuf, #[source] std::io::Error), + #[error("failed to create a symlink from {} to {}", .source.display(), .dest.display())] + SymlinkFailed { + source: PathBuf, + dest: PathBuf, + #[source] + inner: std::io::Error, + }, + #[error("failed to create the parent directory {}", .0.display())] + ParentDirectoryCreationFailed(PathBuf, #[source] std::io::Error), +} diff --git a/crates/criticalup-core/src/lib.rs b/crates/criticalup-core/src/lib.rs new file mode 100644 index 00000000..81885693 --- /dev/null +++ b/crates/criticalup-core/src/lib.rs @@ -0,0 +1,12 @@ +pub mod 
binary_proxies; +pub mod config; +pub mod download_server_client; +pub mod errors; +pub mod project_manifest; + +pub mod state; + +mod utils; + +#[cfg(test)] +mod test_utils; diff --git a/crates/criticalup-core/src/project_manifest/mod.rs b/crates/criticalup-core/src/project_manifest/mod.rs new file mode 100644 index 00000000..7a3b1cc5 --- /dev/null +++ b/crates/criticalup-core/src/project_manifest/mod.rs @@ -0,0 +1,738 @@ +mod substitutions; +mod v1; + +use crate::errors::Error::FailedToFindCanonicalPath; +use crate::errors::ProjectManifestLoadingError::MultipleProductsNotSupportedInProjectManifest; +use crate::errors::{Error, ProjectManifestLoadingError}; +use crate::project_manifest::substitutions::apply_substitutions; +use crate::utils::Sha256Hasher; +use serde::{Deserialize, Serialize}; +use std::env; +use std::hash::{Hash, Hasher}; +use std::ops::{Deref, DerefMut}; +use std::path::{Path, PathBuf}; + +const DEFAULT_PROJECT_MANIFEST_NAME: &str = "criticalup.toml"; +const DEFAULT_PROJECT_MANIFEST_VERSION: u32 = 1; + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct ProjectManifest { + products: Vec, +} + +impl ProjectManifest { + /// Try to find the criticalup.toml project manifest in parent directories. + pub fn discover(base: &Path) -> Result { + let mut search = Some(base); + while let Some(path) = search.take() { + search = path.parent(); + + let candidate = path.join(DEFAULT_PROJECT_MANIFEST_NAME); + if candidate.is_file() { + return Ok(candidate); + } + } + + Err(Error::ProjectManifestDetectionFailed) + } + + /// Find the absolute path to the manifest. + /// + /// The path, which is optionally provided by the user could be relative, but we need the + /// absolute path for state file. + /// + /// If the project path is provided then it could be a relative path. In that case, find the + /// full path to the criticalup.toml. 
+ /// + /// If the path is not provided then tries to find the manifest iterating over parent + /// directories looking for one, and stopping at the closest parent directory with the file. + pub fn discover_canonical_path(project_path: Option<&Path>) -> Result { + let curr_directory = env::current_dir().map_err(Error::FailedToReadDirectory)?; + match project_path { + Some(path) => { + Ok( + std::fs::canonicalize(path).map_err(|err| FailedToFindCanonicalPath { + path: path.to_path_buf(), + kind: err, + })?, + ) + } + None => { + let path = ProjectManifest::discover(&curr_directory)?; + Ok(std::fs::canonicalize(&path) + .map_err(|err| FailedToFindCanonicalPath { path, kind: err })?) + } + } + } + + /// Try to parse and return the `ProjectManifest` object. + pub fn load(path: &Path) -> Result { + load_inner(path).map_err(|kind| Error::ProjectManifestLoadingFailed { + path: path.into(), + kind, + }) + } + + /// Find the project manifest and parse it. + /// + /// This function tries to load the manifest for a given path. If the path is not provided + /// then tries to find the manifest iterating over parent directories looking for one, and + /// stopping at the closest parent directory with the file. + /// + /// This is a combination of existing functions `Self::load()` and `Self::discover()` for ease + /// of use. + pub fn get(project_path: Option) -> Result { + let manifest = match project_path { + Some(manifest_path) => ProjectManifest::load(&manifest_path)?, + None => { + let discovered_manifest = Self::discover_canonical_path(None)?; + Self::load(discovered_manifest.as_path())? + } + }; + Ok(manifest) + } + + pub fn products(&self) -> &[ProjectManifestProduct] { + &self.products + } + + /// Generates a directory for each product under the specified `root`. + /// + /// If the directory already exists, then just skips the creation. 
+ pub fn create_products_dirs(&self, installation_dir: &Path) -> std::io::Result<()> { + let products = self.products(); + for product in products { + std::fs::create_dir_all(installation_dir.join(&product.installation_id()))?; + } + + Ok(()) + } +} + +/// Keeping packages sorted requires this wrapper newtype pattern. +/// +/// Deref and DerefMut are implemented for this type to keep things as smooth as possible +/// with the least amount of breaking changes, if any. +#[derive(Debug, PartialEq, Eq)] +struct Packages(Vec); + +impl Hash for Packages { + /// Packages hash to be done only on sorted packages. + /// + /// In-place sorting within a method is not advisable as it is not explicit and can fall + /// through the cracks. + fn hash(&self, state: &mut H) { + let mut this = self.0.clone(); + this.sort(); + this.hash(state) + } +} + +impl Deref for Packages { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + self.0.as_ref() + } +} + +impl DerefMut for Packages { + fn deref_mut(&mut self) -> &mut Self::Target { + self.0.as_mut() + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct ProjectManifestProduct { + name: String, + release: String, + packages: Packages, +} + +impl ProjectManifestProduct { + pub fn name(&self) -> &str { + &self.name + } + + pub fn release(&self) -> &str { + &self.release + } + + pub fn packages(&self) -> &[String] { + &self.packages + } + + pub fn installation_id(&self) -> InstallationId { + // For now this generates the ID using hash of the product object. + let mut hasher = Sha256Hasher::new(); + self.hash(&mut hasher); + InstallationId(hasher.finalize()) + } + + /// Generates a directory for the product under the specified `root`. If the directory already + /// exists, then just skips the creation. 
+ pub fn create_product_dir(&self, installation_dir: &Path) -> Result<(), Error> { + let product_dir_name = self.installation_id(); + let abs_installation_dir_path: PathBuf = [installation_dir, product_dir_name.as_ref()] + .iter() + .collect(); + let _res: Result<(), std::io::Error> = + match std::fs::create_dir_all(abs_installation_dir_path.clone()) { + Ok(_) => Ok(()), + Err(err) => { + return Err(Error::ProjectManifestProductDirCreationFailed { + path: abs_installation_dir_path, + product: self.name.clone(), + source: err, + }) + } + }; + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[serde(transparent)] +pub struct InstallationId(pub String); + +impl AsRef for InstallationId { + fn as_ref(&self) -> &Path { + self.0.as_ref() + } +} + +impl Deref for InstallationId { + type Target = str; + + fn deref(&self) -> &Self::Target { + self.0.as_str() + } +} + +#[derive(Deserialize)] +#[serde(rename_all = "kebab-case")] +struct VersionDetector { + manifest_version: u32, +} + +fn load_inner(path: &Path) -> Result { + let mut products = Vec::new(); + + let contents = std::fs::read(path).map_err(ProjectManifestLoadingError::FailedToRead)?; + + // We first deserialize only the `manifest_version` field, which must be present in all + // past and future versions, and then based on the version we properly deserialize. 
+ let version: VersionDetector = + toml_edit::de::from_slice(&contents).map_err(ProjectManifestLoadingError::FailedToParse)?; + match version.manifest_version { + DEFAULT_PROJECT_MANIFEST_VERSION => { + let manifest: v1::ProjectManifest = toml_edit::de::from_slice(&contents) + .map_err(ProjectManifestLoadingError::FailedToParse)?; + + for (name, product) in manifest.products.into_iter() { + let mut packages = Packages( + product + .packages + .iter() + .map(|p| apply_substitutions(p)) + .collect::, ProjectManifestLoadingError>>()?, + ); + packages.sort(); + + products.push(ProjectManifestProduct { + name, + release: apply_substitutions(&product.release)?, + packages, + }); + } + } + small if small < DEFAULT_PROJECT_MANIFEST_VERSION => { + return Err(ProjectManifestLoadingError::ManifestVersionTooSmall { + user_version: small, + default_supported_version: DEFAULT_PROJECT_MANIFEST_VERSION, + }) + } + large => { + return Err(ProjectManifestLoadingError::ManifestVersionTooBig { + user_version: large, + }) + } + } + + products.sort_by(|a, b| a.name.cmp(&b.name)); + + if products.len() > 1 { + return Err(MultipleProductsNotSupportedInProjectManifest( + products.len(), + )); + } + + Ok(ProjectManifest { products }) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn write_sample_manifest(dir: &Path) { + const SAMPLE: &str = "\ + manifest-version = 1\n\ + \n\ + [products.sample]\n\ + release = \"foo\"\n\ + packages = [\"bar\"]\n\ + "; + + std::fs::create_dir_all(dir).unwrap(); + std::fs::write(dir.join("criticalup.toml"), SAMPLE.as_bytes()).unwrap(); + } + + mod test_discover { + use super::*; + use std::env::set_current_dir; + + #[test] + fn test_current_directory() { + let root = tempfile::tempdir().unwrap(); + write_sample_manifest(root.path()); + let discovered_manifest_path = ProjectManifest::discover(root.path()).unwrap(); + assert_sample_parsed( + ProjectManifest::load(discovered_manifest_path.as_path()).unwrap(), + ); + } + + #[test] + fn test_parent_directory() 
{ + let root = tempfile::tempdir().unwrap(); + write_sample_manifest(root.path()); + let discovered_manifest_path = + ProjectManifest::discover(&root.path().join("child")).unwrap(); + assert_sample_parsed( + ProjectManifest::load(discovered_manifest_path.as_path()).unwrap(), + ); + } + + #[test] + fn test_two_parent_directories() { + let root = tempfile::tempdir().unwrap(); + write_sample_manifest(root.path()); + let discovered_manifest_path = + ProjectManifest::discover(&root.path().join("child").join("grandchild")).unwrap(); + assert_sample_parsed( + ProjectManifest::load(discovered_manifest_path.as_path()).unwrap(), + ); + } + + #[test] + fn test_child_directory() { + let root = tempfile::tempdir().unwrap(); + write_sample_manifest(&root.path().join("child")); + + assert!(matches!( + ProjectManifest::discover(root.path()).unwrap_err(), + Error::ProjectManifestDetectionFailed + )); + } + + #[test] + #[ignore = "Testing manifest discovery while setting current directory will be enabled at a later date."] + fn discover_canonical_path_matches_current_manifest_canonical_path() { + let root = tempfile::tempdir().unwrap(); + let expected_project_path = root.path().join("project").join("awesome"); + write_sample_manifest(&expected_project_path); + + // We move into the directory to simulate being in the project directory. 
+ set_current_dir(&expected_project_path).unwrap(); + + #[cfg(not(any(target_os = "macos", target_os = "windows")))] + let discovered_abs_path = ProjectManifest::discover_canonical_path(None).unwrap(); + #[cfg(not(any(target_os = "macos", target_os = "windows")))] + let expected_project_path = + std::fs::canonicalize(expected_project_path.join("criticalup.toml")).unwrap(); + + #[cfg(target_os = "macos")] + let discovered_abs_path = ProjectManifest::discover_canonical_path(None) + .unwrap() + .strip_prefix("/private") + .unwrap() + .to_path_buf(); + #[cfg(target_os = "macos")] + let expected_project_path = expected_project_path + .join("criticalup.toml") + .strip_prefix("/") + .unwrap() + .to_path_buf(); + + #[cfg(target_os = "windows")] + let discovered_abs_path = ProjectManifest::discover_canonical_path(None).unwrap(); + // We need to canonicalize this side as well because Windows canonical paths + // add an extra oomph as prefix \\?\. + // https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#unc-paths + #[cfg(target_os = "windows")] + let expected_project_path = + std::fs::canonicalize(expected_project_path.join("criticalup.toml")).unwrap(); + + assert_eq!(discovered_abs_path, expected_project_path); + } + + #[test] + fn test_two_child_directories() { + let root = tempfile::tempdir().unwrap(); + write_sample_manifest(&root.path().join("child").join("grandchild")); + + assert!(matches!( + ProjectManifest::discover(root.path()).unwrap_err(), + Error::ProjectManifestDetectionFailed + )); + } + + #[test] + fn test_no_file() { + assert!(matches!( + ProjectManifest::discover(tempfile::tempdir().unwrap().path()).unwrap_err(), + Error::ProjectManifestDetectionFailed + )); + } + + #[track_caller] + fn assert_sample_parsed(manifest: ProjectManifest) { + assert_eq!( + ProjectManifest { + products: vec![ProjectManifestProduct { + name: "sample".into(), + release: "foo".into(), + packages: Packages(vec!["bar".into()]), + }] + }, + manifest + ); + } + } + + 
mod test_load { + use super::*; + use std::env::set_current_dir; + + #[test] + fn test_read_failure() { + let root = tempfile::tempdir().unwrap(); + let bad_path = root.path().join("doesnt-exist.toml"); + + assert!(matches!( + ProjectManifest::load(&bad_path).unwrap_err(), + Error::ProjectManifestLoadingFailed { + path, + kind: ProjectManifestLoadingError::FailedToRead(io), + } if path == bad_path && io.kind() == std::io::ErrorKind::NotFound, + )); + } + + #[test] + fn test_invalid_toml() { + assert_load_error("\0", |e| { + matches!(e, ProjectManifestLoadingError::FailedToParse(_)) + }); + } + + #[test] + fn test_missing_manifest_version() { + assert_load_error("foo = 1", |e| { + matches!(e, ProjectManifestLoadingError::FailedToParse(_)) + }); + } + + #[test] + fn test_unsupported_manifest_version() { + assert_load_error("manifest-version = 0", |e| { + matches!( + e, + ProjectManifestLoadingError::ManifestVersionTooSmall { + user_version: 0, + default_supported_version: DEFAULT_PROJECT_MANIFEST_VERSION, + } + ) + }); + + assert_load_error("manifest-version = 2", |e| { + matches!( + e, + ProjectManifestLoadingError::ManifestVersionTooBig { user_version: 2 } + ) + }); + } + + #[test] + fn test_v1_empty() { + assert_load( + "manifest-version = 1", + ProjectManifest { + products: Vec::new(), + }, + ); + } + + #[test] + fn test_v1_one_product() { + assert_load( + r#" + manifest-version = 1 + + [products.sample] + release = "foo" + packages = ["bar", "baz"] + "#, + ProjectManifest { + products: vec![ProjectManifestProduct { + name: "sample".into(), + release: "foo".into(), + packages: Packages(vec!["bar".into(), "baz".into()]), + }], + }, + ); + } + + #[test] + #[ignore = "Temporarily disabled until support for multiple products is enabled."] + fn test_v1_multiple_products() { + // This also tests whether sorting works. 
+ assert_load( + r#" + manifest-version = 1 + + [products.sample] + release = "foo" + packages = ["bar", "baz"] + + [products.demo] + release = "@foo/latest" + packages = ["b", "a"] + "#, + ProjectManifest { + products: vec![ + ProjectManifestProduct { + name: "demo".into(), + release: "@foo/latest".into(), + packages: Packages(vec!["a".into(), "b".into()]), + }, + ProjectManifestProduct { + name: "sample".into(), + release: "foo".into(), + packages: Packages(vec!["bar".into(), "baz".into()]), + }, + ], + }, + ); + } + + #[test] + fn test_v1_multiple_products_not_supported() { + let root = tempfile::tempdir().unwrap(); + let path = root.path().join("criticalup.toml"); + let contents = r#" + manifest-version = 1 + + [products.sample] + release = "foo" + packages = ["bar", "baz"] + + [products.demo] + release = "@foo/latest" + packages = ["b", "a"] + "#; + + std::fs::write(path, contents.as_bytes()).unwrap(); + assert_load_error(contents, |err| { + matches!(err, MultipleProductsNotSupportedInProjectManifest(2)) + }); + } + + #[test] + fn test_v1_substitutions() { + assert_load( + r#" + manifest-version = 1 + + [products.sample] + release = "${rustc-host}" + packages = ["foo-${rustc-host}"] + "#, + ProjectManifest { + products: vec![ProjectManifestProduct { + name: "sample".into(), + release: env!("TARGET").into(), + packages: Packages(vec![concat!("foo-", env!("TARGET")).into()]), + }], + }, + ); + } + + #[test] + fn test_v1_missing_required_fields() { + assert_load_error( + r#" + manifest-version = 1 + + [products.sample] + release = "foo" + "#, + |e| matches!(e, ProjectManifestLoadingError::FailedToParse(_)), + ); + } + + #[test] + fn test_v1_extra_unknown_fields() { + assert_load_error( + r#" + manifest-version = 1 + foo = 1 + + [products.sample] + release = "foo" + packages = ["bar"] + "#, + |e| matches!(e, ProjectManifestLoadingError::FailedToParse(_)), + ); + assert_load_error( + r#" + manifest-version = 1 + + [products.sample] + release = "foo" + packages = 
["bar"] + foo = 1 + "#, + |e| matches!(e, ProjectManifestLoadingError::FailedToParse(_)), + ); + } + + #[test] + fn test_v1_invalid_substitutions() { + assert_load_error( + r#" + manifest-version = 1 + + [products.sample] + release = "foo" + packages = ["${rustc-host"] + "#, + |e| { + matches!( + e, + ProjectManifestLoadingError::UnterminatedVariableInSubstitution + ) + }, + ); + } + + #[track_caller] + fn assert_load(contents: &str, expected: ProjectManifest) { + let root = tempfile::tempdir().unwrap(); + let path = root.path().join("criticalup.toml"); + + std::fs::write(&path, contents.as_bytes()).unwrap(); + assert_eq!(expected, ProjectManifest::load(&path).unwrap()); + } + + #[track_caller] + fn assert_load_error( + contents: &str, + error_check: impl FnOnce(&ProjectManifestLoadingError) -> bool, + ) { + let root = tempfile::tempdir().unwrap(); + let bad_path = root.path().join("criticalup.toml"); + + std::fs::write(&bad_path, contents.as_bytes()).unwrap(); + + let mut supported_versions: Vec = [1, 3, 5].into(); + supported_versions.sort(); + + let err = ProjectManifest::load(&bad_path).unwrap_err(); + if let Error::ProjectManifestLoadingFailed { path, kind } = &err { + assert_eq!(&bad_path, path); + assert!(error_check(kind)); + } + } + + #[test] + #[ignore = "Testing manifest discovery while setting current directory will be enabled at a later date."] + fn get_loaded_manifest_by_discovering() { + let root = tempfile::tempdir().unwrap(); + let awesome_project_path = root.path().join("project").join("awesome"); + write_sample_manifest(&awesome_project_path); + + set_current_dir(&awesome_project_path).unwrap(); + let discovered_manifest = ProjectManifest::get(None).unwrap(); + let direct_loaded_manifest = + ProjectManifest::load(awesome_project_path.join("criticalup.toml").as_path()) + .unwrap(); + assert_eq!(discovered_manifest, direct_loaded_manifest); + } + } + + mod test_product { + use crate::project_manifest::{InstallationId, Packages, 
ProjectManifestProduct}; + + #[test] + fn test_installation_id_generation() { + let product1 = ProjectManifestProduct { + name: "dir_name_tester".to_string(), + release: "1.523231341324".to_string(), + packages: Packages(vec![]), + }; + assert_eq!( + InstallationId( + "88ae6c4f87f8b450cef620983f00ac440a0b2dd6c2b7a1f04185b917d7a51c84".into() + ), + product1.installation_id(), + ); + + let product2 = ProjectManifestProduct { + name: "dir_name_tester".to_string(), + release: "1.523231341324".to_string(), + packages: Packages(vec!["package 2".to_string(), "package 1".to_string()]), + }; + assert_eq!( + InstallationId( + "b1eb7dd657b436a540549b2f2adf0cfcdef50233487de50c404ac1510e9d0868".into() + ), + product2.installation_id(), + ); + } + + #[test] + fn test_create_success() { + let root = tempfile::tempdir().unwrap(); + let installation_dir = root.path().join("toolchains"); + let product1 = ProjectManifestProduct { + name: "product1".into(), + release: "@foo/latest".into(), + packages: Packages(vec!["b".into(), "a".into()]), + }; + + let product1_id = product1.installation_id(); + + let product2 = ProjectManifestProduct { + name: "product2".into(), + release: "foo".into(), + packages: Packages(vec!["bar".into(), "baz".into()]), + }; + + let product2_id = product2.installation_id(); + + let test_manifest = crate::project_manifest::ProjectManifest { + products: vec![product1, product2], + }; + + // Main project dir is created along with product dirs. + let _ = test_manifest.create_products_dirs(&installation_dir); + assert!(installation_dir.exists()); + + // A dir per product within the project dir. 
+ assert!(installation_dir.join(product1_id).exists()); + assert!(installation_dir.join(product2_id).exists()); + assert!(!installation_dir + .join("NEVERGONNAGIVEYOUUPNEVERGONNALETYOUDOWN") + .exists()); + } + } +} diff --git a/crates/criticalup-core/src/project_manifest/substitutions.rs b/crates/criticalup-core/src/project_manifest/substitutions.rs new file mode 100644 index 00000000..d205c425 --- /dev/null +++ b/crates/criticalup-core/src/project_manifest/substitutions.rs @@ -0,0 +1,104 @@ +use crate::errors::ProjectManifestLoadingError; + +const VARIABLE_START: &str = "${"; +const VARIABLE_END: &str = "}"; + +enum ParseState { + Raw, + Variable, +} + +pub(super) fn apply_substitutions(mut input: &str) -> Result { + let mut state = ParseState::Raw; + let mut result = String::new(); + + loop { + match state { + ParseState::Raw => { + if let Some(start) = input.find(VARIABLE_START) { + result.push_str(&input[..start]); + + input = &input[(start + VARIABLE_START.len())..]; + state = ParseState::Variable; + } else { + // End of the input + result.push_str(input); + return Ok(result); + } + } + ParseState::Variable => { + if let Some(end) = input.find(VARIABLE_END) { + result.push_str(&apply_substitution(&input[..end])?); + + input = &input[(end + VARIABLE_END.len())..]; + state = ParseState::Raw; + } else { + // End of the input + return Err(ProjectManifestLoadingError::UnterminatedVariableInSubstitution); + } + } + } + } +} + +fn apply_substitution(variable: &str) -> Result { + match variable { + "rustc-host" => Ok(env!("TARGET").into()), + other => Err(ProjectManifestLoadingError::UnknownVariableInSubstitution( + other.into(), + )), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_apply_substitutions() { + assert_eq!("hello world", apply_substitutions("hello world").unwrap()); + assert_eq!( + env!("TARGET"), + apply_substitutions("${rustc-host}").unwrap() + ); + assert_eq!( + concat!("hello ", env!("TARGET")), + apply_substitutions("hello 
${rustc-host}").unwrap() + ); + assert_eq!( + concat!("hello ", env!("TARGET"), "!"), + apply_substitutions("hello ${rustc-host}!").unwrap() + ); + assert_eq!( + concat!("hello ", env!("TARGET"), "}"), + apply_substitutions("hello ${rustc-host}}").unwrap() + ); + + assert!(matches!( + apply_substitutions("hello ${").unwrap_err(), + ProjectManifestLoadingError::UnterminatedVariableInSubstitution + )); + assert!(matches!( + apply_substitutions("hello ${missing-var}!").unwrap_err(), + ProjectManifestLoadingError::UnknownVariableInSubstitution(s) if s == "missing-var" + )); + assert!(matches!( + apply_substitutions("hello ${}!").unwrap_err(), + ProjectManifestLoadingError::UnknownVariableInSubstitution(s) if s.is_empty() + )); + } + + #[test] + fn test_apply_substitution() { + assert_eq!(env!("TARGET"), apply_substitution("rustc-host").unwrap()); + + assert!(matches!( + apply_substitution("rustc_host").unwrap_err(), + ProjectManifestLoadingError::UnknownVariableInSubstitution(s) if s == "rustc_host" + )); + assert!(matches!( + apply_substitution("").unwrap_err(), + ProjectManifestLoadingError::UnknownVariableInSubstitution(s) if s.is_empty() + )); + } +} diff --git a/crates/criticalup-core/src/project_manifest/v1.rs b/crates/criticalup-core/src/project_manifest/v1.rs new file mode 100644 index 00000000..d9df1523 --- /dev/null +++ b/crates/criticalup-core/src/project_manifest/v1.rs @@ -0,0 +1,18 @@ +use serde::Deserialize; +use std::collections::HashMap; + +#[derive(Deserialize)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub(super) struct ProjectManifest { + #[allow(unused)] + manifest_version: u32, + #[serde(default)] + pub(super) products: HashMap, +} + +#[derive(Deserialize)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub(super) struct ProjectManifestProduct { + pub(super) release: String, + pub(super) packages: Vec, +} diff --git a/crates/criticalup-core/src/state.rs b/crates/criticalup-core/src/state.rs new file mode 100644 index 
00000000..ad16748c --- /dev/null +++ b/crates/criticalup-core/src/state.rs @@ -0,0 +1,980 @@ +use std::cell::{Ref, RefCell}; +use std::collections::{BTreeMap, BTreeSet}; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::rc::Rc; + +use serde::{Deserialize, Serialize}; + +use criticaltrust::integrity::VerifiedPackage; + +use crate::config::Config; +use crate::errors::Error; +use crate::errors::Error::InstallationDoesNotExist; +use crate::errors::WriteFileError; +use crate::project_manifest::InstallationId; +use crate::utils::open_file_for_write; + +const CURRENT_FORMAT_VERSION: u32 = 1; +const CRITICALUP_TOKEN_ENV_VAR_NAME: &str = "CRITICALUP_TOKEN"; + +#[derive(Clone)] +pub struct State { + inner: Rc>, +} + +impl State { + /// Construct the `State` object by loading the content from state file from disk. + pub fn load(config: &Config) -> Result { + let path = config.paths.state_file.clone(); + + let repr = match std::fs::read(&path) { + Ok(contents) => serde_json::from_slice(&contents) + .map_err(|e| Error::CorruptStateFile(path.clone(), e))?, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => StateRepr::default(), + Err(err) => return Err(Error::CantReadStateFile(path, err)), + }; + + if repr.version != CURRENT_FORMAT_VERSION { + return Err(Error::UnsupportedStateFileVersion(path, repr.version)); + } + + Ok(State { + inner: Rc::new(RefCell::new(StateInner { path, repr })), + }) + } + + /// Returns the authentication token. + /// + /// Attempts to read from: + /// 1. `token_path` (if present) + /// 2. The `CRITICALUP_TOKEN_ENV_VAR_NAME` environment + /// 3. The state + /// 1. check if token_path was sent in the fn call + /// 2. if not, then try to see if the env var is set + /// 3. if that was not set then look at the State + /// 4. 
else, None + pub fn authentication_token(&self, token_path: Option<&str>) -> Option { + match token_path { + Some(token_path) => { + let token_path = std::path::Path::new(token_path); + if token_path.exists() { + match std::fs::read_to_string(token_path) { + Ok(token) => Some(AuthenticationToken(token.to_string().trim().into())), + Err(_) => self.authentication_token_inner(), + } + } else { + self.authentication_token_inner() + } + } + None => self.authentication_token_inner(), + } + } + + /// Returns the authentication token. + /// + /// Attempts to read from: + /// 1. The `CRITICALUP_TOKEN_ENV_VAR_NAME` environment + /// 2. The state + fn authentication_token_inner(&self) -> Option { + match std::env::var(CRITICALUP_TOKEN_ENV_VAR_NAME) { + Ok(token_from_env) => Some(AuthenticationToken(token_from_env)), + Err(_) => { + let borrowed = self.inner.borrow(); + borrowed.repr.authentication_token.clone() + } + } + } + + pub fn set_authentication_token(&self, token: Option) { + self.inner.borrow_mut().repr.authentication_token = token; + } + + /// Adds or selectively installation in the State for a given `InstallationId`, + /// a given Manifest path and verified packages. + /// + /// Creates or overrides installation for a given unique `InstallationId`. If you merely want + /// to update/append more manifest paths then use `Self::update_installation_manifests` method. + /// + /// Also, removes the manifest path from older installations. + /// + /// We need to check the following to make a decision on what to do with the installation + /// within the State and also what to do with the manifests within those installations: + /// - State file + /// - Installation directory + /// - Manifest path(s) + /// + /// The following table will help figure out the match pattern below. + /// + /// +===========+===========+===========================================================+ + /// | In State? | On Disk? 
| Result | + /// +===========+===========+===========================================================+ + /// | true | true | Update existing installation | + /// +-----------+-----------+-----------------------------------------------------------+ + /// | false | _ | Create new installation | + /// +-----------+-----------+-----------------------------------------------------------+ + /// | true | false | Remove the older installations from the State, create new | + /// +-----------+-----------+-----------------------------------------------------------+ + pub fn add_installation( + &self, + installation_id: &InstallationId, + packages: &[VerifiedPackage], + manifest: &Path, + config: &Config, + ) -> Result<(), Error> { + // Get the canonical path so all platforms are consistent. + let manifest = canonicalize_or_err(manifest)?; + let mut inner = self.inner.borrow_mut(); + let existing_installation_in_state_exists = + inner.repr.installations.get(installation_id).is_some(); + let installation_path_on_disk_exists = config + .paths + .installation_dir + .join(&installation_id.0) + .exists(); + match ( + existing_installation_in_state_exists, + installation_path_on_disk_exists, + ) { + (true, true) => { + inner.update_installation_manifests(installation_id, &manifest)?; + } + + (false, _) => { + inner.remove_manifest_from_all_installations(&manifest); + + // Create the new installation for provided manifest. + let manifests = BTreeSet::from([manifest]); + inner.repr.installations.insert( + installation_id.clone(), + StateInstallation { + manifests, + binary_proxies: packages + .iter() + .flat_map(|package| package.proxies_paths.iter()) + .map(|(k, v)| (k.clone(), v.into())) + .collect(), + }, + ); + } + (true, false) => { + eprintln!( + "Installation in the State exists but the installation directory is missing." 
+ ); + inner.repr.installations.remove(installation_id); + } + } + Ok(()) + } + + /// Updates an existing installation using `InstallationId` by appending manifest path for + /// a new project using a manifest that has an existing installation. + /// + /// Also, removes the manifest path from older installations. + pub fn update_installation_manifests( + &self, + installation_id: &InstallationId, + manifest_path: &Path, + ) -> Result<(), Error> { + // Get the canonical path so all platforms are consistent. + let manifest = canonicalize_or_err(manifest_path)?; + let mut inner = self.inner.borrow_mut(); + inner.update_installation_manifests(installation_id, &manifest) + } + + /// Removes a manifest path from all installations and returns the list of `InstallationId`s + /// that had the said manifest. + pub fn remove_manifest_from_all_installations( + &self, + manifest_path: &Path, + ) -> Result, Error> { + // Get the canonical path so all platforms are consistent. + let manifest = canonicalize_or_err(manifest_path)?; + let mut inner = self.inner.borrow_mut(); + Ok(inner.remove_manifest_from_all_installations(&manifest)) + } + + /// Remove an installation from the `State` for a given `InstallationId`. + pub fn remove_installation(&self, installation_id: &InstallationId) { + self.inner + .borrow_mut() + .repr + .installations + .remove(installation_id); + } + + pub fn resolve_binary_proxy( + &self, + installation: &InstallationId, + name: &str, + ) -> Option { + self.inner + .borrow() + .repr + .installations + .get(installation) + .and_then(|i| i.binary_proxies.get(name)) + .map(|name| name.into()) + } + + /// Gets all the installations listed in the `State` file. 
+ pub fn installations(&self) -> Ref> { + Ref::map(self.inner.borrow(), |v| &v.repr.installations) + } + + pub fn all_binary_proxy_names(&self) -> Vec { + let mut result: Vec<_> = self + .inner + .borrow() + .repr + .installations + .values() + .flat_map(|installation| installation.binary_proxies.keys()) + .cloned() + .collect(); + + result.sort(); + result.dedup(); + result + } + + pub fn persist(&self) -> Result<(), Error> { + let inner = self.inner.borrow(); + + // According to the serde_json documentation, the only two reasons this could fail is if + // either the serialize implementation returns an error, or a map has non-string keys. With + // our schema neither of these are supposed to happen, so if we fail serialization it's a + // criticalup bug and we shoiuld abort. + let mut serialized = serde_json::to_vec_pretty(&inner.repr) + .expect("state file serialization unexpectedly failed"); + serialized.push(b'\n'); + + let mut f = open_file_for_write(&inner.path) + .map_err(|e| Error::CantWriteStateFile(inner.path.clone(), e))?; + f.write_all(&serialized) + .map_err(|e| Error::CantWriteStateFile(inner.path.clone(), WriteFileError::Io(e)))?; + + Ok(()) + } +} + +/// Helper for any method or function in State to canonicalize the manifest path. +fn canonicalize_or_err(manifest_path: &Path) -> Result { + let manifest = + manifest_path + .canonicalize() + .map_err(|err| Error::FailedToFindCanonicalPath { + path: manifest_path.to_path_buf(), + kind: err, + })?; + Ok(manifest) +} + +struct StateInner { + path: PathBuf, + repr: StateRepr, +} + +impl StateInner { + /// Removes a manifest path from all installations and returns the list of `InstallationId`s + /// that had the said manifest. 
+ fn remove_manifest_from_all_installations( + &mut self, + manifest_path: &Path, + ) -> Vec { + let all_installations_for_given_manifest = self + .repr + .installations + .iter() + .filter(|installation| installation.to_owned().1.manifests.contains(manifest_path)) + .map(|installation| installation.0.to_owned()) + .collect::>(); + + for id in &all_installations_for_given_manifest { + let ins = self.repr.installations.get_mut(id); + if let Some(state_installation) = ins { + state_installation.manifests.remove(manifest_path); + } + } + + all_installations_for_given_manifest + } + + /// Updates an existing installation using `InstallationId` by appending manifest path for a new + /// project that has an existing installation. + /// + /// Also, removes the manifest path from older installations. + fn update_installation_manifests( + &mut self, + installation_id: &InstallationId, + manifest: &Path, + ) -> Result<(), Error> { + // Start by removing the manifest from all installations. This function takes care of + // deleting the installation where this manifest was the last manifest before removal. + self.remove_manifest_from_all_installations(manifest); + + match self.repr.installations.get_mut(installation_id) { + Some(installation) => { + let _ = installation.manifests.insert(manifest.to_path_buf()); + Ok(()) + } + // Maybe this arm can use some tracing. 
+ None => Err(InstallationDoesNotExist(installation_id.0.to_owned())), + } + } +} + +#[derive(Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq, Eq, Debug))] +struct StateRepr { + version: u32, + authentication_token: Option, + #[serde(default)] + installations: BTreeMap, +} + +impl Default for StateRepr { + fn default() -> Self { + Self { + version: CURRENT_FORMAT_VERSION, + authentication_token: None, + installations: BTreeMap::new(), + } + } +} + +#[derive(Serialize, Deserialize, Clone)] +#[cfg_attr(test, derive(PartialEq, Eq, Debug))] +pub struct StateInstallation { + binary_proxies: BTreeMap, + #[serde(default)] + manifests: BTreeSet, +} + +impl StateInstallation { + /// Get all manifests for a given `StateInstallation`. + pub fn manifests(&self) -> &BTreeSet { + &self.manifests + } +} + +#[derive(Clone, Serialize, Deserialize)] +#[cfg_attr(test, derive(PartialEq, Eq))] +pub struct AuthenticationToken(String); + +impl AuthenticationToken { + pub fn seal(token: &str) -> Self { + AuthenticationToken(token.into()) + } + + pub fn unseal(&self) -> &str { + &self.0 + } +} + +impl std::fmt::Debug for AuthenticationToken { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + // We don't want to accidentally include the full authentication token in debug reprs or + // log messages, to avoid leaking it if customers share their criticalup output. This + // custom debug repr redacts all but the last 3 chars, if the string is long enough that + // doing so wouldn't compromise the security of the token. 
+ + const PLAINTEXT_TRAILING_CHARS: usize = 3; + const REDACT_ALL_WHEN_SHORTER_THAN: usize = 9; + + let encoded = if self.0.len() < REDACT_ALL_WHEN_SHORTER_THAN { + self.0.chars().map(|_| '*').collect::() + } else { + self.0 + .char_indices() + .map(|(i, c)| { + if self.0.len() - i > PLAINTEXT_TRAILING_CHARS { + '*' + } else { + c + } + }) + .collect::() + }; + + f.write_str(&encoded) + } +} + +#[cfg(test)] +mod tests { + use crate::test_utils::TestEnvironment; + + use super::*; + + macro_rules! btreemap { + ($($key:expr => $value:expr),*$(,)?) => {{ + let mut map = std::collections::BTreeMap::new(); + $(map.insert($key.into(), $value.into());)* + map + }} + } + + #[test] + fn test_load_state_without_existing_file() { + let test = TestEnvironment::prepare(); + + assert!(!test.config().paths.state_file.exists()); + + let state = State::load(test.config()).unwrap(); + assert_eq!(StateRepr::default(), state.inner.borrow().repr); + } + + #[test] + fn test_load_state_with_existing_file() { + let test_env = TestEnvironment::prepare(); + + std::fs::write( + &test_env.config().paths.state_file, + serde_json::to_vec_pretty(&StateRepr { + version: CURRENT_FORMAT_VERSION, + authentication_token: Some(AuthenticationToken("hello".into())), + installations: BTreeMap::new(), + }) + .unwrap(), + ) + .unwrap(); + + let state = State::load(test_env.config()).unwrap(); + assert_eq!( + Some(AuthenticationToken("hello".into())), + state.authentication_token(None) + ); + } + + #[test] + fn save_same_manifest_content_new_proj_if_existing_installation() { + let test_env = TestEnvironment::with().state().prepare(); + let root = test_env.root(); + let state = test_env.state(); + + // Prepare env with one installation that has one manifest file path. + let installation_id = InstallationId("installation-id-1".to_string()); + let binary_proxies: BTreeMap = btreemap! 
{ + "cargo".to_string() => format!("/path/to/{}/bin/cargo", installation_id.0), + "rustc".to_string() => format!("/path/to/{}/bin/rustc", installation_id.0), + }; + let verified_package = VerifiedPackage { + product: "ferrocene".to_string(), + package: "rusty".to_string(), + proxies_paths: binary_proxies, + }; + + // Add installation and write the state file. + let proj1 = root.join("path/to/proj/1"); + std::fs::create_dir_all(&proj1).unwrap(); + state + .add_installation( + &installation_id, + &[verified_package], + &proj1, + test_env.config(), + ) + .unwrap(); + state.persist().unwrap(); + + // Add a second project manifest for the same installation and write the state file. + let proj2 = root.join("path/to/proj/2"); + std::fs::create_dir_all(&proj2).unwrap(); + let _ = state.update_installation_manifests(&installation_id, &proj2); + state.persist().unwrap(); + + // Check that both unique manifests are present in the installation. + let new_state = State::load(test_env.config()).unwrap(); + let new_state_inner = new_state.inner.borrow(); + let manifests_in_state = &new_state_inner + .repr + .installations + .get(&installation_id) + .unwrap() + .manifests; + assert_eq!( + &BTreeSet::from([ + root.join("path/to/proj/1").canonicalize().unwrap(), + root.join("path/to/proj/2").canonicalize().unwrap() + ]), + manifests_in_state + ); + } + + #[test] + fn same_manifest_content_new_proj_twice_for_existing_installation_still_unique_manifest_paths_only( + ) { + let test_env = TestEnvironment::with().state().prepare(); + let root = test_env.root(); + let state = test_env.state(); + // Prepare env with one installation that has one manifest file path. + let installation_id = InstallationId("installation-id-1".to_string()); + let binary_proxies: BTreeMap = btreemap! 
{ + "cargo".to_string() => format!("/path/to/{}/bin/cargo", installation_id.0), + "rustc".to_string() => format!("/path/to/{}/bin/rustc", installation_id.0), + }; + let verified_package = VerifiedPackage { + product: "ferrocene".to_string(), + package: "rusty".to_string(), + proxies_paths: binary_proxies, + }; + + let proj1 = root.join("path/to/proj/1"); + std::fs::create_dir_all(&proj1).unwrap(); + // Add installation and write the state file. + state + .add_installation( + &installation_id, + &[verified_package], + &proj1, + test_env.config(), + ) + .unwrap(); + state.persist().unwrap(); + + // Load the State file and add update installation manifest with another unique path + // which mimics that for same installation id you can have the new path added + // here we update the same path multiple times. + let proj2 = root.join("path/to/proj/2"); + std::fs::create_dir_all(&proj2).unwrap(); + let state = State::load(test_env.config()).unwrap(); + let _ = state.update_installation_manifests(&installation_id, &proj2); + state.persist().unwrap(); + let _ = state.update_installation_manifests(&installation_id, &proj2); + state.persist().unwrap(); + let _ = state.update_installation_manifests(&installation_id, &proj2); + state.persist().unwrap(); + + let new_state = State::load(test_env.config()).unwrap().inner; + let new_state = &new_state.borrow_mut(); + let manifests_in_state = &new_state + .repr + .installations + .get(&installation_id) + .unwrap() + .manifests; + + assert_eq!( + &BTreeSet::from([ + root.join("path/to/proj/1").canonicalize().unwrap(), + root.join("path/to/proj/2").canonicalize().unwrap() + ]), + manifests_in_state + ); + } + + /// Starts with two installations with one manifest/project each and then updates the State + /// by adding second manifest to the first installation. + /// + /// Should result in empty manifests section of second installation and two manifests in the + /// first installation. 
+ #[test] + fn two_installations_empty_manifests_section_when_moved() { + let test_env = TestEnvironment::with().state().prepare(); + let root = test_env.root(); + let state = test_env.state(); + + // Prepare env with two installations with different manifest paths. + let proj1 = root.join("path/to/proj/1"); + std::fs::create_dir_all(&proj1).unwrap(); + let proj2 = root.join("path/to/proj/2"); + std::fs::create_dir_all(&proj2).unwrap(); + + // Installation 1. + let installation_id_1 = InstallationId("installation-id-1".to_string()); + let binary_proxies_1: BTreeMap = btreemap! { + "cargo".to_string() => format!("/path/to/{}/bin/cargo", installation_id_1.0), + "rustc".to_string() => format!("/path/to/{}/bin/rustc", installation_id_1.0), + }; + let verified_package_1 = VerifiedPackage { + product: "ferrocene".to_string(), + package: "rusty".to_string(), + proxies_paths: binary_proxies_1, + }; + + // Add installation 1 and write the state file. + state + .add_installation( + &installation_id_1, + &[verified_package_1], + &proj1, + test_env.config(), + ) + .unwrap(); + state.persist().unwrap(); + + // Installation 2. + let installation_id_2 = InstallationId("installation-id-2".to_string()); + let binary_proxies_2: BTreeMap = btreemap! { + "amazing".to_string() => format!("/path/to/{}/bin/amazing", installation_id_2.0), + "stuff".to_string() => format!("/path/to/{}/bin/stuff", installation_id_2.0), + }; + let verified_package_2 = VerifiedPackage { + product: "ferrocene".to_string(), + package: "rusty".to_string(), + proxies_paths: binary_proxies_2, + }; + + // Add installation 2 and write the state file. + state + .add_installation( + &installation_id_2, + &[verified_package_2], + &proj2, + test_env.config(), + ) + .unwrap(); + state.persist().unwrap(); + + // Load the State file and add update installation manifest with another unique path + // which mimics that for same installation id you can have the new path added + // here we update the same path multiple times. 
+ let state = State::load(test_env.config()).unwrap(); + let _ = state.update_installation_manifests(&installation_id_1, &proj2); + state.persist().unwrap(); + + // Check that the installation 1 has both project manifests and the installation 2 has + // no project manifests (empty manifests). + let new_state = State::load(test_env.config()).unwrap().inner; + let new_state = &new_state.borrow_mut(); + let manifests_in_installation_1 = &new_state + .repr + .installations + .get(&installation_id_1) + .unwrap() + .manifests; + + assert_eq!( + &BTreeSet::from([ + root.join("path/to/proj/1").canonicalize().unwrap(), + root.join("path/to/proj/2").canonicalize().unwrap() + ]), + manifests_in_installation_1 + ); + + let manifests_in_installation_2 = &new_state + .repr + .installations + .get(&installation_id_2) + .unwrap() + .manifests; + assert_eq!(&BTreeSet::from([]), manifests_in_installation_2); + } + + #[test] + fn test_load_state_with_fs_error() { + let test_env = TestEnvironment::prepare(); + + // Creating a directory instead of a file should result in an IO error when we then try to + // read the contents of the file. 
+ std::fs::create_dir_all(&test_env.config().paths.state_file).unwrap(); + + match State::load(test_env.config()) { + Err(Error::CantReadStateFile(path, _)) => { + assert_eq!(test_env.config().paths.state_file, path); + } + Err(err) => panic!("unexpected error when loading the state: {err:?}"), + Ok(_) => panic!("loading the state file succeeded"), + } + } + + #[test] + fn test_load_state_with_unsupported_version() { + let test_env = TestEnvironment::prepare(); + + std::fs::write( + &test_env.config().paths.state_file, + serde_json::to_vec_pretty(&StateRepr { + version: CURRENT_FORMAT_VERSION + 1, + ..StateRepr::default() + }) + .unwrap(), + ) + .unwrap(); + + match State::load(test_env.config()) { + Err(Error::UnsupportedStateFileVersion(path, version)) => { + assert_eq!(test_env.config().paths.state_file, path); + assert_eq!(CURRENT_FORMAT_VERSION + 1, version); + } + Err(err) => panic!("unexpected error when loading the state: {err:?}"), + Ok(_) => panic!("loading the state file succeeded"), + } + } + + #[test] + fn test_load_state_with_invalid_contents() { + let test_env = TestEnvironment::prepare(); + + std::fs::write(&test_env.config().paths.state_file, b"Hello world\n").unwrap(); + + match State::load(test_env.config()) { + Err(Error::CorruptStateFile(path, error)) => { + assert_eq!(test_env.config().paths.state_file, path); + assert!(error.is_syntax()); + } + Err(err) => panic!("unexpected error when loading the state: {err:?}"), + Ok(_) => panic!("loading the state file succeeded"), + } + } + + #[test] + fn docker_secrets_are_read_from_file() { + let test_env = TestEnvironment::with() + .state() + .root_in_subdir("run/secrets") + .prepare(); + let state = test_env.state(); + state.set_authentication_token(None); + + // Make sure the state file has authentication token as None. 
+ assert_eq!(state.authentication_token(None), None); + + let file_token_content = "my-awesome-token-from-file"; + let token_name = "CRITICALUP_TOKEN"; + + // Add a temp secrets dir and create a token file there and make sure + // that that token is returned if legit file path was given. + let secrets_dir = test_env.root().join::("run/secrets".into()); + std::fs::create_dir_all(&secrets_dir).unwrap(); + std::fs::write(secrets_dir.join(token_name), file_token_content).unwrap(); + let token = test_env + .state() + .authentication_token(Some(secrets_dir.join(token_name).to_str().unwrap())); + assert_eq!(Some(AuthenticationToken(file_token_content.into())), token) + } + + #[test] + fn test_set_authentication_token() { + let test_env = TestEnvironment::with().state().prepare(); + let state = test_env.state(); + + state.set_authentication_token(None); + assert_eq!(None, state.authentication_token(None)); + + state.set_authentication_token(Some(AuthenticationToken("hello world".into()))); + assert_eq!( + Some(AuthenticationToken("hello world".into())), + state.authentication_token(None) + ); + } + + #[test] + fn test_persist_state() { + let test_env = TestEnvironment::with().state().prepare(); + + let token = AuthenticationToken("hello world".into()); + test_env + .state() + .set_authentication_token(Some(token.clone())); + test_env.state().persist().unwrap(); + + let new_state = State::load(test_env.config()).unwrap(); + assert_eq!(Some(token), new_state.authentication_token(None)); + } + + #[test] + fn test_persist_state_with_fs_io_error() { + let test_env = TestEnvironment::with().state().prepare(); + test_env + .state() + .set_authentication_token(Some(AuthenticationToken("hello world".into()))); + + // Simulate a file system error by creating a directory in the path the state file is + // supposed to be written. The current state was generated in memory, so we don't need to + // remove the previous contents at that path. 
+ std::fs::create_dir_all(&test_env.config().paths.state_file).unwrap(); + + match test_env.state().persist() { + Err(Error::CantWriteStateFile(path, WriteFileError::Io(_))) => { + assert_eq!(test_env.config().paths.state_file, path); + } + Err(err) => panic!("unexpected error when persisting the state: {err:?}"), + Ok(_) => panic!("persisting the state file succeeded"), + } + } + + #[test] + fn test_persist_state_with_fs_parent_directory_error() { + let test_env = TestEnvironment::with() + .root_in_subdir("subdir") + .state() + .prepare(); + test_env + .state() + .set_authentication_token(Some(AuthenticationToken("hello world".into()))); + + // Simulate a file system error by creating a file at the path where the parent directory + // of the state file is supposed to be created. The current state was generated in memory, + // so we don't need to remove the previous contents at that path. + std::fs::write(test_env.root().join("subdir"), b"").unwrap(); + + match test_env.state().persist() { + Err(Error::CantWriteStateFile(path, WriteFileError::CantCreateParentDirectory(_))) => { + assert_eq!(test_env.config().paths.state_file, path); + } + Err(err) => panic!("unexpected error when persisting the state: {err:?}"), + Ok(_) => panic!("persisting the state file succeeded"), + } + } + + #[test] + fn test_binary_proxies() { + let test_env = TestEnvironment::with().state().prepare(); + let root = test_env.root(); + let state = test_env.state(); + + let id1 = InstallationId("sample".into()); + let inst1_manifest_path = root.join("proj/1/manifest"); + std::fs::create_dir_all(&inst1_manifest_path).unwrap(); + let id2 = InstallationId("id".into()); + let inst2_manifest_path = root.join("proj/2/manifest"); + std::fs::create_dir_all(&inst2_manifest_path).unwrap(); + + state + .add_installation( + &id1, + &[ + VerifiedPackage { + product: "ferrocene".into(), + package: "foo".into(), + proxies_paths: btreemap!
{ "a" => "foo/a" }, + }, + VerifiedPackage { + product: "ferrocene".into(), + package: "bar".into(), + proxies_paths: btreemap! { "b" => "foo/b" }, + }, + ], + &inst1_manifest_path, + test_env.config(), + ) + .unwrap(); + assert_eq!(Some("foo/a".into()), state.resolve_binary_proxy(&id1, "a")); + assert_eq!(Some("foo/b".into()), state.resolve_binary_proxy(&id1, "b")); + assert_eq!( + vec!["a".to_string(), "b".into()], + state.all_binary_proxy_names() + ); + + state + .add_installation( + &id2, + &[VerifiedPackage { + product: "ferrocene".into(), + package: "foo".into(), + proxies_paths: btreemap! { "a" => "bar/a" }, + }], + &inst2_manifest_path, + test_env.config(), + ) + .unwrap(); + assert_eq!(Some("bar/a".into()), state.resolve_binary_proxy(&id2, "a")); + assert!(state.resolve_binary_proxy(&id2, "b").is_none()); + assert_eq!( + vec!["a".to_string(), "b".into()], + state.all_binary_proxy_names() + ); + + state.remove_installation(&id1); + assert_eq!(vec!["a".to_string()], state.all_binary_proxy_names()); + state.remove_installation(&id2); + + assert!(state.all_binary_proxy_names().is_empty()); + assert!(state.resolve_binary_proxy(&id1, "a").is_none()); + assert!(state.resolve_binary_proxy(&id1, "b").is_none()); + } + + #[test] + fn test_default_state_values() { + // This test ensures the default values for the state file do not change ACCIDENTALLY. If + // you intentionally made a change that resulted in this test failing you should change it + // to reflect the new defaults. 
+ assert_eq!( + StateRepr { + version: 1, + authentication_token: None, + installations: BTreeMap::new(), + }, + StateRepr::default() + ); + } + + #[test] + fn test_authentication_token_debug_repr() { + assert_eq!("", format!("{:?}", AuthenticationToken::seal(""))); + assert_eq!("***", format!("{:?}", AuthenticationToken::seal("123"))); + assert_eq!( + "********", + format!("{:?}", AuthenticationToken::seal("12345678")) + ); + assert_eq!( + "******789", + format!("{:?}", AuthenticationToken::seal("123456789")) + ); + assert_eq!( + "****************789", + format!("{:?}", AuthenticationToken::seal("1234567890123456789")) + ); + } + + #[test] + fn all_unsed_installations_only() { + let test_env = TestEnvironment::with().state().prepare(); + let root = test_env.root(); + let state = test_env.state(); + + // Prepare env with first installation that has one manifest file path. + let installation_id_1 = InstallationId("installation-id-1".to_string()); + let verified_package = VerifiedPackage { + product: "ferrocene".to_string(), + package: "rusty".to_string(), + proxies_paths: BTreeMap::default(), + }; + + let proj1 = root.join("path/to/proj/1"); + std::fs::create_dir_all(&proj1).unwrap(); + // Add installation and write the state file. + state + .add_installation( + &installation_id_1, + &[verified_package.clone()], + &proj1, + test_env.config(), + ) + .unwrap(); + state.persist().unwrap(); + + let proj2 = root.join("path/to/proj/2"); + std::fs::create_dir_all(&proj2).unwrap(); + // Prepare env with second installation that has one manifest file path. + let installation_id_2 = InstallationId("installation-id-2".to_string()); + state + .add_installation( + &installation_id_2, + &[verified_package.clone()], + &proj2, + test_env.config(), + ) + .unwrap(); + state.persist().unwrap(); + + // Add a second project manifest to the first installation. This will render the second + // installation with empty manifests section and will be returned as "unused".
+ let _ = state.update_installation_manifests(&installation_id_1, &proj2); + state.persist().unwrap(); + + let unused_installations = state + .installations() + .iter() + .filter(|item| item.1.manifests().is_empty()) + .map(|item| item.0.to_owned()) + .collect::>(); + + assert_eq!( + Vec::from([installation_id_2.to_owned()]), + unused_installations + ) + } +} diff --git a/crates/criticalup-core/src/test_utils.rs b/crates/criticalup-core/src/test_utils.rs new file mode 100644 index 00000000..609dd0e7 --- /dev/null +++ b/crates/criticalup-core/src/test_utils.rs @@ -0,0 +1,230 @@ +use crate::config::Config; +use crate::download_server_client::DownloadServerClient; +use crate::state::{AuthenticationToken, State}; +use criticaltrust::keys::{EphemeralKeyPair, KeyAlgorithm, KeyPair, KeyRole, PublicKey}; +use criticaltrust::signatures::SignedPayload; +use mock_download_server::MockServer; +use std::path::Path; +use tempfile::TempDir; + +pub(crate) const SAMPLE_AUTH_TOKEN: &str = "criticalup_token_foo"; +pub(crate) const SAMPLE_AUTH_TOKEN_NAME: &str = "token name"; +pub(crate) const SAMPLE_AUTH_TOKEN_CUSTOMER: &str = "internal"; +pub(crate) const SAMPLE_AUTH_TOKEN_EXPIRY: &str = "2022-01-01T00:00:00+00:00"; + +pub(crate) struct TestEnvironment { + root: TempDir, + config: Config, + state: Option, + download_server: Option, + keys: Option, + + mock_server: Option, +} + +impl TestEnvironment { + pub(crate) fn with() -> TestEnvironmentBuilder { + TestEnvironmentBuilder { + state: false, + download_server: false, + keys: false, + root_in_subdir: None, + } + } + + pub(crate) fn prepare() -> Self { + Self::with().prepare() + } + + pub(crate) fn root(&self) -> &Path { + self.root.path() + } + + pub(crate) fn config(&self) -> &Config { + &self.config + } + + pub(crate) fn keys(&self) -> &TestKeys { + self.keys.as_ref().expect("keys not prepared") + } + + pub(crate) fn state(&self) -> &State { + self.state.as_ref().expect("state not prepared") + } + + pub(crate) fn 
download_server(&self) -> &DownloadServerClient { + self.download_server + .as_ref() + .expect("download server not prepared") + } + + pub(crate) fn requests_served_by_mock_download_server(&self) -> usize { + self.mock_server + .as_ref() + .expect("download server not prepared") + .served_requests_count() + } +} + +pub(crate) struct TestEnvironmentBuilder { + state: bool, + download_server: bool, + keys: bool, + root_in_subdir: Option, +} + +impl TestEnvironmentBuilder { + pub(crate) fn state(mut self) -> Self { + self.state = true; + self + } + + pub(crate) fn keys(mut self) -> Self { + self.keys = true; + self + } + + pub(crate) fn download_server(mut self) -> Self { + self.download_server = true; + self.state().keys() + } + + pub(crate) fn root_in_subdir(mut self, subdir: &str) -> Self { + self.root_in_subdir = Some(subdir.into()); + self + } + + pub(crate) fn prepare(self) -> TestEnvironment { + #[cfg(not(target_os = "windows"))] + let root = TempDir::new().expect("failed to create temp dir"); + + #[cfg(target_os = "windows")] + let root = + TempDir::new_in(std::env::current_dir().unwrap()).expect("failed to create temp dir"); + + let mut root_path = root.path().to_path_buf(); + if let Some(subdir) = self.root_in_subdir { + // A subdir creation is a requirement because root cannot be changed to anything + // that does not exist. 
+ #[cfg(target_os = "windows")] + std::fs::create_dir_all(&subdir).unwrap(); + + root_path = root_path.join(subdir); + } + + let mut config = Config::test(root_path).expect("failed to create config"); + + let keys = if self.keys { + let keys = TestKeys::generate(); + config.whitelabel.trust_root = keys.trust_root.public().clone(); + Some(keys) + } else { + None + }; + + let mock_server = if self.download_server { + let server = start_mock_server(keys.as_ref().unwrap().signed_public_keys()); + config.whitelabel.download_server_url = server.url(); + Some(server) + } else { + None + }; + + let state = if self.state { + let state = State::load(&config).expect("failed to load state"); + state.set_authentication_token(Some(AuthenticationToken::seal(SAMPLE_AUTH_TOKEN))); + Some(state) + } else { + None + }; + + let download_server = if self.download_server { + Some(DownloadServerClient::new(&config, state.as_ref().unwrap())) + } else { + None + }; + + TestEnvironment { + root, + config, + state, + keys, + download_server, + mock_server, + } + } +} + +pub(crate) struct TestKeys { + pub(crate) trust_root: EphemeralKeyPair, + pub(crate) root: EphemeralKeyPair, + pub(crate) packages: EphemeralKeyPair, + pub(crate) releases: EphemeralKeyPair, + pub(crate) redirects: EphemeralKeyPair, + + pub(crate) alternate_trust_root: EphemeralKeyPair, + pub(crate) alternate_root: EphemeralKeyPair, + pub(crate) alternate_packages: EphemeralKeyPair, +} + +impl TestKeys { + fn generate() -> Self { + let generate = |role| { + EphemeralKeyPair::generate(KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, role, None) + .unwrap() + }; + + Self { + trust_root: generate(KeyRole::Root), + root: generate(KeyRole::Root), + packages: generate(KeyRole::Packages), + releases: generate(KeyRole::Releases), + redirects: generate(KeyRole::Redirects), + + alternate_trust_root: generate(KeyRole::Root), + alternate_root: generate(KeyRole::Root), + alternate_packages: generate(KeyRole::Packages), + } + } + + fn 
signed_public_keys(&self) -> Vec> { + let mut result = Vec::new(); + let mut sign = |key: &EphemeralKeyPair, keys: &[&EphemeralKeyPair]| { + let mut payload = SignedPayload::new(key.public()).unwrap(); + for key in keys { + payload.add_signature(*key).unwrap(); + } + result.push(payload); + }; + + sign(&self.root, &[&self.trust_root]); + sign(&self.packages, &[&self.root]); + sign(&self.releases, &[&self.root]); + sign(&self.redirects, &[&self.root]); + + sign(&self.alternate_root, &[&self.alternate_trust_root]); + sign(&self.alternate_packages, &[&self.alternate_root]); + + result + } +} + +fn start_mock_server(keys: Vec>) -> MockServer { + use mock_download_server::AuthenticationToken; + + let mut builder = mock_download_server::new(); + builder = builder.add_token( + SAMPLE_AUTH_TOKEN, + AuthenticationToken { + name: SAMPLE_AUTH_TOKEN_NAME.into(), + organization_name: SAMPLE_AUTH_TOKEN_CUSTOMER.into(), + expires_at: Some(SAMPLE_AUTH_TOKEN_EXPIRY.into()), + }, + ); + + for key in keys { + builder = builder.add_key(key); + } + + builder.start() +} diff --git a/crates/criticalup-core/src/utils.rs b/crates/criticalup-core/src/utils.rs new file mode 100644 index 00000000..15cf3b0a --- /dev/null +++ b/crates/criticalup-core/src/utils.rs @@ -0,0 +1,78 @@ +use crate::errors::WriteFileError; +use sha2::{Digest, Sha256}; +use std::fs::File; +use std::hash::Hasher; +use std::io::BufWriter; +use std::path::Path; + +pub(crate) fn open_file_for_write(path: &Path) -> Result, WriteFileError> { + // Ensure the parent directory is always present + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent).map_err(WriteFileError::CantCreateParentDirectory)?; + } + + Ok(BufWriter::new( + File::create(path).map_err(WriteFileError::Io)?, + )) +} + +/// A `Hasher` helper type which is a wrapper to a choice of cryptographic hashing algorithm +/// to generate cryptographic hash of our types. This is needed to make sure we +/// 1. 
do not use [`DefaultHasher`], which may change its algorithm, for hash state +/// 2. bridge the gap between normal [`Hash`] and cryptographic hash (e.g. [`Sha256`]) +/// 3. better ergonomics to create a hash of our types like [`ProjectManifestProduct`] using `#[derive(Hash)]` +pub struct Sha256Hasher(Sha256); + +impl Sha256Hasher { + pub(crate) fn new() -> Self { + Self(Sha256::new()) + } + + /// Provides the final hash value + pub(crate) fn finalize(self) -> String { + format!("{:x}", self.0.finalize()) + } +} + +/// +impl Hasher for Sha256Hasher { + /// This method is unreachable and here to appease the compiler, mandatory method. + fn finish(&self) -> u64 { + unreachable!() + } + + /// Update the hasher state, mandatory method. + fn write(&mut self, bytes: &[u8]) { + self.0.update(bytes) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::hash::Hash; + + #[test] + fn test_sha256_for_a_struct_works() { + #[derive(Hash)] + struct Abc { + name: String, + version: String, + dependencies: Vec, + } + + let abc = Abc { + name: "abc".to_string(), + version: "1.2.3".to_string(), + dependencies: vec!["dep2".to_string(), "dep1".to_string()], + }; + + let mut hasher = Sha256Hasher::new(); + abc.hash(&mut hasher); + let final_hash = hasher.finalize(); + assert_eq!( + "fb9eee112b5cee551f7a5088402e53dbbdaec2a1e1cd3f4663a1b81b1b53015f", + final_hash + ) + } +} diff --git a/crates/criticalup-dev/Cargo.toml b/crates/criticalup-dev/Cargo.toml new file mode 100644 index 00000000..f20d27ac --- /dev/null +++ b/crates/criticalup-dev/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "criticalup-dev" +version = "0.1.0" +edition = "2021" +publish = false + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +criticaltrust = { path = "../criticaltrust" } +criticalup-cli = { path = "../criticalup-cli" } diff --git a/crates/criticalup-dev/README.md b/crates/criticalup-dev/README.md new file mode 100644 index 
00000000..b88146c4 --- /dev/null +++ b/crates/criticalup-dev/README.md @@ -0,0 +1,47 @@ +`criticalup-dev` +---------------- + +A command line tool similar to `rustup` to manage installations of Ferrocene toolchains. + +> [!NOTE] +> The documentation here is primarily intended for developers of the `criticalup-dev` crate. +> +> Ferrocene users should refer to [the documentation][ferrocene-public-docs] for all their needs. + +Installation +============ + +> [!NOTE] +> This repository is currently private, you need to use an SSO-authenticated SSH key or token along with the `--git` parameter. + +```bash +cargo install --git ssh://git@github.com/ferrocene/criticalup.git criticalup-dev +criticalup-dev --help +``` + +Usage +===== + +To authenticate with the portal: + +```bash +criticalup-dev auth set +``` + +Then, enter a token obtained from the [Token page of the Customer Portal][customer-portal-tokens]. + +To check authentication status: + +```bash +criticalup-dev auth +``` + +To install the toolchain specified by the `criticalup.toml` in the current working directory: + +```bash +criticalup-dev install +``` + +[ferrocene-public-docs]: https://public-docs.ferrocene.dev/main/index.html +[customer-portal]: https://customers-dev.ferrocene.dev/ +[customer-portal-tokens]: https://customers-dev.ferrocene.dev/users/tokens \ No newline at end of file diff --git a/crates/criticalup-dev/src/main.rs b/crates/criticalup-dev/src/main.rs new file mode 100644 index 00000000..98d89744 --- /dev/null +++ b/crates/criticalup-dev/src/main.rs @@ -0,0 +1,30 @@ +use criticaltrust::keys::newtypes::PublicKeyBytes; +use criticaltrust::keys::{KeyAlgorithm, KeyRole, PublicKey}; + +fn main() { + let whitelabel = criticalup_cli::WhitelabelConfig { + name: "criticalup-dev", + http_user_agent: concat!("criticalup/", env!("CARGO_PKG_VERSION"), " (dev)"), + download_server_url: "https://criticalup-downloads-dev.ferrocene.dev".into(), + customer_portal_url: "https://customers-dev.ferrocene.dev".into(), 
+ // TODO: this key is not permanent, and must be changed before criticalup is released. The + // key was ephemeral when it was generated, and is not persisted anywhere. If we keep it + // as-is in the binaries we release, we won't be able to change the signing keys. + trust_root: PublicKey { + role: KeyRole::Root, + algorithm: KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, + expiry: None, + public: PublicKeyBytes::borrowed(&[ + 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134, 72, 206, 61, 3, 1, + 7, 3, 66, 0, 4, 19, 173, 118, 198, 129, 248, 105, 3, 11, 48, 104, 0, 121, 174, 246, + 253, 35, 160, 246, 160, 6, 104, 28, 0, 105, 25, 55, 112, 246, 234, 57, 192, 254, + 247, 238, 41, 63, 104, 251, 171, 202, 168, 117, 89, 203, 124, 0, 92, 203, 94, 171, + 68, 232, 71, 66, 59, 100, 64, 66, 53, 107, 204, 134, 227, + ]), + }, + test_mode: false, + }; + + let args = std::env::args_os().collect::>(); + std::process::exit(criticalup_cli::main(whitelabel, &args)); +} diff --git a/crates/criticalup/Cargo.toml b/crates/criticalup/Cargo.toml new file mode 100644 index 00000000..86e79081 --- /dev/null +++ b/crates/criticalup/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "criticalup" +version = "0.1.0" +edition = "2021" +authors = ["The CriticalUp Developers"] +description = "Ferrocene toolchain manager" + +[package.metadata.wix] +upgrade-guid = "CB83A96F-1F5B-4C75-A077-876EB03EEA67" +path-guid = "5DABF654-7C2A-4DE4-8CAD-21CCC843B105" +license = false +eula = false + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +criticaltrust = { path = "../criticaltrust" } +criticalup-cli = { path = "../criticalup-cli" } diff --git a/crates/criticalup/README.md b/crates/criticalup/README.md new file mode 100644 index 00000000..a114a40e --- /dev/null +++ b/crates/criticalup/README.md @@ -0,0 +1,47 @@ +`criticalup` +------------ + +A command line tool similar to `rustup` to manage installations of Ferrocene toolchains. 
+ +> [!NOTE] +> The documentation here is primarily intended for developers of the `criticalup` crate. +> +> Ferrocene users should refer to [the documentation][ferrocene-public-docs] for all their needs. + +Installation +============ + +> [!NOTE] +> This repository is currently private, you need to use an SSO-authenticated SSH key or token along with the `--git` parameter. + +```bash +cargo install --git ssh://git@github.com/ferrocene/criticalup.git criticalup +criticalup --help +``` + +Usage +===== + +To authenticate with the portal: + +```bash +criticalup auth set +``` + +Then, enter a token obtained from the [Token page of the Customer Portal][customer-portal-tokens]. + +To check authentication status: + +```bash +criticalup auth +``` + +To install the toolchain specified by the `criticalup.toml` in the current working directory: + +```bash +criticalup install +``` + +[ferrocene-public-docs]: https://public-docs.ferrocene.dev/main/index.html +[customer-portal]: https://customers.ferrocene.dev/ +[customer-portal-tokens]: https://customers.ferrocene.dev/users/tokens \ No newline at end of file diff --git a/crates/criticalup/src/main.rs b/crates/criticalup/src/main.rs new file mode 100644 index 00000000..17551af7 --- /dev/null +++ b/crates/criticalup/src/main.rs @@ -0,0 +1,30 @@ +use criticaltrust::keys::newtypes::PublicKeyBytes; +use criticaltrust::keys::{KeyAlgorithm, KeyRole, PublicKey}; + +fn main() { + let whitelabel = criticalup_cli::WhitelabelConfig { + name: "criticalup", + http_user_agent: concat!("criticalup/", env!("CARGO_PKG_VERSION")), + download_server_url: "https://criticalup-downloads.ferrocene.dev".into(), + customer_portal_url: "https://customers.ferrocene.dev/".into(), + // TODO: this key is not permanent, and must be changed before criticalup is released. The + // key was ephemeral when it was generated, and is not persisted anywhere. If we keep it + // as-is in the binaries we release, we won't be able to change the signing keys. 
+ trust_root: PublicKey { + role: KeyRole::Root, + algorithm: KeyAlgorithm::EcdsaP256Sha256Asn1SpkiDer, + expiry: None, + public: PublicKeyBytes::borrowed(&[ + 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134, 72, 206, 61, 3, 1, + 7, 3, 66, 0, 4, 145, 199, 131, 120, 202, 45, 142, 29, 104, 51, 133, 141, 86, 87, + 31, 25, 63, 99, 132, 215, 24, 171, 63, 51, 54, 72, 153, 241, 61, 193, 107, 196, + 195, 226, 200, 57, 245, 120, 201, 209, 158, 75, 216, 115, 53, 114, 11, 12, 108, + 186, 206, 173, 85, 153, 172, 172, 172, 191, 74, 241, 22, 96, 62, 242, + ]), + }, + test_mode: false, + }; + + let args = std::env::args_os().collect::>(); + std::process::exit(criticalup_cli::main(whitelabel, &args)); +} diff --git a/crates/criticalup/wix/main.wxs b/crates/criticalup/wix/main.wxs new file mode 100644 index 00000000..d9d3b3e5 --- /dev/null +++ b/crates/criticalup/wix/main.wxs @@ -0,0 +1,238 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + 1 + + + + + + + + + + + + + + + + + + diff --git a/crates/mock-download-server/Cargo.toml b/crates/mock-download-server/Cargo.toml new file mode 100644 index 00000000..e667690c --- /dev/null +++ b/crates/mock-download-server/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "mock-download-server" +version = "0.0.0" +edition = "2021" + +[dependencies] +anyhow = "1.0.55" +criticaltrust = { path = "../criticaltrust" } +serde = { version = "1.0.136", features = ["derive"] } +serde_json = "1.0.79" +tiny_http = { version = "0.12.0", default-features = false, features = ["rustls"] } diff --git a/crates/mock-download-server/src/handlers.rs b/crates/mock-download-server/src/handlers.rs new file mode 100644 index 00000000..ba90d742 --- /dev/null +++ b/crates/mock-download-server/src/handlers.rs @@ -0,0 +1,100 @@ +use crate::Serialize; +use crate::{AuthenticationToken, Data}; +use criticaltrust::manifests::ManifestVersion; +use tiny_http::{Header, 
Method, Request, Response, ResponseBox, StatusCode}; + +pub(crate) fn handle_request(data: &Data, req: &Request) -> ResponseBox { + let url_parts = req + .url() + .split('/') + .filter(|c| !c.is_empty()) + .collect::>(); + + let resp = match (req.method(), url_parts.as_slice()) { + (Method::Get, ["v1", "tokens", "current"]) => handle_v1_tokens_current(data, req), + (Method::Get, ["v1", "keys"]) => handle_v1_keys(data), + (Method::Get, ["v1", "releases", product, release]) => { + handle_v1_release(data, product, release) + } + _ => handle_404(), + }; + + // Handlers use `Result` to be able to use `?` to propagate error responses. There + // is no other difference between returning `Ok` or `Err`. + match resp { + Ok(resp) => resp.into_tiny_http(), + Err(resp) => resp.into_tiny_http(), + } +} + +fn handle_v1_tokens_current(data: &Data, req: &Request) -> Result { + let token = authorize(data, req)?; + Ok(Resp::json(token)) +} + +fn handle_v1_keys(data: &Data) -> Result { + Ok(Resp::json(&criticaltrust::manifests::KeysManifest { + version: ManifestVersion, + keys: data.keys.clone(), + })) +} + +fn handle_v1_release(data: &Data, product: &str, release: &str) -> Result { + let rm = data + .release_manifests + .get(&(product.to_string(), release.to_string())); + let resp = Resp::json(rm.expect("Did not get a release manifest")); + Ok(resp) +} + +fn handle_404() -> Result { + Ok(Resp::NotFound) +} + +fn authorize<'a>(data: &'a Data, req: &Request) -> Result<&'a AuthenticationToken, Resp> { + let header = req + .headers() + .iter() + .find(|h| h.field.equiv("authorization")) + .ok_or(Resp::Forbidden)?; + + let without_prefix = header + .value + .as_str() + .strip_prefix("Bearer ") + .ok_or(Resp::Forbidden)?; + + if let Some(token) = data.tokens.get(without_prefix) { + Ok(token) + } else { + Err(Resp::Forbidden) + } +} + +#[derive(Debug)] +enum Resp { + Forbidden, + NotFound, + Json(Vec), +} + +impl Resp { + fn json(data: &T) -> Resp { + let serialized = 
serde_json::to_vec_pretty(data).unwrap(); + Resp::Json(serialized) + } + + fn into_tiny_http(self) -> ResponseBox { + match self { + Resp::Json(data) => Response::from_data(data) + .with_status_code(StatusCode(200)) + .with_header( + Header::from_bytes(&b"Content-Type"[..], &b"application/json"[..]).unwrap(), + ) + .boxed(), + + Resp::Forbidden => Response::empty(StatusCode(403)).boxed(), + Resp::NotFound => Response::empty(StatusCode(404)).boxed(), + } + } +} diff --git a/crates/mock-download-server/src/lib.rs b/crates/mock-download-server/src/lib.rs new file mode 100644 index 00000000..7c737230 --- /dev/null +++ b/crates/mock-download-server/src/lib.rs @@ -0,0 +1,66 @@ +mod handlers; +mod server; + +pub use crate::server::MockServer; +use criticaltrust::keys::PublicKey; +use criticaltrust::manifests::ReleaseManifest; +use criticaltrust::signatures::SignedPayload; +use serde::Serialize; +use std::borrow::Cow; +use std::collections::HashMap; + +#[derive(Serialize, Clone)] +#[serde(rename_all = "kebab-case")] +pub struct AuthenticationToken { + pub name: Cow<'static, str>, + pub organization_name: Cow<'static, str>, + pub expires_at: Option>, +} + +pub struct Data { + pub tokens: HashMap, + pub keys: Vec>, + pub release_manifests: HashMap<(String, String), ReleaseManifest>, +} + +pub fn new() -> Builder { + Builder { + data: Data { + tokens: HashMap::new(), + keys: Vec::new(), + release_manifests: HashMap::new(), + }, + } +} + +pub struct Builder { + data: Data, +} + +impl Builder { + pub fn add_token(mut self, token: &str, info: AuthenticationToken) -> Self { + self.data.tokens.insert(token.into(), info); + self + } + + pub fn add_key(mut self, key: SignedPayload) -> Self { + self.data.keys.push(key); + self + } + + pub fn add_release_manifest( + mut self, + product: String, + release: String, + manifest: ReleaseManifest, + ) -> Self { + self.data + .release_manifests + .insert((product, release), manifest); + self + } + + pub fn start(self) -> MockServer { + 
MockServer::spawn(self.data) + } +} diff --git a/crates/mock-download-server/src/server.rs b/crates/mock-download-server/src/server.rs new file mode 100644 index 00000000..00014ea6 --- /dev/null +++ b/crates/mock-download-server/src/server.rs @@ -0,0 +1,75 @@ +use crate::handlers::handle_request; +use crate::Data; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::{Arc, Mutex}; +use std::thread::JoinHandle; +use tiny_http::Server; + +pub struct MockServer { + data: Arc>, + server: Arc, + handle: Option>, + served_requests: Arc, +} + +impl MockServer { + pub(crate) fn spawn(data: Data) -> Self { + let data = Arc::new(Mutex::new(data)); + + // Binding on port 0 results in the operative system picking a random available port, + // without the need of generating a random port ourselves and validating the port is not + // being used by another process. + // + // The real port can be then retrieved by checking the address of the bound server. + let server = Arc::new(Server::http("127.0.0.1:0").unwrap()); + + let served_requests = Arc::new(AtomicUsize::new(0)); + + let data_clone = data.clone(); + let server_clone = server.clone(); + let served_requests_clone = served_requests.clone(); + let handle = std::thread::spawn(move || { + server_thread(data_clone, server_clone, served_requests_clone); + }); + + Self { + data, + server, + handle: Some(handle), + served_requests, + } + } + + pub fn url(&self) -> String { + format!("http://{}", self.server.server_addr()) + } + + pub fn served_requests_count(&self) -> usize { + self.served_requests.load(Ordering::SeqCst) + } + + pub fn edit_data(&self, f: impl FnOnce(&mut Data)) { + f(&mut self.data.lock().unwrap()); + } +} + +impl Drop for MockServer { + fn drop(&mut self) { + self.server.unblock(); + if let Some(handle) = self.handle.take() { + match handle.join() { + Ok(_) => (), + Err(err) => eprintln!("{err:?}"), + } + } + } +} + +fn server_thread(data: Arc>, server: Arc, served_requests: Arc) { + for request in 
server.incoming_requests() { + let response = handle_request(&data.lock().unwrap(), &request); + request.respond(response).unwrap(); + + served_requests.fetch_add(1, Ordering::SeqCst); + } +} diff --git a/docs/.flake8 b/docs/.flake8 new file mode 100644 index 00000000..1ccc4f8e --- /dev/null +++ b/docs/.flake8 @@ -0,0 +1,6 @@ +# Configuration recommended by Black +# https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#flake8 + +[flake8] +max-line-length = 88 +extend-ignore = E203 diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 00000000..a0150b5c --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,10 @@ +# Sphinx ignored files +/build +/.venv + +# Python ignored files +__pycache__ +*.py[co] + +# Linkchecker ignored files +/.linkchecker diff --git a/docs/.gitmodules b/docs/.gitmodules new file mode 100644 index 00000000..1d58fdb4 --- /dev/null +++ b/docs/.gitmodules @@ -0,0 +1,3 @@ +[submodule "shared"] + path = shared + url = https://github.com/ferrocene/sphinx-shared-resources diff --git a/docs/LICENSES/Apache-2.0.txt b/docs/LICENSES/Apache-2.0.txt new file mode 100644 index 00000000..137069b8 --- /dev/null +++ b/docs/LICENSES/Apache-2.0.txt @@ -0,0 +1,73 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/docs/LICENSES/MIT.txt b/docs/LICENSES/MIT.txt new file mode 100644 index 00000000..2071b23b --- /dev/null +++ b/docs/LICENSES/MIT.txt @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/docs/README.rst b/docs/README.rst new file mode 100644 index 00000000..9876254e --- /dev/null +++ b/docs/README.rst @@ -0,0 +1,77 @@ +================================ +CriticalUp Documentation +================================ + +.. raw:: html + +

Read the + specification »

+ +The CriticalUp Documentation (CUD) is a document describing the CriticalUp +tool. + +The CriticalUp Documentation text is licensed under either the ``MIT`` +or ``Apache-2.0`` licenses, at your option. Individual files might have +different licensing. Licensing metadata is present in each file, and the full +licenses text is present in the ``LICENSES/`` directory. + +Building the specification +========================== + +CUD uses `Sphinx`_ to build a rendered version of the specification. To +simplify building the rendered version, we created a script called ``make.py`` +that takes care of installing the expected Sphinx release and invoking it with +the right flags. + +You can build the rendered version by running:: + + ./make.py + +By default, Sphinx uses incremental rebuilds to generate the content that +changed since the last invocation. If you notice a problem with incremental +rebuilds, you can pass the ``-c`` flag to clear the existing artifacts before +building:: + + ./make.py -c + +The rendered version will be available in ``build/html/``. + +You can also start a local server on port 8000 with automatic rebuild and +reload whenever you change a file by passing the ``-s`` flag:: + + ./make.py -s + +Checking links consistency +========================== + +It's possible to run Rust's linkchecker tool on the rendered documentation, to +see if there are broken links. To do so, pass the ``--check-links`` flag:: + + ./make.py --check-links + +This will clone the source code of the tool, build it, and execute it on the +rendered documentation. + +.. _Sphinx: https://www.sphinx-doc.org + +Updating build dependencies +=========================== + +The CUD uses ``pip-tools`` to manage the Python dependencies used for builds, +as it allows pinning hashes for the dependencies. 
While it doesn't add any +additional burden when installing dependencies (the format it outputs is +understood by `pip`), you have to install it when regenerating the +``requirements.txt`` file. + +To install `pip-tools`, we recommend first installing `pipx`_, and then +running:: + + pipx install pip-tools + +Once that's done, you can change the list of desired dependencies in the +``requirements.in`` file, and run this command to regenerate the +``requirements.txt`` file:: + + pip-compile --generate-hashes + +.. _pipx: https://pypa.github.io/pipx/ diff --git a/docs/exts/.gitkeep b/docs/exts/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/docs/make.py b/docs/make.py new file mode 100755 index 00000000..2c529ad5 --- /dev/null +++ b/docs/make.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +import os +import subprocess +import sys + +root = os.path.abspath(os.path.dirname(__file__)) +subprocess.run( + ["git", "submodule", "update", "--init"], + check=True, + cwd=root, +) + +sys.path.insert(0, "shared") +import make_common # noqa: E402 + +make_common.main(root) diff --git a/docs/sphinx-substitutions.toml b/docs/sphinx-substitutions.toml new file mode 100644 index 00000000..1e023eeb --- /dev/null +++ b/docs/sphinx-substitutions.toml @@ -0,0 +1,15 @@ +# This file defines all the substitutions available in the Sphinx documentation +# sites using the `ferrocene_qualification` extension. 
+ +# look at substitutions.py for the rest of the substitutions + +ferrocene_ASIL = "ASIL D" +ferrocene_TCL = "TCL 3" +ferrocene_TQL = "class T3" + +iso_ref = "ISO-26262:2018" +iso_doc = "Road Vehicles - Functional Safety" +iec_ref = "IEC-61508:2010" +iec_doc = "Functional safety of electrical/electronic/programmable electronic safety-related systems" +9001_ref = "ISO-9001:2015" +9001_doc = "Quality management systems — Requirements" diff --git a/docs/src/cli.rst b/docs/src/cli.rst new file mode 100644 index 00000000..c953a2a3 --- /dev/null +++ b/docs/src/cli.rst @@ -0,0 +1,4 @@ +.. _cli: + +Error Codes +=========== \ No newline at end of file diff --git a/docs/src/conf.py b/docs/src/conf.py new file mode 100644 index 00000000..66d173b8 --- /dev/null +++ b/docs/src/conf.py @@ -0,0 +1,62 @@ +# -- Path setup -------------------------------------------------------------- + +import os +import sys + +sys.path.append(os.path.abspath("../shared/exts")) + + +# -- Project information ----------------------------------------------------- + +project = "CriticalUp Documentation" +copyright = "The Ferrocene Developers" +author = "The Ferrocene Developers" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "ferrocene_toctrees", + "ferrocene_intersphinx_support", + "ferrocene_qualification", + "ferrocene_domain_cli", +] + +# autosectionlabel unique names settings +autosectionlabel_prefix_document = True +ferrocene_substitutions_path = "sphinx-substitutions.toml" +ferrocene_target_names_path = "target-names.toml" +ferrocene_id = "CUD" + +# Add any paths that contain templates here, relative to this directory. +templates_path = [] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# + +html_theme = "ferrocene" +html_theme_path = ["../shared/themes"] + +html_theme_options = { + "license": "MIT or Apache 2.0", +} + +html_title = "CriticalUp Documentation" +html_short_title = "CriticalUp Documentation" + +# -- Options for linting ----------------------------------------------------- + +lint_alphabetical_section_titles = ["glossary"] + +lint_no_paragraph_ids = ["index"] diff --git a/docs/src/criticalup_toml.rst b/docs/src/criticalup_toml.rst new file mode 100644 index 00000000..3f9db39b --- /dev/null +++ b/docs/src/criticalup_toml.rst @@ -0,0 +1,4 @@ +.. _criticalup_toml: + +criticalup.toml +=============== \ No newline at end of file diff --git a/docs/src/index.rst b/docs/src/index.rst new file mode 100644 index 00000000..e88b1109 --- /dev/null +++ b/docs/src/index.rst @@ -0,0 +1,40 @@ +Ferrocene User Manual +===================== + +.. toctree:: + :numbered: + :maxdepth: 2 + :caption: About this manual: + + overview + +.. toctree:: + :numbered: + :maxdepth: 2 + :caption: About CriticalUp: + + system-requirements + install + platforms + +.. toctree:: + :numbered: + :maxdepth: 2 + :caption: Using CriticalUp: + + using-criticalup/authenticating + using-criticalup/toolchain-management + using-criticalup/running-tools + +.. toctree:: + :numbered: + :maxdepth: 2 + :caption: Reference: + + cli + criticalup_toml + +Indices and tables +------------------ + +* :ref:`search` diff --git a/docs/src/install.rst b/docs/src/install.rst new file mode 100644 index 00000000..c765c0a1 --- /dev/null +++ b/docs/src/install.rst @@ -0,0 +1,49 @@ +.. _install: + +Installing CriticalUp +===================== + +This chapter describes how to install CriticalUp. 
+ +Before proceeding, you should identify the :doc:`platform <../platforms>` you +want to install onto. You must pick the platform of the host you're going to +install CriticalUp on. + +Based on the platform you chose, you must follow the directions for the +relevant operating system. Installation and usage does not differ between +architectures unless otherwise noted. + +Linux +----- + +From a terminal run: + +.. code-block:: + + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/ferrocene/criticalup/releases/download/criticalup-cli-v0.0.0/criticalup-cli-installer.sh | sh + +CriticalUp will install into ``$XDG_DATA_HOME/criticalup``, or if that environment variable is not set, ``$HOME/.local/share/criticalup``. + + +MacOS +----- + +From a terminal run: + +.. code-block:: + + curl --proto '=https' --tlsv1.2 -LsSf https://github.com/ferrocene/criticalup/releases/download/criticalup-cli-v0.0.0/criticalup-cli-installer.sh | sh + +CriticalUp will install into ``$HOME/Library/Application Support/criticalup``. + + +Windows +------- + +From a terminal run: + +.. code-block:: + + powershell -c "irm https://github.com/ferrocene/criticalup/releases/download/criticalup-cli-v0.0.0/criticalup-cli-installer.ps1 | iex" + +CriticalUp will install into ``{FOLDERID_RoamingAppData}``, usually ``%appdata%\\criticalup``. \ No newline at end of file diff --git a/docs/src/overview.rst b/docs/src/overview.rst new file mode 100644 index 00000000..9be9532c --- /dev/null +++ b/docs/src/overview.rst @@ -0,0 +1,22 @@ +.. _overview: + +Overview +======== + +This Manual describes the use of CriticalUp, the Ferrocene toolchain manager. + +This Manual assumes familiarity with the command line and outlines +instructions specific to CriticalUp. + +Structure +--------- + +This guide contains the following chapters: + +* *About CriticalUp* describes the requirements and process of installing + CriticalUp. +* *Using CriticalUp* describes how to use CriticalUp to manage Ferrocene + installations. 
+* *References* provides further support and information about other + documentation. + diff --git a/docs/src/platforms.rst b/docs/src/platforms.rst new file mode 100644 index 00000000..89170bc6 --- /dev/null +++ b/docs/src/platforms.rst @@ -0,0 +1,42 @@ +.. _platforms: + +Platforms +========= + +CriticalUp has support for multiple platforms. + + +Supported platforms +------------------- + +While CriticalUp is not a qualified tool, it can be used to install qualified +toolchains. You must refer to the documentation for the version of Ferrocene +you are using to determine if a toolchain is qualified for safety-critical +contexts. + +.. list-table:: + :header-rows: 1 + + * - Target + - Triple + - Notes + + * - :target:`x86_64-unknown-linux-gnu` + - ``x86_64-unknown-linux-gnu`` + - \- + + * - :target:`aarch64-apple-darwin` + - ``aarch64-apple-darwin`` + - \- + + * - :target:`aarch64-unknown-linux-gnu` + - ``aarch64-unknown-linux-gnu`` + - \- + + * - :target:`x86_64-pc-windows-msvc` + - ``x86_64-pc-windows-msvc`` + - \- + + +If your project needs support for a target not listed here, please reach out to +the Ferrocene support team. diff --git a/docs/src/system-requirements.rst b/docs/src/system-requirements.rst new file mode 100644 index 00000000..cf9c7973 --- /dev/null +++ b/docs/src/system-requirements.rst @@ -0,0 +1,18 @@ +.. _system_requirements: + +System Requirements +=================== + +Even though any machine can run the CriticalUp toolchain, in order to get the +best experience, we recommend using a machine with many cores, since parts of +the compilation process are internally parallelized. The benefit of more cores +will vary depending on the structure of the project being compiled. + +A comfortable setup for a compiler server is a machine with 8 physical cores or +more, with at least 16 GB of memory (2 GB per core). For a desktop machine, a +minimum of 2 cores is recommended (8 preferred), with at least 2GB per core (so +4 to 16GB). 
+ +Note that using local and fast drives will also make a difference in terms of +build and link time. Network drives should be avoided as much as possible and +will result in degraded performance. diff --git a/docs/src/using-criticalup/authenticating.rst b/docs/src/using-criticalup/authenticating.rst new file mode 100644 index 00000000..27b2845e --- /dev/null +++ b/docs/src/using-criticalup/authenticating.rst @@ -0,0 +1,43 @@ +.. _authenticate: + +Authenticating +============== + +This chapter describes how to authenticate CriticalUp with the +`Ferrocene Customers Portal`_. + +The example assumes you have a preexisting Ferrocene account. This example +presumes no existing directory structure. + +.. _Ferrocene Customers Portal: https://customers.ferrocene.dev/ + +After :ref:`installing CriticalUp ` it's possible to authenticate +CriticalUp via the ``criticalup auth set`` subcommand. + + +.. code-block:: + + criticalup auth set + +Follow the on-screen instructions to generate a new token, then paste the token +into the prompt. CriticalUp will validate that the provided token is valid. + + +Check Authentication Status +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The current authentication state and token used can be reviewed with the ``auth`` command. + +.. code-block:: + + criticalup auth + + +Unauthenticating +^^^^^^^^^^^^^^^^ + +In order to remove the authenticated token, the ``auth remove`` command can be used. + +.. code-block:: + + criticalup auth remove \ No newline at end of file diff --git a/docs/src/using-criticalup/running-tools.rst b/docs/src/using-criticalup/running-tools.rst new file mode 100644 index 00000000..c0e2363e --- /dev/null +++ b/docs/src/using-criticalup/running-tools.rst @@ -0,0 +1,46 @@ +.. _running_tools: + +Running Tools +============= + +This chapter describes how to run specific tools using CriticalUp. + +The examples in this chapter assume the following directory structure: + +.. code-block:: + + . 
+ └── project + └── criticalup.toml + +After :ref:`installing CriticalUp `, +:ref:`authenticating `, and :ref:`installing a toolchain +`, CriticalUp can be used to run the specified tools +from the installed toolchain. + +CriticalUp creates a set of *binary proxies* for tools which it has installed. +These discover the relevant ``criticalup.toml`` and execute the correct +version of the tool. + +.. note:: + + If CriticalUp does not find a ``criticalup.toml`` in the current directory, + it will search the parent directory, then the parent of that, up to the root + directory of the system. + +.. code-block:: + + cd project + criticalup run rustc --help + + +Locating Tools +^^^^^^^^^^^^^^ + +We can find the true path of a tool for the current toolchain with the ``which`` command: + + +.. code-block:: + + cd project + criticalup which rustc diff --git a/docs/src/using-criticalup/toolchain-management.rst b/docs/src/using-criticalup/toolchain-management.rst new file mode 100644 index 00000000..137d0e4b --- /dev/null +++ b/docs/src/using-criticalup/toolchain-management.rst @@ -0,0 +1,89 @@ +.. _toolchain_management: + +Toolchain Management +==================== + +This chapter describes how to manage toolchains using CriticalUp. + + +The examples in this chapter assume the following directory structure: + +.. code-block:: + + . + └── project + └── criticalup.toml + +Where the ``criticalup.toml`` contains the following content: + +.. code-block:: + + manifest-version = 1 + + [products.ferrocene] + release = "nightly-2024-04-03" + packages = [ + "rustc-x86_64-unknown-linux-gnu", + "cargo-x86_64-unknown-linux-gnu", + ] + +.. _install_toolchain: + +Installing Toolchains +^^^^^^^^^^^^^^^^^^^^^ + +After :ref:`installing CriticalUp ` and +:ref:`authenticating ` CriticalUp is ready to manage +toolchains. + +You can change directory into the project and install the required +toolchain. + +.. 
code-block:: + + cd project + criticalup install + +To change the installed products, edit the ``criticalup.toml`` as desired, for example: + +.. code-block:: + + manifest-version = 1 + + [products.ferrocene] + release = "nightly-2024-04-03" + packages = [ + "rustc-x86_64-unknown-linux-gnu", + "cargo-x86_64-unknown-linux-gnu", + "rustfmt-x86_64-unknown-linux-gnu", # Line added + ] + +Then run the install command again: + + +.. code-block:: + + criticalup install + +Removing Toolchains +^^^^^^^^^^^^^^^^^^^ + +An installation can be removed by running the ``criticalup remove`` command +from the directory containing the ``criticalup.toml``: + +.. code-block:: + + cd project + criticalup remove + +Cleaning Unused Toolchains +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Over time CriticalUp's stored installations may accumulate artifacts that +are no longer used. If CriticalUp's state directory begins to consume too much +disk space the ``clean`` command can help by deleting unused toolchains. + + +.. code-block:: + + criticalup clean \ No newline at end of file diff --git a/docs/target-names.toml b/docs/target-names.toml new file mode 100644 index 00000000..de1dbd9e --- /dev/null +++ b/docs/target-names.toml @@ -0,0 +1,18 @@ +# This file defines the human-readable names for the targets supported by +# Ferrocene, to provide consistency across documents on how we refer to them. +# +# In the documents, as long as the ferrocene_qualification extension is added, +# you can refer to them with :target:`triple`. 
+ +aarch64-unknown-none = "Armv8-A bare-metal" +thumbv7em-none-eabi = "Armv7E-M bare-metal (soft-float)" +thumbv7em-none-eabihf = "Armv7E-M bare-metal (hard-float)" +armv8r-none-eabihf = "Armv8-R bare-metal (hard-float)" +armv7r-none-eabihf = "Armv7-R bare-metal (hard-float)" +armebv7r-none-eabihf = "Armv7-R bare-metal (hard-float, big-endian)" +wasm32-unknown-unknown = "WASM bare-metal" +x86_64-unknown-linux-gnu = "x86-64 Linux (glibc)" +aarch64-unknown-linux-gnu = "Armv8-A Linux (glibc)" +aarch64-apple-darwin = "Apple Silicon macOS" +x86_64-apple-darwin = "x86-64 macOS" +x86_64-pc-windows-msvc = "x86-64 Windows" diff --git a/docs/themes/.gitkeep b/docs/themes/.gitkeep new file mode 100644 index 00000000..e69de29b