diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml new file mode 100644 index 0000000..be006de --- /dev/null +++ b/.github/dependabot.yaml @@ -0,0 +1,13 @@ +# Keep GitHub Actions up to date with GitHub's Dependabot... +# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/keeping-your-actions-up-to-date-with-dependabot +# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem +version: 2 +updates: + - package-ecosystem: github-actions + directory: / + groups: + github-actions: + patterns: + - "*" # Group all Actions updates into a single larger pull request + schedule: + interval: weekly diff --git a/.github/workflows/beta.yaml b/.github/workflows/beta.yaml new file mode 100644 index 0000000..7359d5b --- /dev/null +++ b/.github/workflows/beta.yaml @@ -0,0 +1,43 @@ +# Run tests using the beta Rust compiler + +name: Beta Rust + +on: + schedule: + # 06:50 UTC every Monday + - cron: '50 6 * * 1' + workflow_dispatch: + +concurrency: + group: beta-${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + IROH_FORCE_STAGING_RELAYS: "1" + +jobs: + tests: + uses: './.github/workflows/tests.yaml' + with: + rust-version: beta + notify: + needs: tests + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Extract test results + run: | + printf '${{ toJSON(needs) }}\n' + result=$(echo '${{ toJSON(needs) }}' | jq -r .tests.result) + echo TESTS_RESULT=$result + echo "TESTS_RESULT=$result" >>"$GITHUB_ENV" + - name: Notify discord on failure + uses: n0-computer/discord-webhook-notify@v1 + if: ${{ env.TESTS_RESULT == 'failure' }} + with: + severity: error + details: | + Rustc beta tests failed in **${{ github.repository }}** + See https://github.com/${{ github.repository }}/actions/workflows/beta.yaml + webhookUrl: ${{ secrets.DISCORD_N0_GITHUB_CHANNEL_WEBHOOK_URL }} + diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..b4e086b --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,279 @@ +name: CI + +on: + pull_request: + types: [ 'labeled', 'unlabeled', 'opened', 'synchronize', 'reopened' ] + merge_group: + push: + branches: + - main + +concurrency: + group: ci-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + RUST_BACKTRACE: 1 + RUSTFLAGS: -Dwarnings + RUSTDOCFLAGS: -Dwarnings + MSRV: "1.85" + SCCACHE_CACHE_SIZE: "50G" + IROH_FORCE_STAGING_RELAYS: "1" + +jobs: + tests: + name: CI Test Suite + if: "github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'flaky-test')" + uses: './.github/workflows/tests.yaml' + + cross_build: + name: Cross Build Only + if: "github.event_name != 'pull_request' || ! 
contains(github.event.pull_request.labels.*.name, 'flaky-test')" + timeout-minutes: 30 + runs-on: [self-hosted, linux, X64] + strategy: + fail-fast: false + matrix: + target: + # cross tests are currently broken for armv7 and aarch64 + # see https://github.com/cross-rs/cross/issues/1311 + # - armv7-linux-androideabi + # - aarch64-linux-android + # Freebsd execution fails in cross + # - i686-unknown-freebsd # Linking fails :/ + - x86_64-unknown-freebsd + # Netbsd execution fails to link in cross + # - x86_64-unknown-netbsd + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Install rust stable + uses: dtolnay/rust-toolchain@stable + + - name: Cleanup Docker + continue-on-error: true + run: | + docker kill $(docker ps -q) + + # See https://github.com/cross-rs/cross/issues/1222 + - uses: taiki-e/install-action@cross + + - name: build + # cross tests are currently broken for armv7 and aarch64 + # see https://github.com/cross-rs/cross/issues/1311. So on + # those platforms we only build but do not run tests. + run: cross build --all --target ${{ matrix.target }} + env: + RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} + + android_build: + name: Android Build Only + if: "github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'flaky-test')" + timeout-minutes: 30 + # runs-on: ubuntu-latest + runs-on: [self-hosted, linux, X64] + strategy: + fail-fast: false + matrix: + target: + - aarch64-linux-android + - armv7-linux-androideabi + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Rust + uses: dtolnay/rust-toolchain@stable + with: + target: ${{ matrix.target }} + - name: Install rustup target + run: rustup target add ${{ matrix.target }} + + - name: Setup Java + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '17' + + - name: Setup Android SDK + uses: android-actions/setup-android@v3 + + - name: Setup Android NDK + uses: arqu/setup-ndk@main + id: setup-ndk + with: + ndk-version: r23 + add-to-path: true + + - name: Build + env: + ANDROID_NDK_HOME: ${{ steps.setup-ndk.outputs.ndk-path }} + run: | + cargo install --version 3.5.4 cargo-ndk + cargo ndk --target ${{ matrix.target }} build + + cross_test: + name: Cross Test + if: "github.event_name != 'pull_request' || ! 
contains(github.event.pull_request.labels.*.name, 'flaky-test')" + timeout-minutes: 30 + runs-on: [self-hosted, linux, X64] + strategy: + fail-fast: false + matrix: + target: + - i686-unknown-linux-gnu + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Install rust stable + uses: dtolnay/rust-toolchain@stable + + - name: Cleanup Docker + continue-on-error: true + run: | + docker kill $(docker ps -q) + + # See https://github.com/cross-rs/cross/issues/1222 + - uses: taiki-e/install-action@cross + + - name: test + run: cross test --all --target ${{ matrix.target }} -- --test-threads=12 + env: + RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG' }} + + check_semver: + runs-on: ubuntu-latest + env: + RUSTC_WRAPPER: "sccache" + SCCACHE_GHA_ENABLED: "on" + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Setup Environment (PR) + if: ${{ github.event_name == 'pull_request' }} + shell: bash + run: | + echo "HEAD_COMMIT_SHA=$(git rev-parse origin/${{ github.base_ref }})" >> ${GITHUB_ENV} + - name: Setup Environment (Push) + if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }} + shell: bash + run: | + echo "HEAD_COMMIT_SHA=$(git rev-parse origin/main)" >> ${GITHUB_ENV} + - name: Check semver + # uses: obi1kenobi/cargo-semver-checks-action@v2 + uses: n0-computer/cargo-semver-checks-action@feat-baseline + with: + package: iroh-blobs + baseline-rev: ${{ env.HEAD_COMMIT_SHA }} + use-cache: false + + check_fmt: + timeout-minutes: 30 + name: Checking fmt + runs-on: ubuntu-latest + env: + RUSTC_WRAPPER: "sccache" + SCCACHE_GHA_ENABLED: "on" + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt + - uses: mozilla-actions/sccache-action@v0.0.9 + - uses: taiki-e/install-action@cargo-make + - run: cargo make format-check + + check_docs: + timeout-minutes: 30 + name: Checking docs + runs-on: ubuntu-latest + env: + RUSTC_WRAPPER: "sccache" + SCCACHE_GHA_ENABLED: "on" + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly-2024-11-30 + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Docs + run: cargo doc --workspace --all-features --no-deps --document-private-items + env: + RUSTDOCFLAGS: --cfg docsrs + + clippy_check: + timeout-minutes: 30 + runs-on: ubuntu-latest + env: + RUSTC_WRAPPER: "sccache" + SCCACHE_GHA_ENABLED: "on" + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + components: clippy + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + # TODO: We have a bunch of platform-dependent code so should + # probably run this job on the full platform matrix + - name: clippy check (all features) + run: cargo clippy --workspace --all-features --all-targets --bins --tests --benches + + - name: clippy check (no features) + run: cargo clippy --workspace --no-default-features --lib --bins --tests + + - name: clippy check (default features) + run: cargo clippy --workspace --all-targets + + msrv: + if: "github.event_name != 'pull_request' || ! 
contains(github.event.pull_request.labels.*.name, 'flaky-test')" + timeout-minutes: 30 + name: Minimum Supported Rust Version + runs-on: ubuntu-latest + env: + RUSTC_WRAPPER: "sccache" + SCCACHE_GHA_ENABLED: "on" + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.MSRV }} + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Check MSRV all features + run: | + cargo +$MSRV check --workspace --all-targets + + cargo_deny: + timeout-minutes: 30 + name: cargo deny + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: EmbarkStudios/cargo-deny-action@v2 + with: + arguments: --workspace --all-features + command: check + command-arguments: "-Dwarnings" + + codespell: + timeout-minutes: 30 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: pip install --user codespell[toml] + - run: codespell --ignore-words-list=ans,atmost,crate,inout,ratatui,ser,stayin,swarmin,worl --skip=CHANGELOG.md diff --git a/.github/workflows/cleanup.yaml b/.github/workflows/cleanup.yaml new file mode 100644 index 0000000..130d321 --- /dev/null +++ b/.github/workflows/cleanup.yaml @@ -0,0 +1,45 @@ +# Clean up old PR docs previews on the generated-docs-preview branch + +name: Cleanup + +on: + schedule: + # 06:50 UTC every Monday + - cron: '50 6 * * 1' + workflow_dispatch: + +concurrency: + group: beta-${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + IROH_FORCE_STAGING_RELAYS: "1" + +jobs: + clean_docs_branch: + permissions: + issues: write + contents: write + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: generated-docs-preview + - name: Clean docs branch + run: | + cd pr/ + # keep the last 25 prs + dirs=$(ls -1d [0-9]* | sort -n) + total_dirs=$(echo "$dirs" | wc -l) + dirs_to_remove=$(echo "$dirs" | head -n $(($total_dirs - 25))) + if [ -n "$dirs_to_remove" ]; then + echo "$dirs_to_remove" | xargs rm -rf + fi + git add . 
+ git commit -m "Cleanup old docs" + git push + + + + diff --git a/.github/workflows/commit.yaml b/.github/workflows/commit.yaml new file mode 100644 index 0000000..1b5c6d2 --- /dev/null +++ b/.github/workflows/commit.yaml @@ -0,0 +1,19 @@ +name: Commits + +on: + pull_request: + branches: [main] + types: [opened, edited, synchronize] + +env: + IROH_FORCE_STAGING_RELAYS: "1" + +jobs: + check-for-cc: + runs-on: ubuntu-latest + steps: + - name: check-for-cc + id: check-for-cc + uses: agenthunt/conventional-commit-checker-action@v2.0.0 + with: + pr-title-regex: "^(.+)(?:(([^)s]+)))?!?: (.+)" diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml new file mode 100644 index 0000000..3777009 --- /dev/null +++ b/.github/workflows/docs.yaml @@ -0,0 +1,73 @@ +name: Docs Preview + +on: + pull_request: + workflow_dispatch: + inputs: + pr_number: + required: true + type: string + +# ensure job runs sequentially so pushing to the preview branch doesn't conflict +concurrency: + group: ci-docs-preview + +env: + IROH_FORCE_STAGING_RELAYS: "1" + +jobs: + preview_docs: + permissions: write-all + timeout-minutes: 30 + name: Docs preview + if: ${{ (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' ) && !github.event.pull_request.head.repo.fork }} + runs-on: ubuntu-latest + env: + RUSTC_WRAPPER: "sccache" + SCCACHE_GHA_ENABLED: "on" + SCCACHE_CACHE_SIZE: "50G" + PREVIEW_PATH: pr/${{ github.event.pull_request.number || inputs.pr_number }}/docs + + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly-2024-11-30 + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Generate Docs + run: cargo doc --workspace --all-features --no-deps + env: + RUSTDOCFLAGS: --cfg iroh_docsrs + + - name: Deploy Docs to Preview Branch + uses: peaceiris/actions-gh-pages@v4 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./target/doc/ + destination_dir: ${{ env.PREVIEW_PATH }} + publish_branch: generated-docs-preview + + - name: Find Docs Comment + uses: peter-evans/find-comment@v3 + id: fc + with: + issue-number: ${{ github.event.pull_request.number || inputs.pr_number }} + comment-author: 'github-actions[bot]' + body-includes: Documentation for this PR has been generated + + - name: Get current timestamp + id: get_timestamp + run: echo "TIMESTAMP=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV + + - name: Create or Update Docs Comment + uses: peter-evans/create-or-update-comment@v4 + with: + issue-number: ${{ github.event.pull_request.number || inputs.pr_number }} + comment-id: ${{ steps.fc.outputs.comment-id }} + body: | + Documentation for this PR has been generated and is available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/${{ env.PREVIEW_PATH }}/iroh_blobs/ + + Last updated: ${{ env.TIMESTAMP }} + edit-mode: replace diff --git a/.github/workflows/flaky.yaml b/.github/workflows/flaky.yaml new file mode 100644 index 0000000..cde6302 --- /dev/null +++ b/.github/workflows/flaky.yaml @@ -0,0 +1,99 @@ +# Run all tests, including flaky test. +# +# The default CI workflow ignores flaky tests. This workflow will run +# all tests, including ignored ones. +# +# To use this workflow you can either: +# +# - Label a PR with "flaky-test", the normal CI workflow will not run +# any jobs but the jobs here will be run. Note that to merge the PR +# you'll need to remove the label eventually because the normal CI +# jobs are required by branch protection. 
+# +# - Manually trigger the workflow, you may choose a branch for this to +# run on. +# +# Additionally this job runs once a day on a schedule. +# +# Currently doctests are not run by this workflow. + +name: Flaky CI + +on: + pull_request: + types: [ 'labeled', 'unlabeled', 'opened', 'synchronize', 'reopened' ] + schedule: + # 06:30 UTC every day + - cron: '30 6 * * *' + workflow_dispatch: + inputs: + branch: + description: 'Branch to run on, defaults to main' + required: true + default: 'main' + type: string + +concurrency: + group: flaky-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + IROH_FORCE_STAGING_RELAYS: "1" + +jobs: + tests: + if: "contains(github.event.pull_request.labels.*.name, 'flaky-test') || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule'" + uses: './.github/workflows/tests.yaml' + with: + flaky: true + git-ref: ${{ inputs.branch }} + notify: + needs: tests + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Extract test results + run: | + printf '${{ toJSON(needs) }}\n' + result=$(echo '${{ toJSON(needs) }}' | jq -r .tests.result) + echo TESTS_RESULT=$result + echo "TESTS_RESULT=$result" >>"$GITHUB_ENV" + - name: download nextest reports + uses: actions/download-artifact@v4 + with: + pattern: libtest_run_${{ github.run_number }}-${{ github.run_attempt }}-* + merge-multiple: true + path: nextest-results + - name: create summary report + id: make_summary + run: | + # prevent the glob expression in the loop from matching on itself when the dir is empty + shopt -s nullglob + # to deal with multiline outputs it's recommended to use a random EOF, the syntax is based on + # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings + EOF=aP51VriWCxNJ1JjvmO9i + echo "summary<<$EOF" >> $GITHUB_OUTPUT + echo "Flaky tests failure:" >> $GITHUB_OUTPUT + echo " " >> $GITHUB_OUTPUT + for report in nextest-results/*.json; do + # remove the name prefix and extension, and split the parts + name=$(echo ${report:16:-5} | tr _ ' ') + echo $name + echo "- **$name**" >> $GITHUB_OUTPUT + # select the failed tests + # the tests have this format "crate::module$test_name", the sed expressions remove the quotes and replace $ with :: + failure=$(jq --slurp '.[] | select(.["type"] == "test" and .["event"] == "failed" ) | .["name"]' $report | sed -e 's/^"//g' -e 's/\$/::/' -e 's/"//') + echo "$failure" + echo "$failure" >> $GITHUB_OUTPUT + done + echo "" >> $GITHUB_OUTPUT + echo "See https://github.com/${{ github.repository }}/actions/workflows/flaky.yaml" >> $GITHUB_OUTPUT + echo "$EOF" >> $GITHUB_OUTPUT + - name: Notify discord on failure + uses: n0-computer/discord-webhook-notify@v1 + if: ${{ env.TESTS_RESULT == 'failure' || env.TESTS_RESULT == 'success' }} + with: + text: "Flaky tests in **${{ github.repository }}**:" + severity: ${{ env.TESTS_RESULT == 'failure' && 'warn' || 'info' }} + details: ${{ env.TESTS_RESULT == 'failure' && steps.make_summary.outputs.summary || 'No flaky failures!' }} + webhookUrl: ${{ secrets.DISCORD_N0_GITHUB_CHANNEL_WEBHOOK_URL }} diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml new file mode 100644 index 0000000..41511c4 --- /dev/null +++ b/.github/workflows/tests.yaml @@ -0,0 +1,229 @@ +# Run all tests, with or without flaky tests. 
+ +name: Tests + +on: + workflow_call: + inputs: + rust-version: + description: 'The version of the rust compiler to run' + type: string + default: 'stable' + flaky: + description: 'Whether to also run flaky tests' + type: boolean + default: false + git-ref: + description: 'Which git ref to checkout' + type: string + default: ${{ github.ref }} + +env: + RUST_BACKTRACE: 1 + RUSTFLAGS: -Dwarnings + RUSTDOCFLAGS: -Dwarnings + SCCACHE_CACHE_SIZE: "50G" + CRATES_LIST: "iroh-blobs" + IROH_FORCE_STAGING_RELAYS: "1" + +jobs: + build_and_test_nix: + timeout-minutes: 30 + name: "Tests" + runs-on: ${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + name: [ubuntu-latest, macOS-arm-latest] + rust: [ '${{ inputs.rust-version }}' ] + features: [all, none, default] + include: + - name: ubuntu-latest + os: ubuntu-latest + release-os: linux + release-arch: amd64 + runner: [self-hosted, linux, X64] + - name: macOS-arm-latest + os: macOS-latest + release-os: darwin + release-arch: aarch64 + runner: [self-hosted, macOS, ARM64] + env: + # Using self-hosted runners so use local cache for sccache and + # not SCCACHE_GHA_ENABLED. + RUSTC_WRAPPER: "sccache" + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ inputs.git-ref }} + + - name: Install ${{ matrix.rust }} rust + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ matrix.rust }} + + - name: Install cargo-nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest@0.9.80 + + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - name: Select features + run: | + case "${{ matrix.features }}" in + all) + echo "FEATURES=--all-features" >> "$GITHUB_ENV" + ;; + none) + echo "FEATURES=--no-default-features" >> "$GITHUB_ENV" + ;; + default) + echo "FEATURES=" >> "$GITHUB_ENV" + ;; + *) + exit 1 + esac + + - name: check features + if: ${{ ! inputs.flaky }} + run: | + for i in ${CRATES_LIST//,/ } + do + echo "Checking $i $FEATURES" + if [ $i = "iroh-cli" ]; then + targets="--bins" + else + targets="--lib --bins" + fi + echo cargo check -p $i $FEATURES $targets + cargo check -p $i $FEATURES $targets + done + env: + RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} + + - name: build tests + run: | + cargo nextest run --workspace ${{ env.FEATURES }} --lib --bins --tests --no-run + + - name: list ignored tests + run: | + cargo nextest list --workspace ${{ env.FEATURES }} --lib --bins --tests --run-ignored ignored-only + + - name: run tests + run: | + mkdir -p output + cargo nextest run --workspace ${{ env.FEATURES }} --lib --bins --tests --profile ci --run-ignored ${{ inputs.flaky && 'all' || 'default' }} --no-fail-fast --message-format ${{ inputs.flaky && 'libtest-json' || 'human' }} > output/${{ matrix.name }}_${{ matrix.features }}_${{ matrix.rust }}.json + env: + RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} + NEXTEST_EXPERIMENTAL_LIBTEST_JSON: 1 + + - name: upload results + if: ${{ failure() && inputs.flaky }} + uses: actions/upload-artifact@v4 + with: + name: libtest_run_${{ github.run_number }}-${{ github.run_attempt }}-${{ matrix.name }}_${{ matrix.features }}_${{ matrix.rust }}.json + path: output + retention-days: 45 + compression-level: 0 + + - name: doctests + if: ${{ (! 
inputs.flaky) && matrix.features == 'all' }} + run: | + if [ -n "${{ runner.debug }}" ]; then + export RUST_LOG=TRACE + else + export RUST_LOG=DEBUG + fi + cargo test --workspace --all-features --doc + + build_and_test_windows: + timeout-minutes: 30 + name: "Tests" + runs-on: ${{ matrix.runner }} + strategy: + fail-fast: false + matrix: + name: [windows-latest] + rust: [ '${{ inputs.rust-version}}' ] + features: [all, none, default] + target: + - x86_64-pc-windows-msvc + include: + - name: windows-latest + os: windows + runner: [self-hosted, windows, x64] + env: + # Using self-hosted runners so use local cache for sccache and + # not SCCACHE_GHA_ENABLED. + RUSTC_WRAPPER: "sccache" + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ inputs.git-ref }} + + - name: Install ${{ matrix.rust }} + run: | + rustup toolchain install ${{ matrix.rust }} + rustup toolchain default ${{ matrix.rust }} + rustup target add ${{ matrix.target }} + rustup set default-host ${{ matrix.target }} + + - name: Install cargo-nextest + shell: powershell + run: | + $tmp = New-TemporaryFile | Rename-Item -NewName { $_ -replace 'tmp$', 'zip' } -PassThru + Invoke-WebRequest -OutFile $tmp https://get.nexte.st/latest/windows + $outputDir = if ($Env:CARGO_HOME) { Join-Path $Env:CARGO_HOME "bin" } else { "~/.cargo/bin" } + $tmp | Expand-Archive -DestinationPath $outputDir -Force + $tmp | Remove-Item + + - name: Select features + run: | + switch ("${{ matrix.features }}") { + "all" { + echo "FEATURES=--all-features" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + } + "none" { + echo "FEATURES=--no-default-features" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + } + "default" { + echo "FEATURES=" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + } + default { + Exit 1 + } + } + + - name: Install sccache + uses: mozilla-actions/sccache-action@v0.0.9 + + - uses: msys2/setup-msys2@v2 + + - name: build tests + run: | + cargo nextest run --workspace ${{ env.FEATURES }} --lib --bins --tests --target ${{ matrix.target }} --no-run + + - name: list ignored tests + run: | + cargo nextest list --workspace ${{ env.FEATURES }} --lib --bins --tests --target ${{ matrix.target }} --run-ignored ignored-only + + - name: tests + run: | + mkdir -p output + cargo nextest run --workspace ${{ env.FEATURES }} --lib --bins --tests --profile ci --target ${{ matrix.target }} --run-ignored ${{ inputs.flaky && 'all' || 'default' }} --no-fail-fast --message-format ${{ inputs.flaky && 'libtest-json' || 'human' }} > output/${{ matrix.name }}_${{ matrix.features }}_${{ matrix.rust }}.json + env: + RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} + NEXTEST_EXPERIMENTAL_LIBTEST_JSON: 1 + + - name: upload results + if: ${{ failure() && inputs.flaky }} + uses: actions/upload-artifact@v4 + with: + name: libtest_run_${{ github.run_number }}-${{ github.run_attempt }}-${{ matrix.name }}_${{ matrix.features }}_${{ matrix.rust }}.json + path: output + retention-days: 1 + compression-level: 0 diff --git a/Cargo.lock b/Cargo.lock index 95f19f0..4cdab35 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -261,9 +261,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.8.0" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" +checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3" [[package]] name = 
"binary-merge" @@ -1737,7 +1737,7 @@ dependencies = [ [[package]] name = "iroh-blobs" -version = "0.1.0" +version = "0.90.0-alpha1" dependencies = [ "anyhow", "arrayvec", @@ -3106,9 +3106,9 @@ dependencies = [ [[package]] name = "redb" -version = "2.6.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cef6a6d3a65ea334d6cdfb31fa2525c20184b7aa7bd1ad1e2e37502610d4609f" +checksum = "ea0a72cd7140de9fc3e318823b883abf819c20d478ec89ce880466dc2ef263c6" dependencies = [ "libc", ] diff --git a/Cargo.toml b/Cargo.toml index d4aa8a6..f522e88 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,15 @@ [package] name = "iroh-blobs" -version = "0.1.0" -edition = "2024" +version = "0.90.0-alpha1" +edition = "2021" +description = "content-addressed blobs for iroh" +license = "MIT OR Apache-2.0" +authors = ["dignifiedquire ", "n0 team"] +repository = "https://github.com/n0-computer/blobs2" +keywords = ["hashing", "quic", "blake3", "streaming"] + +# Sadly this also needs to be updated in .github/workflows/ci.yml +rust-version = "1.85" [dependencies] anyhow = "1.0.95" @@ -13,7 +21,7 @@ quinn = { package = "iroh-quinn", version = "0.14.0" } n0-future = "0.1.2" n0-snafu = "0.2.0" range-collections = { version = "0.4.6", features = ["serde"] } -redb = "2.4.0" +redb = { version = "=2.4" } smallvec = { version = "1", features = ["serde", "const_new"] } snafu = "0.8.5" tokio = { version = "1.43.0", features = ["full"] } diff --git a/DESIGN.md b/DESIGN.md index 1fbb566..1980283 100644 --- a/DESIGN.md +++ b/DESIGN.md @@ -32,7 +32,7 @@ This is a perfectly valid approach when dealing exclusively with complete, large First of all, it is very inefficient if you deal with a large number of tiny blobs, like we frequently do when working with [iroh-docs] or [iroh-willow] documents. Just the file system metadata for a tiny file will vastly exceed the storage needed for the data itself. -Also, now we are very much dependant on the quirks of whatever file system our target operating system has. Many older file systems like FAT32 or EXT2 are notoriously bad in handling directories with millions of files. And we can't just limit ourselves to e.g. linux servers with modern file systems, since we also want to support mobile platforms and windows PCs. +Also, now we are very much dependent on the quirks of whatever file system our target operating system has. Many older file systems like FAT32 or EXT2 are notoriously bad in handling directories with millions of files. And we can't just limit ourselves to e.g. linux servers with modern file systems, since we also want to support mobile platforms and windows PCs. And last but not least, creating a the metadata for a file is very expensive compared to writing a few bytes. We would be limited to a pathetically low download speed when bulk downloading millions of blobs, like for example an iroh collection containing the linux source code. For very small files embedded databases are [frequently faster](https://www.sqlite.org/fasterthanfs.html) than the file system. @@ -105,7 +105,7 @@ If we sync data from a remote node, we do know the hash but don't have the data. ### Blob deletion -On creation, blobs are tagged with a temporary tag that prevents them from being deleted for as long as the process lives. They can then be tagged with a persisten tag that prevents them from being deleted even after a restart. 
And last but not least, large groups of blobs can be protected from deletion in bulk by putting a sequence of hashes into a blob and tagging that blob as a hash sequence. +On creation, blobs are tagged with a temporary tag that prevents them from being deleted for as long as the process lives. They can then be tagged with a persistent tag that prevents them from being deleted even after a restart. And last but not least, large groups of blobs can be protected from deletion in bulk by putting a sequence of hashes into a blob and tagging that blob as a hash sequence. We also provide a way to explicitly delete blobs by hash, but that is meant to be used only in case of an emergency. You have some data that you want **gone** no matter how dire the consequences are. diff --git a/Makefile.toml b/Makefile.toml new file mode 100644 index 0000000..afee382 --- /dev/null +++ b/Makefile.toml @@ -0,0 +1,28 @@ +# Use cargo-make to run tasks here: https://crates.io/crates/cargo-make + +[tasks.format] +workspace = false +command = "cargo" +args = [ + "fmt", + "--all", + "--", + "--config", + "unstable_features=true", + "--config", + "imports_granularity=Crate,group_imports=StdExternalCrate,reorder_imports=true", +] + +[tasks.format-check] +workspace = false +command = "cargo" +args = [ + "fmt", + "--all", + "--check", + "--", + "--config", + "unstable_features=true", + "--config", + "imports_granularity=Crate,group_imports=StdExternalCrate,reorder_imports=true", +] diff --git a/deny.toml b/deny.toml new file mode 100644 index 0000000..026724f --- /dev/null +++ b/deny.toml @@ -0,0 +1,44 @@ +[advisories] +ignore = [ + "RUSTSEC-2024-0370", + "RUSTSEC-2024-0384", + "RUSTSEC-2024-0436", + "RUSTSEC-2023-0089", +] + +[bans] +deny = [ + "aws-lc", + "aws-lc-rs", + "aws-lc-sys", + "native-tls", + "openssl", +] +multiple-versions = "allow" + +[licenses] +allow = [ + "Apache-2.0", + "Apache-2.0 WITH LLVM-exception", + "BSD-2-Clause", + "BSD-3-Clause", + "BSL-1.0", + "ISC", + "MIT", + "Zlib", + "MPL-2.0", + "Unicode-3.0", + "Unlicense", + "CDLA-Permissive-2.0", +] + +[[licenses.clarify]] +expression = "MIT AND ISC AND OpenSSL" +name = "ring" + +[[licenses.clarify.license-files]] +hash = 3171872035 +path = "LICENSE" + +[sources] +allow-git = [] diff --git a/examples/random_store.rs b/examples/random_store.rs index c33d50b..349ede4 100644 --- a/examples/random_store.rs +++ b/examples/random_store.rs @@ -5,14 +5,14 @@ use clap::{Parser, Subcommand}; use iroh::{SecretKey, Watcher}; use iroh_base::ticket::NodeTicket; use iroh_blobs::{ - HashAndFormat, api::downloader::Shuffled, provider::Event, store::fs::FsStore, test::{add_hash_sequences, create_random_blobs}, + HashAndFormat, }; use n0_future::StreamExt; -use rand::{Rng, SeedableRng, rngs::StdRng}; +use rand::{rngs::StdRng, Rng, SeedableRng}; use tokio::{signal::ctrl_c, sync::mpsc}; use tracing::info; diff --git a/src/api.rs b/src/api.rs index d4a16cd..94f34ad 100644 --- a/src/api.rs +++ b/src/api.rs @@ -5,7 +5,7 @@ use std::{io, net::SocketAddr, ops::Deref, sync::Arc}; use iroh::Endpoint; -use irpc::rpc::{Handler, listen}; +use irpc::rpc::{listen, Handler}; use n0_snafu::SpanTrace; use nested_enum_utils::common_fields; use proto::{Request, ShutdownRequest, SyncDbRequest}; diff --git a/src/api/blobs.rs b/src/api/blobs.rs index e36bc46..fe709d6 100644 --- a/src/api/blobs.rs +++ b/src/api/blobs.rs @@ -15,19 +15,19 @@ use std::{ pub use bao_tree::io::mixed::EncodedItem; use bao_tree::{ - BaoTree, ChunkNum, ChunkRanges, io::{ - BaoContentItem, Leaf, fsm::{ResponseDecoder, 
ResponseDecoderNext}, + BaoContentItem, Leaf, }, + BaoTree, ChunkNum, ChunkRanges, }; use bytes::Bytes; use genawaiter::sync::Gen; use iroh_io::{AsyncStreamReader, TokioStreamReader}; use irpc::channel::{mpsc, oneshot}; -use n0_future::{Stream, StreamExt, future, stream}; +use n0_future::{future, stream, Stream, StreamExt}; use quinn::SendStream; -use range_collections::{RangeSet2, range_set::RangeSetRange}; +use range_collections::{range_set::RangeSetRange, RangeSet2}; use ref_cast::RefCast; use tokio::io::AsyncWriteExt; use tracing::trace; @@ -43,7 +43,6 @@ pub use super::proto::{ ImportBaoRequest as ImportBaoOptions, ImportMode, ObserveRequest as ObserveOptions, }; use super::{ - ApiClient, RequestResult, Tags, proto::{ BatchResponse, BlobStatusRequest, ClearProtectedRequest, CreateTempTagRequest, ExportBaoRequest, ExportRangesItem, ImportBaoRequest, ImportByteStreamRequest, @@ -51,13 +50,14 @@ use super::{ }, remote::HashSeqChunk, tags::TagInfo, + ApiClient, RequestResult, Tags, }; use crate::{ - BlobFormat, Hash, HashAndFormat, api::proto::{BatchRequest, ImportByteStreamUpdate}, provider::StreamContext, store::IROH_BLOCK_SIZE, util::temp_tag::TempTag, + BlobFormat, Hash, HashAndFormat, }; /// Options for adding bytes. @@ -696,7 +696,7 @@ impl ObserveProgress { } } -/// A progess handle for an export operation. +/// A progress handle for an export operation. /// /// Internally this is a stream of [`ExportProgress`] items. Working with this /// stream directly can be inconvenient, so this struct provides some convenience @@ -772,12 +772,12 @@ pub struct ImportBaoHandle { impl ImportBaoHandle { pub(crate) async fn new( fut: impl Future< - Output = irpc::Result<( - mpsc::Sender, - oneshot::Receiver>, - )>, - > + Send - + 'static, + Output = irpc::Result<( + mpsc::Sender, + oneshot::Receiver>, + )>, + > + Send + + 'static, ) -> irpc::Result { let (tx, rx) = fut.await?; Ok(Self { tx, rx }) diff --git a/src/api/downloader.rs b/src/api/downloader.rs index e92d3c7..6c1eb4a 100644 --- a/src/api/downloader.rs +++ b/src/api/downloader.rs @@ -2,6 +2,7 @@ use std::{ collections::{HashMap, HashSet}, fmt::Debug, + future::{Future, IntoFuture}, io, ops::Deref, sync::Arc, @@ -10,20 +11,20 @@ use std::{ use anyhow::bail; use genawaiter::sync::Gen; -use iroh::{Endpoint, NodeId, endpoint::Connection}; +use iroh::{endpoint::Connection, Endpoint, NodeId}; use irpc::{channel::mpsc, rpc_requests}; -use n0_future::{BufferedStreamExt, Stream, StreamExt, future, stream}; +use n0_future::{future, stream, BufferedStreamExt, Stream, StreamExt}; use rand::seq::SliceRandom; -use serde::{Deserialize, Serialize, de::Error}; +use serde::{de::Error, Deserialize, Serialize}; use tokio::{sync::Mutex, task::JoinSet}; use tokio_util::time::FutureExt; use tracing::{info, instrument::Instrument, warn}; -use super::{Store, remote::GetConnection}; +use super::{remote::GetConnection, Store}; use crate::{ - BlobFormat, Hash, HashAndFormat, protocol::{GetManyRequest, GetRequest}, util::sink::{Drain, IrpcSenderRefSink, Sink, TokioMpscSenderSink}, + BlobFormat, Hash, HashAndFormat, }; #[derive(Debug, Clone)] @@ -483,7 +484,8 @@ impl ConnectionPool { .entry(id) .or_default() .clone(); - *slot.lock().await = SlotState::Evil(reason) + let mut t = slot.lock().await; + *t = SlotState::Evil(reason) } #[allow(dead_code)] @@ -496,7 +498,8 @@ impl ConnectionPool { .entry(id) .or_default() .clone(); - *slot.lock().await = SlotState::Initial + let mut t = slot.lock().await; + *t = SlotState::Initial } } diff --git a/src/api/proto.rs 
b/src/api/proto.rs index 91a3476..ed3686e 100644 --- a/src/api/proto.rs +++ b/src/api/proto.rs @@ -24,8 +24,8 @@ use std::{ use arrayvec::ArrayString; use bao_tree::{ + io::{mixed::EncodedItem, BaoContentItem, Leaf}, ChunkRanges, - io::{BaoContentItem, Leaf, mixed::EncodedItem}, }; use bytes::Bytes; use irpc::{ @@ -38,7 +38,7 @@ use serde::{Deserialize, Serialize}; pub(crate) mod bitfield; pub use bitfield::Bitfield; -use crate::{BlobFormat, Hash, HashAndFormat, store::util::Tag, util::temp_tag::TempTag}; +use crate::{store::util::Tag, util::temp_tag::TempTag, BlobFormat, Hash, HashAndFormat}; pub(crate) trait HashSpecific { fn hash(&self) -> Hash; diff --git a/src/api/proto/bitfield.rs b/src/api/proto/bitfield.rs index 25321fe..d3ccca6 100644 --- a/src/api/proto/bitfield.rs +++ b/src/api/proto/bitfield.rs @@ -6,8 +6,8 @@ use serde::{Deserialize, Deserializer, Serialize}; use smallvec::SmallVec; use crate::store::util::{ - RangeSetExt, observer::{Combine, CombineInPlace}, + RangeSetExt, }; pub(crate) fn is_validated(size: NonZeroU64, ranges: &ChunkRanges) -> bool { @@ -275,7 +275,7 @@ impl UpdateResult { #[cfg(test)] mod tests { use bao_tree::{ChunkNum, ChunkRanges}; - use proptest::prelude::{Strategy, prop}; + use proptest::prelude::{prop, Strategy}; use smallvec::SmallVec; use test_strategy::proptest; diff --git a/src/api/remote.rs b/src/api/remote.rs index 0775c40..75e66d0 100644 --- a/src/api/remote.rs +++ b/src/api/remote.rs @@ -4,7 +4,7 @@ use genawaiter::sync::{Co, Gen}; use iroh::endpoint::SendStream; use irpc::util::{AsyncReadVarintExt, WriteVarintExt}; -use n0_future::{Stream, StreamExt, io}; +use n0_future::{io, Stream, StreamExt}; use n0_snafu::SpanTrace; use nested_enum_utils::common_fields; use ref_cast::RefCast; @@ -12,11 +12,11 @@ use snafu::{Backtrace, IntoError, Snafu}; use super::blobs::Bitfield; use crate::{ - api::{ApiClient, blobs::WriteProgress}, - get::{BadRequestSnafu, GetError, GetResult, LocalFailureSnafu, Stats, fsm::DecodeError}, + api::{blobs::WriteProgress, ApiClient}, + get::{fsm::DecodeError, BadRequestSnafu, GetError, GetResult, LocalFailureSnafu, Stats}, protocol::{ - GetManyRequest, MAX_MESSAGE_SIZE, ObserveItem, ObserveRequest, PushRequest, Request, - RequestType, + GetManyRequest, ObserveItem, ObserveRequest, PushRequest, Request, RequestType, + MAX_MESSAGE_SIZE, }, util::sink::{Sink, TokioMpscSenderSink}, }; @@ -820,28 +820,33 @@ pub enum ExecuteError { }, } -use std::{collections::BTreeMap, future::Future, num::NonZeroU64, sync::Arc}; +use std::{ + collections::BTreeMap, + future::{Future, IntoFuture}, + num::NonZeroU64, + sync::Arc, +}; use bao_tree::{ - ChunkNum, ChunkRanges, io::{BaoContentItem, Leaf}, + ChunkNum, ChunkRanges, }; use iroh::endpoint::Connection; use tracing::{debug, trace}; use crate::{ - Hash, HashAndFormat, - api::{self, Store, blobs::Blobs}, + api::{self, blobs::Blobs, Store}, get::fsm::{AtBlobHeader, AtEndBlob, BlobContentNext, ConnectedNext, EndBlobNext}, hashseq::{HashSeq, HashSeqIter}, protocol::{ChunkRangesSeq, GetRequest}, store::IROH_BLOCK_SIZE, + Hash, HashAndFormat, }; /// Trait to lazily get a connection pub trait GetConnection { fn connection(&mut self) - -> impl Future> + Send + '_; + -> impl Future> + Send + '_; } /// If we already have a connection, the impl is trivial @@ -1021,8 +1026,8 @@ async fn write_push_request( Ok(request) } -async fn write_observe_request(requst: ObserveRequest, stream: &mut SendStream) -> io::Result<()> { - let request = Request::Observe(requst); +async fn write_observe_request(request: 
ObserveRequest, stream: &mut SendStream) -> io::Result<()> { + let request = Request::Observe(request); let request_bytes = postcard::to_allocvec(&request) .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; stream.write_all(&request_bytes).await?; @@ -1055,7 +1060,7 @@ mod tests { use crate::{ protocol::{ChunkRangesSeq, GetRequest}, - store::fs::{FsStore, tests::INTERESTING_SIZES}, + store::fs::{tests::INTERESTING_SIZES, FsStore}, tests::{add_test_hash_seq, add_test_hash_seq_incomplete}, util::ChunkRangesExt, }; diff --git a/src/api/tags.rs b/src/api/tags.rs index 34a837c..b235a8c 100644 --- a/src/api/tags.rs +++ b/src/api/tags.rs @@ -13,10 +13,10 @@ pub use super::proto::{ TagInfo, }; use super::{ - ApiClient, Tag, TempTag, proto::{CreateTempTagRequest, Scope}, + ApiClient, Tag, TempTag, }; -use crate::{HashAndFormat, api::proto::ListTempTagsRequest}; +use crate::{api::proto::ListTempTagsRequest, HashAndFormat}; /// The API for interacting with tags and temp tags. #[derive(Debug, Clone, ref_cast::RefCast)] diff --git a/src/format/collection.rs b/src/format/collection.rs index 54b0aac..37d90ec 100644 --- a/src/format/collection.rs +++ b/src/format/collection.rs @@ -7,11 +7,11 @@ use bytes::Bytes; use serde::{Deserialize, Serialize}; use crate::{ - BlobFormat, Hash, - api::{Store, blobs::AddBytesOptions}, - get::{Stats, fsm}, + api::{blobs::AddBytesOptions, Store}, + get::{fsm, Stats}, hashseq::HashSeq, util::temp_tag::TempTag, + BlobFormat, Hash, }; /// A collection of blobs diff --git a/src/get.rs b/src/get.rs index 3d57d10..049ef48 100644 --- a/src/get.rs +++ b/src/get.rs @@ -23,7 +23,7 @@ use std::{ }; use anyhow::Result; -use bao_tree::{ChunkNum, io::fsm::BaoContentItem}; +use bao_tree::{io::fsm::BaoContentItem, ChunkNum}; use fsm::RequestCounters; use iroh::endpoint::{self, RecvStream, SendStream}; use iroh_io::TokioStreamReader; @@ -33,7 +33,7 @@ use serde::{Deserialize, Serialize}; use snafu::{Backtrace, IntoError, ResultExt, Snafu}; use tracing::{debug, error}; -use crate::{Hash, protocol::ChunkRangesSeq, store::IROH_BLOCK_SIZE}; +use crate::{protocol::ChunkRangesSeq, store::IROH_BLOCK_SIZE, Hash}; mod error; pub mod request; @@ -91,8 +91,8 @@ pub mod fsm { use std::{io, result}; use bao_tree::{ - BaoTree, ChunkRanges, TreeNode, io::fsm::{OutboardMut, ResponseDecoder, ResponseDecoderNext}, + BaoTree, ChunkRanges, TreeNode, }; use derive_more::From; use iroh::endpoint::Connection; @@ -102,7 +102,7 @@ pub mod fsm { use crate::{ get::error::BadRequestSnafu, protocol::{ - GetManyRequest, GetRequest, MAX_MESSAGE_SIZE, NonEmptyRequestRangeSpecIter, Request, + GetManyRequest, GetRequest, NonEmptyRequestRangeSpecIter, Request, MAX_MESSAGE_SIZE, }, }; diff --git a/src/get/request.rs b/src/get/request.rs index b6796bb..86ffcab 100644 --- a/src/get/request.rs +++ b/src/get/request.rs @@ -7,12 +7,13 @@ //! In addition to these utilities, there are also constructors in [`crate::protocol::ChunkRangesSeq`] //! to construct complex requests. 
use std::{ + future::{Future, IntoFuture}, pin::Pin, sync::Arc, task::{Context, Poll}, }; -use bao_tree::{ChunkNum, ChunkRanges, io::BaoContentItem}; +use bao_tree::{io::BaoContentItem, ChunkNum, ChunkRanges}; use bytes::Bytes; use genawaiter::sync::{Co, Gen}; use iroh::endpoint::Connection; @@ -22,13 +23,13 @@ use rand::Rng; use snafu::IntoError; use tokio::sync::mpsc; -use super::{GetError, GetResult, Stats, fsm}; +use super::{fsm, GetError, GetResult, Stats}; use crate::{ - Hash, HashAndFormat, get::error::{BadRequestSnafu, LocalFailureSnafu}, hashseq::HashSeq, protocol::{ChunkRangesSeq, GetRequest}, util::ChunkRangesExt, + Hash, HashAndFormat, }; /// Result of a [`get_blob`] request. diff --git a/src/hash.rs b/src/hash.rs index 889c1e9..8190009 100644 --- a/src/hash.rs +++ b/src/hash.rs @@ -7,7 +7,7 @@ use bao_tree::blake3; use n0_snafu::SpanTrace; use nested_enum_utils::common_fields; use postcard::experimental::max_size::MaxSize; -use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use snafu::{Backtrace, ResultExt, Snafu}; use crate::store::util::DD; @@ -450,7 +450,7 @@ impl<'de> Deserialize<'de> for HashAndFormat { mod tests { use iroh_test::{assert_eq_hex, hexdump::parse_hexdump}; - use serde_test::{Configure, Token, assert_tokens}; + use serde_test::{assert_tokens, Configure, Token}; use super::*; diff --git a/src/net_protocol.rs b/src/net_protocol.rs index 9df481d..63b50bd 100644 --- a/src/net_protocol.rs +++ b/src/net_protocol.rs @@ -6,7 +6,7 @@ //! //! ```rust //! # async fn example() -> anyhow::Result<()> { -//! use iroh::{Endpoint, protocol::Router}; +//! use iroh::{protocol::Router, Endpoint}; //! use iroh_blobs::{net_protocol::Blobs, store}; //! //! // create a store @@ -36,21 +36,21 @@ //! # } //! ``` -use std::{fmt::Debug, sync::Arc}; +use std::{fmt::Debug, future::Future, sync::Arc}; use iroh::{ - Endpoint, Watcher, endpoint::Connection, protocol::{AcceptError, ProtocolHandler}, + Endpoint, Watcher, }; use tokio::sync::mpsc; use tracing::error; use crate::{ - HashAndFormat, api::Store, provider::{Event, EventSender}, ticket::BlobTicket, + HashAndFormat, }; #[derive(Debug)] diff --git a/src/protocol.rs b/src/protocol.rs index 17e2869..8504319 100644 --- a/src/protocol.rs +++ b/src/protocol.rs @@ -388,7 +388,7 @@ use snafu::{GenerateImplicitData, Snafu}; use tokio::io::AsyncReadExt; pub use crate::util::ChunkRangesExt; -use crate::{BlobFormat, Hash, HashAndFormat, api::blobs::Bitfield, provider::CountingReader}; +use crate::{api::blobs::Bitfield, provider::CountingReader, BlobFormat, Hash, HashAndFormat}; /// Maximum message size is limited to 100MiB for now. pub const MAX_MESSAGE_SIZE: usize = 1024 * 1024; @@ -412,7 +412,7 @@ pub enum Request { /// /// Note that providers will in many cases reject this request, e.g. if /// they don't have write access to the store or don't want to ingest - /// unknonwn data. + /// unknown data. 
Push(PushRequest), /// Get multiple blobs in a single request, from a single provider /// @@ -721,8 +721,8 @@ pub mod builder { use super::ChunkRangesSeq; use crate::{ - Hash, protocol::{GetManyRequest, GetRequest}, + Hash, }; #[derive(Debug, Clone, Default)] diff --git a/src/protocol/range_spec.rs b/src/protocol/range_spec.rs index bd22742..c60414d 100644 --- a/src/protocol/range_spec.rs +++ b/src/protocol/range_spec.rs @@ -9,7 +9,7 @@ use std::{fmt, sync::OnceLock}; use bao_tree::{ChunkNum, ChunkRanges, ChunkRangesRef}; use serde::{Deserialize, Serialize}; -use smallvec::{SmallVec, smallvec}; +use smallvec::{smallvec, SmallVec}; static CHUNK_RANGES_EMPTY: OnceLock = OnceLock::new(); diff --git a/src/provider.rs b/src/provider.rs index 768a934..ff2d4a0 100644 --- a/src/provider.rs +++ b/src/provider.rs @@ -15,23 +15,23 @@ use std::{ use anyhow::{Context, Result}; use bao_tree::ChunkRanges; use iroh::{ - NodeId, endpoint::{self, RecvStream, SendStream}, + NodeId, }; use irpc::channel::oneshot; use n0_future::StreamExt; use serde::de::DeserializeOwned; use tokio::{io::AsyncRead, select, sync::mpsc}; -use tracing::{Instrument, debug, debug_span, error, warn}; +use tracing::{debug, debug_span, error, warn, Instrument}; use crate::{ - Hash, - api::{self, Store, blobs::Bitfield}, + api::{self, blobs::Bitfield, Store}, hashseq::HashSeq, protocol::{ ChunkRangesSeq, GetManyRequest, GetRequest, ObserveItem, ObserveRequest, PushRequest, Request, }, + Hash, }; /// Provider progress events, to keep track of what the provider is doing. diff --git a/src/store/fs.rs b/src/store/fs.rs index 64437ac..1724e89 100644 --- a/src/store/fs.rs +++ b/src/store/fs.rs @@ -43,7 +43,7 @@ //! //! For tasks that are specific to a hash, a HashContext combines the task //! context with a slot from the table of the main actor that can be used -//! to obtain an unqiue handle for the hash. +//! to obtain an unique handle for the hash. //! //! # Runtime //! 
@@ -75,12 +75,12 @@ use std::{ }; use bao_tree::{ - ChunkNum, ChunkRanges, io::{ - BaoContentItem, Leaf, - mixed::{EncodedItem, ReadBytesAt, traverse_ranges_validated}, + mixed::{traverse_ranges_validated, EncodedItem, ReadBytesAt}, sync::ReadAt, + BaoContentItem, Leaf, }, + ChunkNum, ChunkRanges, }; use bytes::Bytes; use delete_set::{BaoFilePart, ProtectHandle}; @@ -88,7 +88,7 @@ use entry_state::{DataLocation, OutboardLocation}; use gc::run_gc; use import::{ImportEntry, ImportSource}; use irpc::channel::mpsc; -use meta::{Snapshot, list_blobs}; +use meta::{list_blobs, Snapshot}; use n0_future::{future::yield_now, io}; use nested_enum_utils::enum_conversions; use range_collections::range_set::RangeSetRange; @@ -97,22 +97,22 @@ use tracing::{error, instrument, trace}; use crate::{ api::{ - ApiClient, proto::{ - self, BatchMsg, BatchResponse, Bitfield, Command, CreateTempTagMsg, ExportBaoMsg, - ExportBaoRequest, ExportPathMsg, ExportPathRequest, ExportRangesItem, ExportRangesMsg, - ExportRangesRequest, HashSpecific, ImportBaoMsg, ImportBaoRequest, ObserveMsg, Scope, - bitfield::is_validated, + self, bitfield::is_validated, BatchMsg, BatchResponse, Bitfield, Command, + CreateTempTagMsg, ExportBaoMsg, ExportBaoRequest, ExportPathMsg, ExportPathRequest, + ExportRangesItem, ExportRangesMsg, ExportRangesRequest, HashSpecific, ImportBaoMsg, + ImportBaoRequest, ObserveMsg, Scope, }, + ApiClient, }, store::{ - Hash, util::{BaoTreeSender, FixedSize, MemOrFile, ValueOrPoisioned}, + Hash, }, util::{ - ChunkRangesExt, channel::oneshot, temp_tag::{TagDrop, TempTag, TempTagScope, TempTags}, + ChunkRangesExt, }, }; mod bao_file; @@ -124,15 +124,16 @@ mod meta; pub mod options; pub(crate) mod util; use entry_state::EntryState; -use import::{ImportEntryMsg, import_byte_stream, import_bytes, import_path}; +use import::{import_byte_stream, import_bytes, import_path, ImportEntryMsg}; use options::Options; use tracing::Instrument; mod gc; use super::HashAndFormat; use crate::api::{ - self, Store, + self, blobs::{AddProgressItem, ExportMode, ExportProgressItem}, + Store, }; /// Create a 16 byte unique ID. 
@@ -1263,10 +1264,10 @@ pub mod tests { use std::collections::{HashMap, HashSet}; use bao_tree::{ - ChunkRanges, io::{outboard::PreOrderMemOutboard, round_up_to_chunks_groups}, + ChunkRanges, }; - use n0_future::{Stream, StreamExt, stream}; + use n0_future::{stream, Stream, StreamExt}; use testresult::TestResult; use walkdir::WalkDir; @@ -1274,8 +1275,8 @@ pub mod tests { use crate::{ api::blobs::Bitfield, store::{ + util::{read_checksummed, SliceInfoExt, Tag}, HashAndFormat, IROH_BLOCK_SIZE, - util::{SliceInfoExt, Tag, read_checksummed}, }, }; diff --git a/src/store/fs/bao_file.rs b/src/store/fs/bao_file.rs index 67a4b0f..410317c 100644 --- a/src/store/fs/bao_file.rs +++ b/src/store/fs/bao_file.rs @@ -8,38 +8,39 @@ use std::{ }; use bao_tree::{ - BaoTree, ChunkRanges, blake3, + blake3, io::{ fsm::BaoContentItem, mixed::ReadBytesAt, outboard::PreOrderOutboard, sync::{ReadAt, WriteAt}, }, + BaoTree, ChunkRanges, }; use bytes::{Bytes, BytesMut}; use derive_more::Debug; use irpc::channel::mpsc; use tokio::sync::watch; -use tracing::{Span, debug, error, info, trace}; +use tracing::{debug, error, info, trace, Span}; use super::{ - BaoFilePart, entry_state::{DataLocation, EntryState, OutboardLocation}, meta::Update, options::{Options, PathOptions}, + BaoFilePart, }; use crate::{ api::blobs::Bitfield, store::{ - Hash, IROH_BLOCK_SIZE, fs::{ + meta::{raw_outboard_size, Set}, TaskContext, - meta::{Set, raw_outboard_size}, }, util::{ - DD, FixedSize, MemOrFile, PartialMemStorage, SizeInfo, SparseMemFile, - read_checksummed_and_truncate, write_checksummed, + read_checksummed_and_truncate, write_checksummed, FixedSize, MemOrFile, + PartialMemStorage, SizeInfo, SparseMemFile, DD, }, + Hash, IROH_BLOCK_SIZE, }, }; diff --git a/src/store/fs/gc.rs b/src/store/fs/gc.rs index 5297889..c0bf0be 100644 --- a/src/store/fs/gc.rs +++ b/src/store/fs/gc.rs @@ -5,7 +5,7 @@ use genawaiter::sync::{Co, Gen}; use n0_future::{Stream, StreamExt}; use tracing::{debug, error, warn}; -use crate::{Hash, HashAndFormat, api::Store}; +use crate::{api::Store, Hash, HashAndFormat}; /// An event related to GC #[derive(Debug)] @@ -199,10 +199,10 @@ mod tests { use super::*; use crate::{ - BlobFormat, - api::{Store, blobs::AddBytesOptions}, + api::{blobs::AddBytesOptions, Store}, hashseq::HashSeq, store::fs::{options::PathOptions, tests::create_n0_bao}, + BlobFormat, }; async fn gc_smoke(store: &Store) -> TestResult<()> { diff --git a/src/store/fs/import.rs b/src/store/fs/import.rs index d2802c7..1502ffe 100644 --- a/src/store/fs/import.rs +++ b/src/store/fs/import.rs @@ -19,23 +19,22 @@ use std::{ }; use bao_tree::{ - BaoTree, ChunkNum, io::outboard::{PreOrderMemOutboard, PreOrderOutboard}, + BaoTree, ChunkNum, }; use bytes::Bytes; use genawaiter::sync::Gen; use irpc::{ - Channels, WithChannels, channel::{mpsc, none::NoReceiver}, + Channels, WithChannels, }; -use n0_future::{Stream, StreamExt, stream}; +use n0_future::{stream, Stream, StreamExt}; use ref_cast::RefCast; use smallvec::SmallVec; use tracing::{instrument, trace}; -use super::{TaskContext, meta::raw_outboard_size, options::Options}; +use super::{meta::raw_outboard_size, options::Options, TaskContext}; use crate::{ - BlobFormat, Hash, api::{ blobs::{AddProgressItem, ImportMode}, proto::{ @@ -45,10 +44,11 @@ use crate::{ }, }, store::{ + util::{MemOrFile, DD}, IROH_BLOCK_SIZE, - util::{DD, MemOrFile}, }, util::{outboard_with_progress::init_outboard, sink::Sink}, + BlobFormat, Hash, }; /// An import source. 
@@ -530,16 +530,12 @@ mod tests { } fn assert_expected_progress(progress: &[AddProgressItem]) { - assert!( - progress - .iter() - .any(|x| matches!(&x, AddProgressItem::Size { .. })) - ); - assert!( - progress - .iter() - .any(|x| matches!(&x, AddProgressItem::CopyDone)) - ); + assert!(progress + .iter() + .any(|x| matches!(&x, AddProgressItem::Size { .. }))); + assert!(progress + .iter() + .any(|x| matches!(&x, AddProgressItem::CopyDone))); } fn chunk_bytes(data: Bytes, chunk_size: usize) -> impl Iterator { diff --git a/src/store/fs/meta.rs b/src/store/fs/meta.rs index 3d6c545..f398860 100644 --- a/src/store/fs/meta.rs +++ b/src/store/fs/meta.rs @@ -37,13 +37,13 @@ use tables::{ReadOnlyTables, ReadableTables, Tables}; use tracing::{debug, error, info_span, trace}; use super::{ - BaoFilePart, delete_set::DeleteHandle, entry_state::{DataLocation, EntryState, OutboardLocation}, options::BatchOptions, util::PeekableReceiver, + BaoFilePart, }; -use crate::store::{Hash, IROH_BLOCK_SIZE, util::Tag}; +use crate::store::{util::Tag, Hash, IROH_BLOCK_SIZE}; /// Error type for message handler functions of the redb actor. /// diff --git a/src/store/fs/meta/proto.rs b/src/store/fs/meta/proto.rs index 972743b..6f4aaa6 100644 --- a/src/store/fs/meta/proto.rs +++ b/src/store/fs/meta/proto.rs @@ -7,13 +7,13 @@ use tracing::Span; use super::{ActorResult, ReadOnlyTables}; use crate::{ - Hash, api::proto::{ BlobStatusMsg, ClearProtectedMsg, DeleteBlobsMsg, ProcessExitRequest, ShutdownMsg, SyncDbMsg, }, store::{fs::entry_state::EntryState, util::DD}, util::channel::oneshot, + Hash, }; /// Get the entry state for a hash. diff --git a/src/store/fs/meta/tables.rs b/src/store/fs/meta/tables.rs index 99137bd..a983a27 100644 --- a/src/store/fs/meta/tables.rs +++ b/src/store/fs/meta/tables.rs @@ -2,7 +2,7 @@ use redb::{ReadableTable, TableDefinition, TableError}; use super::EntryState; -use crate::store::{Hash, HashAndFormat, fs::delete_set::FileTransaction, util::Tag}; +use crate::store::{fs::delete_set::FileTransaction, util::Tag, Hash, HashAndFormat}; pub(super) const BLOBS_TABLE: TableDefinition = TableDefinition::new("blobs-0"); diff --git a/src/store/mem.rs b/src/store/mem.rs index 5edb03f..43988b7 100644 --- a/src/store/mem.rs +++ b/src/store/mem.rs @@ -9,6 +9,7 @@ //! to the file system. 
use std::{ collections::{BTreeMap, HashMap, HashSet}, + future::Future, io::{self, Write}, num::NonZeroU64, ops::Deref, @@ -17,13 +18,14 @@ use std::{ }; use bao_tree::{ - BaoTree, ChunkNum, ChunkRanges, TreeNode, blake3, + blake3, io::{ - BaoContentItem, Leaf, - mixed::{EncodedItem, ReadBytesAt, traverse_ranges_validated}, + mixed::{traverse_ranges_validated, EncodedItem, ReadBytesAt}, outboard::PreOrderMemOutboard, sync::{Outboard, ReadAt, WriteAt}, + BaoContentItem, Leaf, }, + BaoTree, ChunkNum, ChunkRanges, TreeNode, }; use bytes::Bytes; use irpc::channel::mpsc; @@ -34,13 +36,12 @@ use tokio::{ sync::watch, task::{JoinError, JoinSet}, }; -use tracing::{Instrument, error, info, instrument, trace}; +use tracing::{error, info, instrument, trace, Instrument}; use super::util::{BaoTreeSender, PartialMemStorage}; use crate::{ - BlobFormat, Hash, api::{ - self, ApiClient, + self, blobs::{AddProgressItem, Bitfield, BlobStatus, ExportProgressItem}, proto::{ BatchMsg, BatchResponse, BlobDeleteRequest, BlobStatusMsg, BlobStatusRequest, Command, @@ -53,15 +54,17 @@ use crate::{ SetTagRequest, ShutdownMsg, SyncDbMsg, }, tags::TagInfo, + ApiClient, }, store::{ - HashAndFormat, IROH_BLOCK_SIZE, util::{SizeInfo, SparseMemFile, Tag}, + HashAndFormat, IROH_BLOCK_SIZE, }, util::{ - ChunkRangesExt, temp_tag::{TagDrop, TempTagScope, TempTags}, + ChunkRangesExt, }, + BlobFormat, Hash, }; #[derive(Debug, Default)] @@ -772,7 +775,7 @@ pub struct DataReader(BaoFileHandle); impl ReadBytesAt for DataReader { fn read_bytes_at(&self, offset: u64, size: usize) -> std::io::Result { - let entry = self.0.0.state.borrow(); + let entry = self.0 .0.state.borrow(); entry.data().read_bytes_at(offset, size) } } diff --git a/src/store/readonly_mem.rs b/src/store/readonly_mem.rs index 14e411f..a00cf82 100644 --- a/src/store/readonly_mem.rs +++ b/src/store/readonly_mem.rs @@ -13,13 +13,13 @@ use std::{ }; use bao_tree::{ - BaoTree, ChunkRanges, io::{ - Leaf, - mixed::{EncodedItem, ReadBytesAt, traverse_ranges_validated}, + mixed::{traverse_ranges_validated, EncodedItem, ReadBytesAt}, outboard::PreOrderMemOutboard, sync::ReadAt, + Leaf, }, + BaoTree, ChunkRanges, }; use bytes::Bytes; use irpc::channel::mpsc; @@ -30,9 +30,8 @@ use tokio::task::{JoinError, JoinSet}; use super::util::BaoTreeSender; use crate::{ - Hash, api::{ - self, ApiClient, TempTag, + self, blobs::{Bitfield, ExportProgressItem}, proto::{ self, BlobStatus, Command, ExportBaoMsg, ExportBaoRequest, ExportPathMsg, @@ -40,9 +39,11 @@ use crate::{ ImportBaoMsg, ImportByteStreamMsg, ImportBytesMsg, ImportPathMsg, ObserveMsg, ObserveRequest, }, + ApiClient, TempTag, }, - store::{IROH_BLOCK_SIZE, mem::CompleteStorage}, + store::{mem::CompleteStorage, IROH_BLOCK_SIZE}, util::ChunkRangesExt, + Hash, }; #[derive(Debug, Clone)] diff --git a/src/store/util.rs b/src/store/util.rs index cf11f6c..1547ff3 100644 --- a/src/store/util.rs +++ b/src/store/util.rs @@ -16,9 +16,9 @@ mod mem_or_file; mod sparse_mem_file; use irpc::channel::mpsc; pub use mem_or_file::{FixedSize, MemOrFile}; -use range_collections::{RangeSetRef, range_set::RangeSetEntry}; +use range_collections::{range_set::RangeSetEntry, RangeSetRef}; use ref_cast::RefCast; -use serde::{Deserialize, Serialize, de::DeserializeOwned}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; pub use sparse_mem_file::SparseMemFile; pub mod observer; mod size_info; diff --git a/src/store/util/partial_mem_storage.rs b/src/store/util/partial_mem_storage.rs index f77777c..2b658bf 100644 --- 
a/src/store/util/partial_mem_storage.rs +++ b/src/store/util/partial_mem_storage.rs @@ -1,11 +1,11 @@ use std::io; use bao_tree::{ + io::{sync::WriteAt, BaoContentItem}, BaoTree, - io::{BaoContentItem, sync::WriteAt}, }; -use super::{SparseMemFile, size_info::SizeInfo}; +use super::{size_info::SizeInfo, SparseMemFile}; use crate::{api::blobs::Bitfield, store::IROH_BLOCK_SIZE}; /// An incomplete entry, with all the logic to keep track of the state of the entry diff --git a/src/store/util/sparse_mem_file.rs b/src/store/util/sparse_mem_file.rs index 9678814..2f86878 100644 --- a/src/store/util/sparse_mem_file.rs +++ b/src/store/util/sparse_mem_file.rs @@ -5,7 +5,7 @@ use bao_tree::io::{ sync::{ReadAt, Size, WriteAt}, }; use bytes::Bytes; -use range_collections::{RangeSet2, range_set::RangeSetRange}; +use range_collections::{range_set::RangeSetRange, RangeSet2}; /// A file that is sparse in memory /// diff --git a/src/test.rs b/src/test.rs index fecd183..c0760a0 100644 --- a/src/test.rs +++ b/src/test.rs @@ -1,10 +1,12 @@ -use n0_future::{StreamExt, stream}; +use std::future::IntoFuture; + +use n0_future::{stream, StreamExt}; use rand::{RngCore, SeedableRng}; use crate::{ - BlobFormat, - api::{RequestResult, Store, blobs::AddBytesOptions, tags::TagInfo}, + api::{blobs::AddBytesOptions, tags::TagInfo, RequestResult, Store}, hashseq::HashSeq, + BlobFormat, }; pub async fn create_random_blobs( diff --git a/src/tests.rs b/src/tests.rs index b4a61f0..0e8634a 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -2,17 +2,16 @@ use std::{collections::HashSet, io, ops::Range, path::PathBuf}; use bao_tree::ChunkRanges; use bytes::Bytes; -use iroh::{Endpoint, NodeId, Watcher, protocol::Router}; +use iroh::{protocol::Router, Endpoint, NodeId, Watcher}; use irpc::RpcMessage; -use n0_future::{StreamExt, task::AbortOnDropHandle}; +use n0_future::{task::AbortOnDropHandle, StreamExt}; use tempfile::TempDir; use testresult::TestResult; use tokio::sync::{mpsc, watch}; use tracing::info; use crate::{ - BlobFormat, Hash, HashAndFormat, - api::{Store, blobs::Bitfield}, + api::{blobs::Bitfield, Store}, get, hashseq::HashSeq, net_protocol::Blobs, @@ -20,13 +19,14 @@ use crate::{ provider::Event, store::{ fs::{ + tests::{create_n0_bao, test_data, INTERESTING_SIZES}, FsStore, - tests::{INTERESTING_SIZES, create_n0_bao, test_data}, }, mem::MemStore, util::observer::Combine, }, util::sink::Drain, + BlobFormat, Hash, HashAndFormat, }; // #[tokio::test] diff --git a/src/util.rs b/src/util.rs index 35351fc..e110be0 100644 --- a/src/util.rs +++ b/src/util.rs @@ -1,7 +1,7 @@ use std::ops::{Bound, RangeBounds}; -use bao_tree::{ChunkNum, ChunkRanges, io::round_up_to_chunks}; -use range_collections::{RangeSet2, range_set::RangeSetEntry}; +use bao_tree::{io::round_up_to_chunks, ChunkNum, ChunkRanges}; +use range_collections::{range_set::RangeSetEntry, RangeSet2}; pub mod channel; pub(crate) mod temp_tag; @@ -11,8 +11,8 @@ pub mod serde { use std::{fmt, io}; use serde::{ - Deserializer, Serializer, de::{self, Visitor}, + Deserializer, Serializer, }; pub fn serialize(error: &io::Error, serializer: S) -> Result @@ -122,12 +122,13 @@ pub mod outboard_with_progress { use std::io::{self, BufReader, Read}; use bao_tree::{ - BaoTree, ChunkNum, blake3, + blake3, io::{ outboard::PreOrderOutboard, sync::{OutboardMut, WriteAt}, }, iter::BaoChunk, + BaoTree, ChunkNum, }; use smallvec::SmallVec; @@ -152,7 +153,7 @@ pub mod outboard_with_progress { right_child: &blake3::Hash, is_root: bool, ) -> blake3::Hash { - use blake3::hazmat::{ChainingValue, 
Mode, merge_subtrees_non_root, merge_subtrees_root}; + use blake3::hazmat::{merge_subtrees_non_root, merge_subtrees_root, ChainingValue, Mode}; let left_child: ChainingValue = *left_child.as_bytes(); let right_child: ChainingValue = *right_child.as_bytes(); if is_root { @@ -234,13 +235,14 @@ pub mod outboard_with_progress { #[cfg(test)] mod tests { use bao_tree::{ - BaoTree, blake3, + blake3, io::{outboard::PreOrderOutboard, sync::CreateOutboard}, + BaoTree, }; use testresult::TestResult; use crate::{ - store::{IROH_BLOCK_SIZE, fs::tests::test_data}, + store::{fs::tests::test_data, IROH_BLOCK_SIZE}, util::{outboard_with_progress::init_outboard, sink::Drain}, }; @@ -265,7 +267,7 @@ pub mod outboard_with_progress { } pub mod sink { - use std::io; + use std::{future::Future, io}; use irpc::RpcMessage; diff --git a/src/util/temp_tag.rs b/src/util/temp_tag.rs index e3fa211..feb333b 100644 --- a/src/util/temp_tag.rs +++ b/src/util/temp_tag.rs @@ -7,7 +7,7 @@ use std::{ use serde::{Deserialize, Serialize}; use tracing::{trace, warn}; -use crate::{BlobFormat, Hash, HashAndFormat, api::proto::Scope}; +use crate::{api::proto::Scope, BlobFormat, Hash, HashAndFormat}; /// An ephemeral, in-memory tag that protects content while the process is running. /// diff --git a/tests/blobs.rs b/tests/blobs.rs index e055d8b..dcb8118 100644 --- a/tests/blobs.rs +++ b/tests/blobs.rs @@ -5,12 +5,12 @@ use std::{ }; use iroh_blobs::{ - Hash, api::{ - Store, blobs::{AddProgressItem, Blobs}, + Store, }, store::{fs::FsStore, mem::MemStore}, + Hash, }; use n0_future::StreamExt; use testresult::TestResult; diff --git a/tests/tags.rs b/tests/tags.rs index 2a98f0e..3864bc5 100644 --- a/tests/tags.rs +++ b/tests/tags.rs @@ -4,12 +4,13 @@ use std::{ }; use iroh_blobs::{ - BlobFormat, Hash, HashAndFormat, api::{ - self, Store, + self, tags::{TagInfo, Tags}, + Store, }, store::{fs::FsStore, mem::MemStore}, + BlobFormat, Hash, HashAndFormat, }; use n0_future::{Stream, StreamExt}; use testresult::TestResult;