diff --git a/.cargo/config.toml b/.cargo/config.toml index b44a938e2904..3bd9e9b437d3 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -7,3 +7,19 @@ run-wasm = "run --release --package run_wasm --" # Some of our build.rs files only run if this is set, # so that we don't run them on cargo publish or on users machines. IS_IN_RERUN_WORKSPACE = "yes" + + +# [target.x86_64-unknown-linux-gnu] +# linker = "clang" +# rustflags = [ +# "-C", +# "link-arg=-fuse-ld=/usr/bin/mold", +# "-C", +# "split-debuginfo=unpacked", +# ] +# web_sys_unstable_apis is required to enable the web_sys clipboard API which egui_web uses, +# https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Clipboard.html +# as well as WebGPU apis. +# https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html +[target.wasm32-unknown-unknown] +rustflags = ["--cfg=web_sys_unstable_apis"] diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 99f0096817a2..f2e91a7e8adf 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -13,5 +13,10 @@ To get an auto-generated PR description you can put "copilot:summary" or "copilo ### What ### Checklist -* [ ] I have read and agree to [Contributor Guide](https://github.com/rerun-io/rerun/blob/main/CONTRIBUTING.md) and the [Code of Conduct](https://github.com/rerun-io/rerun/blob/main/CODE_OF_CONDUCT.md) -* [ ] I've included a screenshot or gif (if applicable) + +- [ ] I have read and agree to [Contributor Guide](https://github.com/rerun-io/rerun/blob/main/CONTRIBUTING.md) and the [Code of Conduct](https://github.com/rerun-io/rerun/blob/main/CODE_OF_CONDUCT.md) +- [ ] I've included a screenshot or gif (if applicable) + + + +PR Build Summary: {{ pr-build-summary }} diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 000000000000..e93f751b7327 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,66 @@ +# Overview + +Our CI workflows make 
heavy usage of [Reusable Workflows](https://docs.github.com/en/actions/using-workflows/reusing-workflows). These reusable workflows can then be tested manually via the `manual_dispatch.yml` workflow. +Or integrated into CI jobs such has `on_pull_request.yml` or `on_main.yml`. + +By convention: +- All reusable workflows start with the `reusable_` prefix. +- All workflows that are triggered via `workflow_dispatch` start with the `manual_` prefix. +- All workflows that are triggered via an event start with the `on_` prefix. + - `on_pull_request` is triggered on pull requests. + - `on_push_main` is triggered on pushes to the main branch. + +If you are going to be doing any editing of workflows, the +[VS Code extension](https://marketplace.visualstudio.com/items?itemName=cschleiden.vscode-github-actions) +for GitHub Actions is highly recommended. + +## Reusable Workflows +- [reusable_checks.yml](reusable_checks.yml) - These are all the checks that run to ensure the code is formatted, + linted, and tested. This job produces no artifacts other than a pass/fail criteria for the build. + - `SAVE_CACHE` - If true, the rust cache will be saved. Generally we only do this for builds on `main` +- [reusable_bench.yml](reusable_bench.yml) - This job runs the benchmarks to check for performance regressions. + - `SAVE_BENCH` - If true, then the benchmark results are saved to update https://ref.rerun.io/dev/bench/ +- [reusable_deploy_docs](reusable_deploy_docs.yml) - This job deploys the python and rust documentation to https://ref.rerun.io + - `PY_DOCS_VERSION_NAME` - The name to use for versioning the python docs. This should generally match the version in + `Cargo.toml`. + - `UPDATE_LATEST` - If true, then the docs will be deployed to `latest/` as well as the versioned directory. +- [reusable_build_and_test_wheels.yml](reusable_build_and_test_wheels.yml) - This job builds the wheels, runs the +end-to-end test, and produces a sample RRD. 
The artifacts are accessible via GitHub artifacts, but not otherwise +uploaded anywhere. + - `MATURIN_FEATURE_FLAGS` - The feature flags to pass to maturin. + - `PLATFORM` - Which platform to build for: `linux`, `macos-arm`, `macos-intel`, or `windows`. + - `RELEASE_VERSION` - If producing a release, the version number. This must match the version in `Cargo.toml`. + - `RRD_ARTIFACT_NAME` - Intermediate name of the GitHub rrd artifact for passing to `reusable_upload_wheels.yml` + - `SAVE_CACHE` - If true, the rust cache will be saved. Generally we only do this for builds on `main` + - `WHEEL_ARTIFACT_NAME` - Intermediate name of the GitHub wheel artifact for passing to `reusable_upload_wheels.yml` +- [reusable_upload_wheels.yml](reusable_upload_wheels.yml) - This job uploads the wheels to google cloud + - `RRD_ARTIFACT_NAME` - Intermediate name of the GitHub rrd artifact. This should match the name passed to + `reusable_build_and_test_wheels.yml` + - `WHEEL_ARTIFACT_NAME` - Intermediate name of the GitHub wheel artifact. This should match the name passed to + `reusable_build_and_test_wheels.yml` +- [reusable_build_web.yml](reusable_build_web.yml) - This job builds the wasm artifacts for the web. + - `RELEASE_VERSION` - If producing a release, the version number. This must match the version in `Cargo.toml`. +- [reusable_upload_web.yml](reusable_upload_web.yml) - This job uploads the web assets to google cloud. By default this + only uploads to: `app.rerun.io/commit//` + - `MARK_PRERELEASE_FOR_MAINLINE` - If true, then the web assets will go to `app.rerun.io/preleease/ + - `MARK_TAGGED_VERSION` - If true, then the web assets will go to `app.rerun.io/version/` + - `RELEASE_VERSION` - If producing a release, the version number. + - `RRD_ARTIFACT_NAME` - Intermediate name of the GitHub rrd artifact. This should match the name passed to + `reusable_build_and_test_wheels.yml` + - `UPLOAD_COMMIT_OVERRIDE` - If set, will replace the value of ``. 
This is necessary because we want pull + request builds associated with their originating commit, even if the web-build happens on an ephemeral merge-commit. +- [reusable_pr_summary.yml](reusable_pr_summary.yml) - This job updates the PR summary with the results of the CI run. + - This summary can be found at: + `https://build.rerun.io/pr//` + - `PR_NUMBER` - The PR number to update. This will generally be set by the `on_pull_request.yml` workflow using: + `${{github.event.pull_request.number}}` + +## Manual Workflows +- [manual_dispatch](manual_dispatch.yml) - This workflow is used to manually trigger the assorted reusable workflows for + testing. + - See the workflow file for the list of parameters. +- [manual_build_wheels_for_pr.yml](manual_build_wheels_for_pr.yml) - This workflow can be dispatched on a branch and + will build all of the wheels for the associated pull-request. Uses: + - [reusable_build_and_test_wheels.yml](reusable_build_and_test_wheels.yml) + - [reusable_upload_wheels.yml](reusable_upload_wheels.yml) + - [reusable_pr_summary.yml](reusable_pr_summary.yml) diff --git a/.github/workflows/manual_adhoc_web.yml b/.github/workflows/manual_adhoc_web.yml new file mode 100644 index 000000000000..b0d8a6e9d939 --- /dev/null +++ b/.github/workflows/manual_adhoc_web.yml @@ -0,0 +1,48 @@ +name: Run an adhoc web build + +on: + workflow_dispatch: + inputs: + EXTRA_FLAGS: + type: string + description: 'Extra flags to pass to `re_build_web_viewer`' + required: false + default: '' + ADHOC_NAME: + type: string + description: 'Name of the stored adhoc build' + required: true + +jobs: + + min-test-wheel: + name: 'Minimum Test Wheel' + # The upload-web job uses the min-test-wheel to get the RRD + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: adhoc-web-${{ github.event.inputs.ADHOC_NAME }} + PLATFORM: linux + MATURIN_FEATURE_FLAGS: '--no-default-features --features extension-module' + WHEEL_ARTIFACT_NAME: '' + RRD_ARTIFACT_NAME: 
linux-rrd-fast + secrets: inherit + + build-web-adhoc: + name: 'Build Web' + uses: ./.github/workflows/reusable_build_web.yml + with: + CONCURRENCY: adhoc-web-${{ github.event.inputs.ADHOC_NAME }} + EXTRA_FLAGS: ${{ github.event.inputs.EXTRA_FLAGS }} + secrets: inherit + + upload-web: + name: 'Upload Web' + needs: [min-test-wheel, build-web-adhoc] + uses: ./.github/workflows/reusable_upload_web.yml + with: + CONCURRENCY: adhoc-web-${{ github.event.inputs.ADHOC_NAME }} + MARK_PRERELEASE_FOR_MAINLINE: false + MARK_TAGGED_VERSION: false + ADHOC_NAME: ${{ github.event.inputs.ADHOC_NAME }} + RRD_ARTIFACT_NAME: linux-rrd-fast + secrets: inherit diff --git a/.github/workflows/manual_build_wheels_for_pr.yml b/.github/workflows/manual_build_wheels_for_pr.yml new file mode 100644 index 000000000000..4f9c23beee09 --- /dev/null +++ b/.github/workflows/manual_build_wheels_for_pr.yml @@ -0,0 +1,129 @@ +name: Build and Upload Wheels for PR + +on: + workflow_dispatch: + +jobs: + + check-for-pr: + runs-on: ubuntu-latest + outputs: + PR_NUMBER: ${{ steps.get_pr.outputs.PR_NUMBER }} + steps: + - name: Check if commit belongs to a PR + id: get_pr + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + pr_number=$(curl --silent --header "Authorization: Bearer ${GITHUB_TOKEN}" \ + --url "https://api.github.com/repos/${GITHUB_REPOSITORY}/commits/${GITHUB_SHA}/pulls" \ + | jq '.[] | .number') + + if [ -z "$pr_number" ]; then + echo "No PR associated with this commit" + exit 1 + else + echo "Commit is associated with PR: $pr_number" + echo "PR_NUMBER=$pr_number" >> "$GITHUB_OUTPUT" + fi + + build-linux: + needs: [check-for-pr] + name: 'Linux: Build/Test Wheels' + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-wheels-linux-${{ needs.check-for-pr.outputs.PR_NUMBER }} + PLATFORM: linux + WHEEL_ARTIFACT_NAME: linux-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + build-windows: + needs: [check-for-pr] + name: 'Windows: Build/Test 
Wheels' + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-wheels-windows-${{ needs.check-for-pr.outputs.PR_NUMBER }} + PLATFORM: windows + WHEEL_ARTIFACT_NAME: windows-wheel + RRD_ARTIFACT_NAME: '' + secrets: inherit + + build-macos-arm: + needs: [check-for-pr] + name: 'Macos-Arm: Build/Test Wheels' + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-wheels-macos-arm-${{ needs.check-for-pr.outputs.PR_NUMBER }} + PLATFORM: macos-arm + WHEEL_ARTIFACT_NAME: macos-arm-wheel + RRD_ARTIFACT_NAME: '' + secrets: inherit + + build-macos-intel: + needs: [check-for-pr] + name: 'Macos-Intel: Build/Test Wheels' + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-wheels-macos-intel-${{ needs.check-for-pr.outputs.PR_NUMBER }} + PLATFORM: macos-intel + WHEEL_ARTIFACT_NAME: 'macos-intel-wheel' + RRD_ARTIFACT_NAME: '' + secrets: inherit + + upload-wheels-linux: + name: 'Linux: Upload Wheels' + needs: [build-linux] + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: manual-wheels-linux-${{ needs.check-for-pr.outputs.PR_NUMBER }} + WHEEL_ARTIFACT_NAME: linux-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-windows: + name: 'Windows: Upload Wheels' + needs: [build-linux, build-windows] + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: manual-wheels-windows-${{ needs.check-for-pr.outputs.PR_NUMBER }} + WHEEL_ARTIFACT_NAME: windows-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-macos-arm: + name: 'Macos-Arm: Upload Wheels' + needs: [build-linux, build-macos-arm] + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: manual-wheels--macos-arm-${{ needs.check-for-pr.outputs.PR_NUMBER }} + WHEEL_ARTIFACT_NAME: macos-arm-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-macos-intel: + name: 'Macos-Intel: Upload Wheels' 
+ needs: [build-linux, build-macos-intel] + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: manual-wheels-macos-intel-${{ needs.check-for-pr.outputs.PR_NUMBER }} + WHEEL_ARTIFACT_NAME: macos-intel-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + generate-wheel-index: + name: 'Generate Pip Index' + needs: [check-for-pr, upload-wheels-linux, upload-wheels-windows, upload-wheels-macos-arm, upload-wheels-macos-intel] + uses: ./.github/workflows/reusable_pip_index.yml + with: + CONCURRENCY: manual-wheels-${{ needs.check-for-pr.outputs.PR_NUMBER }} + secrets: inherit + + update-pr-summary: + name: 'Update PR Summary' + needs: [check-for-pr, upload-wheels-linux, upload-wheels-windows, upload-wheels-macos-arm, upload-wheels-macos-intel] + uses: ./.github/workflows/reusable_pr_summary.yml + with: + CONCURRENCY: manual-wheels-${{ needs.check-for-pr.outputs.PR_NUMBER }} + PR_NUMBER: ${{ needs.check-for-pr.outputs.PR_NUMBER}} + secrets: inherit diff --git a/.github/workflows/manual_dispatch.yml b/.github/workflows/manual_dispatch.yml new file mode 100644 index 000000000000..43722ddde620 --- /dev/null +++ b/.github/workflows/manual_dispatch.yml @@ -0,0 +1,270 @@ +name: Manually Dispatch Workflows + +on: + workflow_dispatch: + # NOTE: boolean inputs are still actually strings + # See: https://github.com/actions/runner/issues/1483 + inputs: + # Sorted based on job workflow logic rather than alphabetical + CHECKS: + description: 'Run reuseable_checks' + type: boolean + required: false + default: true + + DEPLOY_DOCS: + description: 'Run reusable_deploy_docs' + type: boolean + required: false + default: false + + BENCHES: + description: 'Run reusable_bench.yml' + type: boolean + required: false + default: false + + MIN_TEST_WHEEL: + description: 'Run reuseable_build_and_test_wheels (Minimal Wheel Linux Only)' + type: boolean + required: false + + WHEEL_PLATFORMS: + description: 'Run reusable_build_and_test_wheels (Full build: 
linux,windows,macos-arm,macos-intel)' + type: string + required: false + default: 'linux' + + BUILD_WEB: + description: 'Run reusable_build_web' + type: boolean + required: false + default: true + + RELEASE_VERSION: + description: 'Release Version Number (Must match Cargo.toml)' + type: string + required: false + default: 'prerelease' + + UPLOAD_GCLOUD: + description: 'Upload wheels and wasm to gcloud' + type: boolean + required: false + default: false + + UPDATE_PR_SUMMARY: + description: 'Update the PR Summary' + type: boolean + required: false + default: false + + SAVE_CACHE: + description: 'Save the rust-cache where relevant' + type: boolean + required: false + default: false + +jobs: + + checks: + name: Run All Checks + if: ${{ github.event.inputs.CHECKS == 'true' }} + uses: ./.github/workflows/reusable_checks.yml + with: + CONCURRENCY: manual-dispatch-${{ github.run_id}} + SAVE_CACHE: ${{ github.event.inputs.SAVE_CACHE == 'true' }} + secrets: inherit + + deploy-docs: + # Never deploy docs if checks haven't passed + needs: [checks] + name: Deploy Docs + if: ${{ github.event.inputs.DEPLOY_DOCS == 'true' }} + uses: ./.github/workflows/reusable_deploy_docs.yml + with: + CONCURRENCY: manual-dispatch-${{ github.run_id}} + PY_DOCS_VERSION_NAME: "test" + UPDATE_LATEST: false + secrets: inherit + + benches: + name: Benchmarks + if: ${{ github.event.inputs.BENCHES == 'true' }} + uses: ./.github/workflows/reusable_bench.yml + with: + CONCURRENCY: manual-dispatch-${{ github.run_id}} + secrets: inherit + + min-test-wheel: + name: 'Minimum Test Wheel' + # The upload-web job uses the min-test-wheel to get the RRD + if: ${{ (github.event.inputs.MIN_TEST_WHEEL == 'true') || ((github.event.inputs.UPLOAD_GCLOUD == 'true') && ( github.event.inputs.BUILD_WEB == 'true') ) }} + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-dispatch-${{ github.run_id}} + SAVE_CACHE: ${{ github.event.inputs.SAVE_CACHE == 'true' }} + PLATFORM: linux + 
MATURIN_FEATURE_FLAGS: '--no-default-features --features extension-module' + WHEEL_ARTIFACT_NAME: '' + RRD_ARTIFACT_NAME: linux-rrd-fast + secrets: inherit + + build-linux: + name: 'Linux: Build/Test Wheels' + # The upload-wheels jobs all use the linux build to get the RRD + # TODO(jleibs): Debug why multi-line if statements don't work here + if: ${{ contains(github.event.inputs.WHEEL_PLATFORMS, 'linux') || ((github.event.inputs.UPLOAD_GCLOUD == 'true') && ( github.event.inputs.WHEEL_PLATFORMS != '') ) }} + + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-dispatch-linux-${{ github.run_id}} + SAVE_CACHE: ${{ github.event.inputs.SAVE_CACHE == 'true' }} + PLATFORM: linux + WHEEL_ARTIFACT_NAME: linux-wheel + RRD_ARTIFACT_NAME: linux-rrd + RELEASE_VERSION: ${{ github.event.inputs.RELEASE_VERSION }} + secrets: inherit + + build-windows: + name: 'Windows: Build/Test Wheels' + if: ${{ contains(github.event.inputs.WHEEL_PLATFORMS, 'windows') }} + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-dispatch-windows-${{ github.run_id}} + SAVE_CACHE: ${{ github.event.inputs.SAVE_CACHE == 'true' }} + PLATFORM: windows + WHEEL_ARTIFACT_NAME: windows-wheel + RRD_ARTIFACT_NAME: '' + RELEASE_VERSION: ${{ github.event.inputs.RELEASE_VERSION }} + secrets: inherit + + build-macos-arm: + name: 'Macos-Arm: Build/Test Wheels' + if: ${{ contains(github.event.inputs.WHEEL_PLATFORMS, 'macos-arm') }} + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-dispatch-macos-arm-${{ github.run_id}} + SAVE_CACHE: ${{ github.event.inputs.SAVE_CACHE == 'true' }} + PLATFORM: macos-arm + WHEEL_ARTIFACT_NAME: macos-arm-wheel + RRD_ARTIFACT_NAME: '' + RELEASE_VERSION: ${{ github.event.inputs.RELEASE_VERSION }} + secrets: inherit + + build-macos-intel: + name: 'Macos-Intel: Build/Test Wheels' + if: ${{ contains(github.event.inputs.WHEEL_PLATFORMS, 'macos-intel') }} + uses: 
./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: manual-dispatch-macos-intel-${{ github.run_id}} + SAVE_CACHE: ${{ github.event.inputs.SAVE_CACHE == 'true' }} + PLATFORM: macos-intel + WHEEL_ARTIFACT_NAME: 'macos-intel-wheel' + RRD_ARTIFACT_NAME: '' + RELEASE_VERSION: ${{ github.event.inputs.RELEASE_VERSION }} + + secrets: inherit + + upload-wheels-linux: + name: 'Linux: Upload Wheels' + needs: [build-linux] + if: ${{ contains(github.event.inputs.WHEEL_PLATFORMS, 'linux') && (github.event.inputs.UPLOAD_GCLOUD == 'true') }} + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: manual-dispatch-linux-${{ github.run_id}} + WHEEL_ARTIFACT_NAME: linux-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-windows: + name: 'Windows: Upload Wheels' + needs: [build-linux, build-windows] + if: ${{ contains(github.event.inputs.WHEEL_PLATFORMS, 'windows') && (github.event.inputs.UPLOAD_GCLOUD == 'true') }} + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: manual-dispatch-windows-${{ github.run_id}} + WHEEL_ARTIFACT_NAME: windows-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-macos-arm: + name: 'Macos-Arm: Upload Wheels' + needs: [build-linux, build-macos-arm] + if: ${{ contains(github.event.inputs.WHEEL_PLATFORMS, 'macos-arm') && (github.event.inputs.UPLOAD_GCLOUD == 'true') }} + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: manual-dispatch-macos-arm-${{ github.run_id}} + WHEEL_ARTIFACT_NAME: macos-arm-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-macos-intel: + name: 'Macos-Intel: Upload Wheels' + needs: [build-linux, build-macos-intel] + if: ${{ contains(github.event.inputs.WHEEL_PLATFORMS, 'macos-intel') && (github.event.inputs.UPLOAD_GCLOUD == 'true') }} + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: manual-dispatch-macos-intel-${{ github.run_id}} + 
WHEEL_ARTIFACT_NAME: macos-intel-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + build-web: + name: 'Build Web' + if: ${{ github.event.inputs.BUILD_WEB == 'true'}} + uses: ./.github/workflows/reusable_build_web.yml + with: + CONCURRENCY: manual-dispatch-${{ github.run_id}} + RELEASE_VERSION: ${{ github.event.inputs.RELEASE_VERSION }} + secrets: inherit + + upload-web: + name: 'Upload Web' + needs: [min-test-wheel, build-web] + if: ${{ (github.event.inputs.BUILD_WEB == 'true') && (github.event.inputs.UPLOAD_GCLOUD == 'true') }} + uses: ./.github/workflows/reusable_upload_web.yml + with: + CONCURRENCY: manual-dispatch-${{ github.run_id}} + MARK_PRERELEASE_FOR_MAINLINE: false + MARK_TAGGED_VERSION: false + RRD_ARTIFACT_NAME: linux-rrd-fast + secrets: inherit + + check-for-pr: + runs-on: ubuntu-latest + if: ${{ github.event.inputs.UPDATE_PR_SUMMARY == 'true' }} + outputs: + PR_NUMBER: ${{ steps.get_pr.outputs.PR_NUMBER }} + steps: + - name: Check if commit belongs to a PR + id: get_pr + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + pr_number=$(curl --silent --header "Authorization: Bearer ${GITHUB_TOKEN}" \ + --url "https://api.github.com/repos/${GITHUB_REPOSITORY}/commits/${GITHUB_SHA}/pulls" \ + | jq '.[] | .number') + + if [ -z "$pr_number" ]; then + echo "No PR associated with this commit" + exit 1 + else + echo "Commit is associated with PR: $pr_number" + echo "PR_NUMBER=$pr_number" >> "$GITHUB_OUTPUT" + fi + + update-pr-summary: + name: 'Update PR Summary' + # TODO(jleibs): There's no good way to have an optional needs. + # If we leave this here the job fails if we don't build all the wheels + web. + # Since this just just for testing, leave this out. We can manually run it to update + # a PR if we want to see changes. 
+ #needs: [check-for-pr, upload-web, upload-wheels-linux, upload-wheels-windows, upload-wheels-macos-arm, upload-wheels-macos-intel] + needs: [check-for-pr] + if: ${{ github.event.inputs.UPDATE_PR_SUMMARY == 'true' }} + uses: ./.github/workflows/reusable_pr_summary.yml + with: + CONCURRENCY: manual-dispatch-${{ github.run_id}} + PR_NUMBER: ${{ needs.check-for-pr.outputs.PR_NUMBER }} + secrets: inherit diff --git a/.github/workflows/manual_run_bench.yml b/.github/workflows/manual_run_bench.yml new file mode 100644 index 000000000000..05bb2cbc409d --- /dev/null +++ b/.github/workflows/manual_run_bench.yml @@ -0,0 +1,59 @@ +name: Manually run the benchmarks + +on: + workflow_dispatch: + inputs: + BENCH_NAME: + description: 'Name of the benchmark to save' + type: string + required: false + default: '' + COMPARE_TO: + description: 'Saved Benchmark to compare to' + type: string + required: false + default: 'main' + +jobs: + + check-for-pr: + runs-on: ubuntu-latest + outputs: + PR_NUMBER: ${{ steps.get_pr.outputs.PR_NUMBER }} + steps: + - name: Check if commit belongs to a PR + id: get_pr + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + pr_number=$(curl --silent --header "Authorization: Bearer ${GITHUB_TOKEN}" \ + --url "https://api.github.com/repos/${GITHUB_REPOSITORY}/commits/${GITHUB_SHA}/pulls" \ + | jq '.[] | .number') + + if [ -z "$pr_number" ]; then + echo "No PR associated with this commit" + exit 1 + else + echo "Commit is associated with PR: $pr_number" + echo "PR_NUMBER=$pr_number" >> "$GITHUB_OUTPUT" + fi + + run-benches: + name: Benchmarks + needs: [check-for-pr] + uses: ./.github/workflows/reusable_bench.yml + with: + CONCURRENCY: manual-bench-${{ needs.check-for-pr.outputs.PR_NUMBER }} + BENCH_NAME: ${{ github.event.inputs.BENCH_NAME }} + COMPARE_TO: ${{ github.event.inputs.COMPARE_TO }} + secrets: inherit + + + update-pr-summary: + name: 'Update PR Summary' + needs: [check-for-pr, run-benches] + uses: 
./.github/workflows/reusable_pr_summary.yml + with: + CONCURRENCY: manual-bench-${{ needs.check-for-pr.outputs.PR_NUMBER }} + PR_NUMBER: ${{ needs.check-for-pr.outputs.PR_NUMBER}} + secrets: inherit diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml deleted file mode 100644 index acdcfff2cd7d..000000000000 --- a/.github/workflows/misc.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: CI (Misc) - -on: - workflow_dispatch: - pull_request: - push: - branches: - - "main" - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number }} # Cancel previous CI jobs on the same branch - cancel-in-progress: true - -jobs: - misc-rerun-lints: - name: Rerun lints - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - - - name: Rerun lints - run: | - ./scripts/lint.py - - - name: Check for too large files - run: | - ./scripts/check_large_files.sh diff --git a/.github/workflows/on_pull_request.yml b/.github/workflows/on_pull_request.yml new file mode 100644 index 000000000000..877f34ba392c --- /dev/null +++ b/.github/workflows/on_pull_request.yml @@ -0,0 +1,64 @@ +name: Pull-Request + +on: + pull_request: + types: + - opened + - synchronize + +# These jobs use fairly short names as they are a prefix in the display hierarchy +jobs: + checks: + name: Checks + uses: ./.github/workflows/reusable_checks.yml + with: + CONCURRENCY: pr-${{ github.event.pull_request.number }} + secrets: inherit + + min-test-wheel: + name: "Minimum Wheel" + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: pr-${{ github.event.pull_request.number }} + SAVE_CACHE: false + PLATFORM: linux + MATURIN_FEATURE_FLAGS: "--no-default-features --features extension-module" + WHEEL_ARTIFACT_NAME: "linux-wheel-fast" + RRD_ARTIFACT_NAME: linux-rrd-fast + secrets: inherit + + build-web: + name: "Build Web" + uses: ./.github/workflows/reusable_build_web.yml + 
with: + CONCURRENCY: pr-${{ github.event.pull_request.number }} + secrets: inherit + + upload-web: + name: "Upload Web" + needs: [min-test-wheel, build-web] + uses: ./.github/workflows/reusable_upload_web.yml + with: + CONCURRENCY: pr-${{ github.event.pull_request.number }} + RRD_ARTIFACT_NAME: linux-rrd-fast + UPLOAD_COMMIT_OVERRIDE: ${{ github.event.pull_request.head.sha }} + secrets: inherit + + # run-notebook: + # name: 'Run Notebook' + # needs: [min-test-wheel] + # uses: ./.github/workflows/reusable_run_notebook.yml + # with: + # CONCURRENCY: pr-${{ github.event.pull_request.number }} + # WHEEL_ARTIFACT_NAME: linux-wheel-fast + # UPLOAD_COMMIT_OVERRIDE: ${{ github.event.pull_request.head.sha }} + # secrets: inherit + + save-pr-summary: + name: "Save PR Summary" + needs: [upload-web] #run-notebook] + uses: ./.github/workflows/reusable_pr_summary.yml + with: + CONCURRENCY: pr-${{ github.event.pull_request.number }} + PR_NUMBER: ${{ github.event.pull_request.number }} + secrets: inherit diff --git a/.github/workflows/on_push_main.yml b/.github/workflows/on_push_main.yml new file mode 100644 index 000000000000..59d759f63aa3 --- /dev/null +++ b/.github/workflows/on_push_main.yml @@ -0,0 +1,189 @@ +name: Push To Main + +on: + push: + branches: + - "main" + +jobs: + checks: + name: Checks + uses: ./.github/workflows/reusable_checks.yml + with: + CONCURRENCY: push-${{ github.ref_name }} + secrets: inherit + + benches: + name: Benchmarks + uses: ./.github/workflows/reusable_bench.yml + with: + CONCURRENCY: push-${{ github.ref_name }} + SAVE_BENCHES: true + BENCH_NAME: main + COMPARE_TO: main + secrets: inherit + + deploy-docs: + needs: [checks, benches] + name: Deploy Docs + uses: ./.github/workflows/reusable_deploy_docs.yml + with: + CONCURRENCY: push-${{ github.ref_name }} + PY_DOCS_VERSION_NAME: "HEAD" + UPDATE_LATEST: false + secrets: inherit + + build-web: + name: 'Build Web' + uses: ./.github/workflows/reusable_build_web.yml + with: + CONCURRENCY: push-${{ 
github.ref_name }} + secrets: inherit + + upload-web: + name: 'Upload Web' + needs: [build-linux, build-web] + uses: ./.github/workflows/reusable_upload_web.yml + with: + CONCURRENCY: push-${{ github.ref_name }} + RRD_ARTIFACT_NAME: linux-rrd + MARK_PRERELEASE_FOR_MAINLINE: true + secrets: inherit + + build-linux: + needs: [checks] + name: 'Linux: Build/Test Wheels' + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: push-linux-${{ github.ref_name }} + PLATFORM: linux + WHEEL_ARTIFACT_NAME: linux-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + build-windows: + needs: [checks] + name: 'Windows: Build/Test Wheels' + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: push-windows-${{ github.ref_name }} + PLATFORM: windows + WHEEL_ARTIFACT_NAME: windows-wheel + RRD_ARTIFACT_NAME: '' + secrets: inherit + + build-macos-arm: + needs: [checks] + name: 'Macos-Arm: Build/Test Wheels' + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: push-macos-arm-${{ github.ref_name }} + PLATFORM: macos-arm + WHEEL_ARTIFACT_NAME: macos-arm-wheel + RRD_ARTIFACT_NAME: '' + secrets: inherit + + build-macos-intel: + needs: [checks] + name: 'Macos-Intel: Build/Test Wheels' + uses: ./.github/workflows/reusable_build_and_test_wheels.yml + with: + CONCURRENCY: push-macos-intel-${{ github.ref_name }} + PLATFORM: macos-intel + WHEEL_ARTIFACT_NAME: 'macos-intel-wheel' + RRD_ARTIFACT_NAME: '' + secrets: inherit + + upload-wheels-linux: + name: 'Linux: Upload Wheels' + needs: [build-linux] + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: push-linux-${{ github.ref_name }} + WHEEL_ARTIFACT_NAME: linux-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-windows: + name: 'Windows: Upload Wheels' + needs: [build-linux, build-windows] + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: push-windows-${{ github.ref_name }} 
+ WHEEL_ARTIFACT_NAME: windows-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-macos-arm: + name: 'Macos-Arm: Upload Wheels' + needs: [build-linux, build-macos-arm] + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: push-macos-arm-${{ github.ref_name }} + WHEEL_ARTIFACT_NAME: macos-arm-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + upload-wheels-macos-intel: + name: 'Macos-Intel: Upload Wheels' + needs: [build-linux, build-macos-intel] + uses: ./.github/workflows/reusable_upload_wheels.yml + with: + CONCURRENCY: push-macos-intel-${{ github.ref_name }} + WHEEL_ARTIFACT_NAME: macos-intel-wheel + RRD_ARTIFACT_NAME: linux-rrd + secrets: inherit + + generate-pip-index: + name: 'Generate Pip Index' + needs: [upload-wheels-linux, upload-wheels-windows, upload-wheels-macos-arm, upload-wheels-macos-intel] + uses: ./.github/workflows/reusable_pip_index.yml + with: + CONCURRENCY: push-${{ github.ref_name }} + secrets: inherit + + pre-release: + name: Pre Release + concurrency: + group: push-${{ github.ref_name }}-prerelease + cancel-in-progress: true + needs: [upload-web, generate-pip-index] + runs-on: "ubuntu-latest" + steps: + - name: Add SHORT_SHA env property with commit short sha + run: echo "SHORT_SHA=`echo ${{github.sha}} | cut -c1-7`" >> $GITHUB_ENV + + # First delete the old prerelease. If we don't do this, we don't get things like + # proper source-archives and changelog info. + # https://github.com/dev-drprasad/delete-tag-and-release + - uses: dev-drprasad/delete-tag-and-release@v0.2.1 + with: + tag_name: prerelease + delete_release: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Create the actual prerelease + # https://github.com/ncipollo/release-action + - name: GitHub Release + uses: ncipollo/release-action@v1.12.0 + with: + body: | + This is a prerelease. It is not intended for production use. + Please report any issues you find. 
+ + ## Example Hosted App + https://app.rerun.io/commit/${{ env.SHORT_SHA }} + + ## Wheels can be installed with: + ``` + pip install --pre -f https://build.rerun.io/commit/${{ env.SHORT_SHA}}/wheels --upgrade rerun-sdk + ``` + prerelease: true + name: "Development Build" + tag: "prerelease" + token: ${{ secrets.GITHUB_TOKEN }} + generateReleaseNotes: true + allowUpdates: true + removeArtifacts: true + replacesArtifacts: true diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml deleted file mode 100644 index 491c5353550a..000000000000 --- a/.github/workflows/python.yml +++ /dev/null @@ -1,629 +0,0 @@ -name: CI (Python) - -on: - pull_request: - types: [labeled, synchronize, opened] - push: - branches: - - "main" - tags: - - "v*.*.*" # on release tag - workflow_dispatch: - inputs: - force_build_wheel: - description: "Build python wheels" - required: true - default: false - type: boolean - -env: - PYTHON_VERSION: "3.8" - PRE_RELEASE_INSTRUCTIONS: | - ## Installing the pre-release Python SDK - 1. Download the correct `.whl`. - 2. Run `pip install rerun_sdk<...>.whl` (replace `<...>` with the actual filename) - 3. 
Test it: `rerun --version` - UBUNTU_REQUIRED_PKGS: libgtk-3-dev libxcb-render0-dev libxcb-shape0-dev libxcb-xfixes0-dev libxkbcommon-dev libssl-dev libfontconfig1-dev libatk-bridge2.0 libfreetype6-dev libglib2.0-dev - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} # Cancel previous CI jobs only on pull-requests - cancel-in-progress: true - -jobs: - lint: - name: Python lints (black, mypy, flake8) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - uses: extractions/setup-just@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - just-version: 1.5 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_VERSION }} - cache: "pip" - cache-dependency-path: "rerun_py/requirements-lint.txt" - - - name: Install Python dependencies - run: | - pip install --upgrade pip - pip install -r rerun_py/requirements-lint.txt - - - name: Lint Python - run: | - just py-lint - - - name: Check requirements - run: | - just py-requirements - - # --------------------------------------------------------------------------- - # We need one wheel-build to be special so the other builds (namely mac arm) can use its rrd - # This copy-paste is awful, but we'll refactor the build soon. - wheels-linux: - if: github.event_name == 'push' || github.event.inputs.force_build_wheel - name: Build Python Wheels (Linux) - runs-on: ubuntu-latest - container: - image: rerunio/ci_docker:0.6 - steps: - - uses: actions/checkout@v3 - - # These should already be in the docker container, but run for good measure. 
A no-op install - # should be fast, and this way things don't break if we add new packages without rebuilding - # docker - - name: Cache APT Packages - uses: awalsh128/cache-apt-pkgs-action@v1.2.2 - with: - packages: ${{ env.UBUNTU_REQUIRED_PKGS }} - version: 2.0 # Increment this to pull newer packages - execute_install_scripts: true - - - name: Set up cargo cache - uses: Swatinem/rust-cache@v2 - with: - env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY - # Don't update the cache -- it will be updated by the lint job - # TODO(jleibs): this job will likely run before rust.yml updates - # the cache. Better cross-job sequencing would be nice here - save-if: False - - # These should already be in the docker container, but run for good measure. A no-op install - # should be fast, and this way things don't break if we add new packages without rebuilding - # docker - - run: pip install -r rerun_py/requirements-build.txt - - # ---------------------------------------------------------------------------------- - - - name: Patch Cargo.toml for pre-release - if: github.ref == 'refs/heads/main' - # After patching the pre-release version, run cargo update. 
- # This updates the cargo.lock file with the new version numbers and keeps the wheel build from failing - run: | - python3 scripts/version_util.py --patch_prerelease - cargo update -w - - - name: Version check for tagged-release - if: startsWith(github.ref, 'refs/tags/v') - # This call to version_util.py will assert version from Cargo.toml matches git tagged version vX.Y.Z - run: | - python3 scripts/version_util.py --check_version - - - name: Store the expected version - # Find the current cargo version and store it in the GITHUB_ENV var: `expected_version` - shell: bash - run: | - echo "expected_version=$(python3 scripts/version_util.py --bare_cargo_version)" >> $GITHUB_ENV - - - name: Build Wheel - uses: PyO3/maturin-action@v1 - with: - maturin-version: "0.14.10" - manylinux: manylinux_2_31 - container: off - command: build - args: | - --manifest-path rerun_py/Cargo.toml - --release - --target x86_64-unknown-linux-gnu - --no-default-features - --features pypi - --out pre-dist - - - name: Install wheel dependencies - # First we install the dependencies manually so we can use `--no-index` when installing the wheel. - # This needs to be a separate step for some reason or the following step fails - # TODO(jleibs): pull these deps from pyproject.toml - # TODO(jleibs): understand why deps can't be installed in the same step as the wheel - shell: bash - run: | - pip install deprecated numpy>=1.23 pyarrow==10.0.1 - - - name: Install built wheel - # Now install the wheel using a specific version and --no-index to guarantee we get the version from - # the pre-dist folder. Note we don't use --force-reinstall here because --no-index means it wouldn't - # find the dependencies to reinstall them. 
- shell: bash - run: | - pip uninstall rerun-sdk - pip install rerun-sdk==${{ env.expected_version }} --no-index --find-links pre-dist - - - name: Verify built wheel version - shell: bash - run: | - python3 -m rerun --version - which rerun - rerun --version - - - name: Run unit tests - shell: bash - run: cd rerun_py/tests && pytest - - - name: Run e2e test - shell: bash - run: RUST_LOG=debug scripts/run_python_e2e_test.py --no-build # rerun-sdk is already built and installed - - - name: Unpack the wheel - shell: bash - run: | - mkdir unpack-dist - wheel unpack pre-dist/*.whl --dest unpack-dist - - - name: Get the folder name - shell: bash - run: | - echo "pkg_folder=$(ls unpack-dist)" >> $GITHUB_ENV - - - name: Cache RRD dataset - id: dataset - uses: actions/cache@v3 - with: - path: examples/python/colmap/dataset/ - # TODO(jleibs): Derive this key from the invocation below - key: colmap-dataset-colmap-fiat-v0 - - - name: Generate Embedded RRD file - shell: bash - # If you change the line below you should almost definitely change the `key:` line above by giving it a new, unique name - run: | - mkdir rrd - pip install -r examples/python/colmap/requirements.txt - python3 examples/python/colmap/main.py --dataset colmap_fiat --resize 800x600 --save rrd/colmap_fiat.rrd - cp rrd/colmap_fiat.rrd unpack-dist/${{ env.pkg_folder }}/rerun_sdk/rerun_demo/colmap_fiat.rrd - - - name: Repack the wheel - shell: bash - run: | - mkdir dist - wheel pack unpack-dist/${{ env.pkg_folder }} --dest dist/ - - - name: Upload wheels - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - # All platforms are currently creating the same rrd file, upload one of them - - name: Save RRD artifact - uses: actions/upload-artifact@v3 - with: - name: rrd - path: rrd - - # --------------------------------------------------------------------------- - matrix-setup: - # Building all the wheels is expensive, so we only run this job when we push (to main or release tags), - # or if the job 
was manually triggered with `force_build_wheel` set to true. - if: github.event_name == 'push' || github.event.inputs.force_build_wheel - runs-on: ubuntu-latest - - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - - steps: - - name: Dump GitHub context - env: - GITHUB_CONTEXT: ${{ toJson(github) }} - run: echo "$GITHUB_CONTEXT" - - name: Dump job context - env: - JOB_CONTEXT: ${{ toJson(job) }} - run: echo "$JOB_CONTEXT" - - - id: set-matrix - shell: bash - # TODO(jleibs): figure out why tests are failing to complete on `x86_64-apple-darwin` - # See: https://github.com/rerun-io/rerun/pull/1853 - run: | - matrix=() - matrix+=('{"platform": "macos", "target": "x86_64-apple-darwin", "run_tests": false, "runs_on": "macos-latest" },') - matrix+=('{"platform": "macos", "target": "aarch64-apple-darwin", "run_tests": false, "runs_on": "macos-latest" },') # NOTE: we can't run tests on arm since our macos runner is x86_64 - matrix+=('{"platform": "windows", "target": "x86_64-pc-windows-msvc", "run_tests": true, "runs_on": "windows-latest-8-cores"},') - - echo "Matrix values: ${matrix[@]}" - - echo "matrix={\"include\":[${matrix[@]}]}" >> $GITHUB_OUTPUT - - wheels: - name: Build Remaining Python Wheels - needs: [lint, matrix-setup, wheels-linux] - - strategy: - matrix: ${{fromJson(needs.matrix-setup.outputs.matrix)}} - - runs-on: ${{ matrix.runs_on }} - - steps: - - uses: actions/checkout@v3 - - - name: Set up cargo cache - uses: Swatinem/rust-cache@v2 - with: - env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY - # Don't update the cache -- it will be updated by the lint job - # TODO(jleibs): this job will likely run before rust.yml updates - # the cache. 
Better cross-job sequencing would be nice here - save-if: False - - # The pip-cache setup logic doesn't work in the ubuntu docker container - # That's probably fine since we bake these deps into the container already - - name: Setup python - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_VERSION }} - cache: "pip" - cache-dependency-path: "rerun_py/requirements-build.txt" - - # These should already be in the docker container, but run for good measure. A no-op install - # should be fast, and this way things don't break if we add new packages without rebuilding - # docker - - run: pip install -r rerun_py/requirements-build.txt - - # ---------------------------------------------------------------------------------- - # Install prerequisites for building the web-viewer Wasm - - # We have a nice script for that: ./scripts/setup_web.sh - # Unfortunately, we can't run that on Windows, because Windows doesn't come with - # a package manager like grown-up OSes do (at least not the CI version of Windows). - # Also we can't run it on linux because the 20.04 Docker container will install - # an old version of binaryen/wasm-opt that barfs on the `--fast-math` flag - # So we only run the script on macos, and then on Windows we do the parts of the script manually. - # On ubuntu, the correct packages are pre-installed in our docker container. - - - name: Install prerequisites for building the web-viewer Wasm (non-Windows) - if: matrix.platform == 'macos' - shell: bash - run: ./scripts/setup_web.sh - - # The first steps of setup_web.sh, for Windows: - - name: Install wasm32 and wasm-bindgen-cli for building the web-viewer Wasm on windows - if: matrix.platform == 'windows' - shell: bash - run: rustup target add wasm32-unknown-unknown && cargo install wasm-bindgen-cli --version 0.2.84 - - # The last step of setup_web.sh, for Windows. 
- # Since 'winget' is not available within the GitHub runner, we download the package directly: - # See: https://github.com/marketplace/actions/engineerd-configurator - - name: Install binaryen for building the web-viewer Wasm on windows - if: matrix.platform == 'windows' - uses: engineerd/configurator@v0.0.9 - with: - name: "wasm-opt.exe" - url: "https://github.com/WebAssembly/binaryen/releases/download/version_111/binaryen-version_111-x86_64-windows.tar.gz" - pathInArchive: "binaryen-version_111/bin/wasm-opt.exe" - - # ---------------------------------------------------------------------------------- - - - name: Patch Cargo.toml for pre-release - if: github.ref == 'refs/heads/main' - # After patching the pre-release version, run cargo update. - # This updates the cargo.lock file with the new version numbers and keeps the wheel build from failing - run: | - python3 scripts/version_util.py --patch_prerelease - cargo update -w - - - name: Version check for tagged-release - if: startsWith(github.ref, 'refs/tags/v') - # This call to version_util.py will assert version from Cargo.toml matches git tagged version vX.Y.Z - run: | - python3 scripts/version_util.py --check_version - - - name: Store the expected version - # Find the current cargo version and store it in the GITHUB_ENV var: `expected_version` - shell: bash - run: | - echo "expected_version=$(python3 scripts/version_util.py --bare_cargo_version)" >> $GITHUB_ENV - - - name: Build Wheel - uses: PyO3/maturin-action@v1 - with: - maturin-version: "0.14.10" - manylinux: manylinux_2_31 - container: off - command: build - args: | - --manifest-path rerun_py/Cargo.toml - --release - --target ${{ matrix.target }} - --no-default-features - --features pypi - --out pre-dist - - - name: Install built wheel - if: ${{ matrix.run_tests }} - # First we install the dependencies manually so we can use `--no-index` when installing the wheel. 
- # Then install the wheel using a specific version and --no-index to guarantee we get the version from - # the pre-dist folder. Note we don't use --force-reinstall here because --no-index means it wouldn't - # find the dependencies to reinstall them. - # TODO(jleibs): pull these deps from pyproject.toml - shell: bash - run: | - pip uninstall rerun-sdk - pip install deprecated numpy>=1.23 pyarrow==10.0.1 - pip install rerun-sdk==${{ env.expected_version }} --no-index --find-links pre-dist - - - name: Verify built wheel version - if: ${{ matrix.run_tests }} - shell: bash - run: | - python3 -m rerun --version - which rerun - rerun --version - - - name: Run unit tests - if: ${{ matrix.run_tests }} - shell: bash - run: cd rerun_py/tests && pytest - - - name: Run e2e test - if: ${{ matrix.run_tests }} - shell: bash - run: RUST_LOG=debug scripts/run_python_e2e_test.py --no-build # rerun-sdk is already built and installed - - - name: Unpack the wheel - shell: bash - run: | - mkdir unpack-dist - wheel unpack pre-dist/*.whl --dest unpack-dist - - - name: Get the folder name - shell: bash - run: | - echo "pkg_folder=$(ls unpack-dist)" >> $GITHUB_ENV - - - name: Download RRD - uses: actions/download-artifact@v3 - with: - name: rrd - path: rrd - - - name: Insert the rrd - shell: bash - # If you change the line below you should almost definitely change the `key:` line above by giving it a new, unique name - run: | - cp rrd/colmap_fiat.rrd unpack-dist/${{ env.pkg_folder }}/rerun_sdk/rerun_demo/colmap_fiat.rrd - - - name: Repack the wheel - shell: bash - run: | - mkdir dist - wheel pack unpack-dist/${{ env.pkg_folder }} --dest dist/ - - - name: Upload wheels - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - # --------------------------------------------------------------------------- - - upload_rrd: - name: Upload RRD to GCloud - permissions: - contents: "read" - id-token: "write" - needs: [wheels] - runs-on: "ubuntu-latest" - steps: - - name: Download 
Artifact - uses: actions/download-artifact@v3 - with: - name: rrd - path: rrd - - - id: "auth" - uses: google-github-actions/auth@v1 - with: - workload_identity_provider: ${{ secrets.GOOGLE_WORKLOAD_IDENTITY_PROVIDER }} - service_account: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }} - - - name: Add SHORT_SHA env property with commit short sha - run: echo "SHORT_SHA=`echo ${{github.sha}} | cut -c1-7`" >> $GITHUB_ENV - - - name: "Upload RRD (commit)" - uses: google-github-actions/upload-cloud-storage@v1 - with: - path: "rrd" - destination: "rerun-example-rrd/commit/${{env.SHORT_SHA}}" - parent: false - - - name: "Upload RRD (prerelease)" - if: "!startsWith(github.ref , 'refs/tags/v')" - uses: google-github-actions/upload-cloud-storage@v1 - with: - path: "rrd" - destination: "rerun-example-rrd/prerelease" - parent: false - - - name: "Upload RRD (tagged)" - if: startsWith(github.ref, 'refs/tags/v') - uses: google-github-actions/upload-cloud-storage@v1 - with: - path: "rrd" - destination: "rerun-example-rrd/version/${{github.ref_name}}" - parent: false - - - name: "Upload RRD (latest release)" - if: github.ref == 'latest' - uses: google-github-actions/upload-cloud-storage@v1 - with: - path: "rrd" - destination: "rerun-example-rrd/latest" - parent: false - - # See https://github.com/ncipollo/release-action - pre-release: - name: Pre Release - needs: [wheels] - if: github.ref == 'refs/heads/main' - runs-on: "ubuntu-latest" - steps: - - name: Download Artifact - uses: actions/download-artifact@v3 - with: - name: wheels - path: dist - - # First delete the old prerelease. If we don't do this, we don't get things like - # proper source-archives and changelog info. 
- # https://github.com/dev-drprasad/delete-tag-and-release - - uses: dev-drprasad/delete-tag-and-release@v0.2.1 - with: - tag_name: prerelease - delete_release: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - # Create the actual prerelease - # https://github.com/ncipollo/release-action - - name: GitHub Release - uses: ncipollo/release-action@v1.12.0 - with: - body: ${{ env.PRE_RELEASE_INSTRUCTIONS }} - prerelease: true - artifacts: dist/* - name: "Development Build" - tag: "prerelease" - token: ${{ secrets.GITHUB_TOKEN }} - generateReleaseNotes: true - allowUpdates: true - removeArtifacts: true - replacesArtifacts: true - - # --------------------------------------------------------------------------- - - # This job is run on tags starting with "v", e.g., "v0.1.0" - tagged-release: - name: Release - runs-on: ubuntu-latest - if: startsWith(github.ref, 'refs/tags/v') - needs: [wheels] - steps: - - name: Download Artifact - uses: actions/download-artifact@v3 - with: - name: wheels - path: dist - - # https://github.com/ncipollo/release-action - - name: GitHub Release - uses: ncipollo/release-action@v1.12.0 - with: - prerelease: true - artifacts: dist/* - token: ${{ secrets.GITHUB_TOKEN }} - generateReleaseNotes: true - - - name: Publish to PyPI - uses: PyO3/maturin-action@v1 - env: - # These are both set in the GitHub project configuration - MATURIN_REPOSITORY: ${{ vars.PYPI_REPOSITORY }} - MATURIN_PYPI_TOKEN: ${{ secrets.MATURIN_PYPI_TOKEN }} - with: - command: upload - args: --skip-existing dist/* - - # --------------------------------------------------------------------------- - - py-test-docs: - name: Verify the docs build - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: "pip" - cache-dependency-path: "rerun_py/requirements-doc.txt" - - - name: Install Python dependencies - run: | - pip install --upgrade pip - pip install -r 
rerun_py/requirements-doc.txt - - - name: Build via mkdocs - run: | - mkdocs build -f rerun_py/mkdocs.yml - - py-docs: - name: Build and deploy docs - if: ${{ github.event_name == 'push' }} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # Don't do a shallow clone - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: "pip" - cache-dependency-path: "rerun_py/requirements-doc.txt" - - - name: Install Python dependencies - run: | - pip install --upgrade pip - pip install -r rerun_py/requirements-doc.txt - - - name: Set up git author - run: | - remote_repo="https://${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" - git config --global user.name "${GITHUB_ACTOR}" - git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - # Mike will incrementally update the existing gh-pages branch - # We then check it out, and reset it to a new orphaned branch, which we force-push to origin - # to make sure we don't accumulate unnecessary history in gh-pages branch - - name: Deploy via mike # https://github.com/jimporter/mike - if: startsWith(github.ref, 'refs/tags/v') - run: | - git fetch - mike deploy -F rerun_py/mkdocs.yml --rebase -b gh-pages --prefix docs/python -u ${{github.ref_name}} latest - git checkout gh-pages - git checkout --orphan gh-pages-orphan - git commit -m "Update docs for ${GITHUB_SHA}" - git push origin gh-pages-orphan:gh-pages -f - - # Mike will incrementally update the existing gh-pages branch - # We then check it out, and reset it to a new orphaned branch, which we force-push to origin - # to make sure we don't accumulate unnecessary history in gh-pages branch - - name: Deploy tag via mike # https://github.com/jimporter/mike - if: github.ref == 'refs/heads/main' - run: | - git fetch - mike deploy -F rerun_py/mkdocs.yml --rebase -b gh-pages --prefix docs/python HEAD - git checkout gh-pages - git 
checkout --orphan gh-pages-orphan - git commit -m "Update docs for ${GITHUB_SHA}" - git push origin gh-pages-orphan:gh-pages -f diff --git a/.github/workflows/reusable_bench.yml b/.github/workflows/reusable_bench.yml new file mode 100644 index 000000000000..d155af72ce72 --- /dev/null +++ b/.github/workflows/reusable_bench.yml @@ -0,0 +1,164 @@ +name: Reusable Bench + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + SAVE_BENCHES: + required: false + type: boolean + default: false + BENCH_NAME: + required: false + type: string + default: '' + COMPARE_TO: + required: false + type: string + default: '' + FAIL_ON_ALERT: + required: false + type: boolean + default: false + +concurrency: + group: ${{ inputs.CONCURRENCY }}-bench + cancel-in-progress: true + +env: + PYTHON_VERSION: "3.8" + # web_sys_unstable_apis is required to enable the web_sys clipboard API which egui_web uses + # https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Clipboard.html + # https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html + RUSTFLAGS: --cfg=web_sys_unstable_apis --deny warnings + + # See https://github.com/ericseppanen/cargo-cranky/issues/8 + RUSTDOCFLAGS: --deny warnings --deny rustdoc::missing_crate_level_docs + + # See: https://github.com/marketplace/actions/sccache-action + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + +jobs: + +# --------------------------------------------------------------------------- + + rs-benchmarks: + name: Rust Criterion benchmarks + + permissions: + # contents permission to update benchmark contents in gh-pages branch + contents: write + id-token: "write" + # deployments permission to deploy GitHub pages website + deployments: write + + runs-on: ubuntu-latest-16-cores + + container: + image: rerunio/ci_docker:0.6 + + steps: + - uses: actions/checkout@v3 + + - name: Set up cargo cache + uses: Swatinem/rust-cache@v2 + with: + shared-key: "build-linux" + env-vars: CARGO CC CFLAGS CXX CMAKE RUST 
CACHE_KEY + # Don't update the cache -- it will be updated by the lint job + # TODO(jleibs): this job will likely run before rust.yml updates + # the cache. Better cross-job sequencing would be nice here + save-if: false + + # Sccache will cache everything else + # See: https://github.com/marketplace/actions/sccache-action + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.3 + + - name: Add SHORT_SHA env property with commit short sha + run: echo "SHORT_SHA=`echo ${{github.sha}} | cut -c1-7`" >> $GITHUB_ENV + + - name: Run benchmark + # Use bash shell so we get pipefail behavior with tee + shell: bash + run: | + cargo bench \ + --all-features \ + -p re_arrow_store \ + -p re_data_store \ + -p re_log_encoding \ + -p re_query \ + -p re_tuid \ + -- --output-format=bencher | tee /tmp/${{ env.SHORT_SHA }} + + - id: "auth" + uses: google-github-actions/auth@v1 + with: + workload_identity_provider: ${{ secrets.GOOGLE_WORKLOAD_IDENTITY_PROVIDER }} + service_account: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }} + + - name: 'Set up Cloud SDK' + uses: 'google-github-actions/setup-gcloud@v1' + with: + version: '>= 363.0.0' + + # TODO(jleibs) make this whole thing a python script + - name: "Upload bench to GCS based on SHA" + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: /tmp/${{ env.SHORT_SHA }} + destination: "rerun-builds/benches/" + + - name: Download comparison bench from GCS + if: ${{ inputs.COMPARE_TO != '' }} + run: | + mkdir /tmp/compare/ + gsutil cp gs://rerun-builds/benches/${{inputs.COMPARE_TO}} /tmp/compare/${{ inputs.COMPARE_TO }} + + - name: Compare results with benchcmp + if: ${{ inputs.COMPARE_TO != '' }} + run: cargo benchcmp /tmp/compare/${{ inputs.COMPARE_TO }} /tmp/${{ env.SHORT_SHA }} > /tmp/bench_results.txt + + - name: "Upload bench-results to GCS" + if: ${{ inputs.COMPARE_TO != '' }} + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: /tmp/bench_results.txt + destination: 
"rerun-builds/commit/${{env.SHORT_SHA}}/" + + - name: Store benchmark result + # https://github.com/benchmark-action/github-action-benchmark + uses: benchmark-action/github-action-benchmark@v1 + with: + name: Rust Benchmark + tool: "cargo" + output-file-path: /tmp/${{ env.SHORT_SHA }} + github-token: ${{ secrets.GITHUB_TOKEN }} + + # Show alert with commit comment on detecting possible performance regression + comment-on-alert: true + alert-threshold: "125%" + fail-on-alert: ${{ inputs.FAIL_ON_ALERT }} + comment-always: false # Generates too much GitHub notification spam + + # Save results and push to GitHub only on main + save-data-file: ${{ inputs.SAVE_BENCHES }} + auto-push: ${{ inputs.SAVE_BENCHES }} + gh-pages-branch: gh-pages + benchmark-data-dir-path: dev/bench + max-items-in-chart: 30 + + - name: "Copy bench to named file" + if: ${{ inputs.BENCH_NAME != '' }} + run: cp /tmp/${{ env.SHORT_SHA }} /tmp/${{ inputs.BENCH_NAME }} + + # Don't upload the new named bench until the end in case the names are the same + - name: "Upload named bench to GCS" + if: ${{ inputs.BENCH_NAME != '' }} + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: /tmp/${{ inputs.BENCH_NAME }} + destination: "rerun-builds/benches/" diff --git a/.github/workflows/reusable_build_and_test_wheels.yml b/.github/workflows/reusable_build_and_test_wheels.yml new file mode 100644 index 000000000000..753eac83b394 --- /dev/null +++ b/.github/workflows/reusable_build_and_test_wheels.yml @@ -0,0 +1,300 @@ +name: Reusable Build and Test Wheels + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + MATURIN_FEATURE_FLAGS: + required: false + type: string + default: '--no-default-features --features pypi' + PLATFORM: + required: true + type: string + RELEASE_VERSION: + required: false + type: string + default: 'prerelease' + RRD_ARTIFACT_NAME: + required: false + type: string + default: '' + SAVE_CACHE: + required: false + type: boolean + default: false + 
WHEEL_ARTIFACT_NAME: + required: false + type: string + default: '' + +concurrency: + group: ${{ inputs.CONCURRENCY }}-build-wheels + cancel-in-progress: true + +env: + PYTHON_VERSION: "3.8" + # web_sys_unstable_apis is required to enable the web_sys clipboard API which egui_web uses + # https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Clipboard.html + # https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html + + # TODO(jleibs) --deny warnings causes installation of wasm-bindgen to fail on mac + # RUSTFLAGS: --cfg=web_sys_unstable_apis --deny warnings + RUSTFLAGS: --cfg=web_sys_unstable_apis + + # See https://github.com/ericseppanen/cargo-cranky/issues/8 + RUSTDOCFLAGS: --deny warnings --deny rustdoc::missing_crate_level_docs + + # See: https://github.com/marketplace/actions/sccache-action + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + +jobs: + +# --------------------------------------------------------------------------- + + set-config: + name: Set Config + runs-on: ubuntu-latest + outputs: + RUNNER: ${{ steps.set-config.outputs.runner }} + TARGET: ${{ steps.set-config.outputs.target }} + RUN_TESTS: ${{ steps.set-config.outputs.run_tests }} + CONTAINER: ${{ steps.set-config.outputs.container }} + steps: + - name: Set runner and target based on platform + id: set-config + run: | + case "${{ inputs.platform }}" in + linux) + runner="ubuntu-latest" + target="x86_64-unknown-linux-gnu" + run_tests="true" + container="{'image': 'rerunio/ci_docker:0.6'}" + ;; + windows) + runner="windows-latest" + target="x86_64-pc-windows-msvc" + run_tests="true" + container="null" + ;; + macos-arm) + runner="macos-latest" + target="aarch64-apple-darwin" + run_tests="false" + container="null" + ;; + macos-intel) + runner="macos-latest" + target="x86_64-apple-darwin" + run_tests="false" + container="null" + ;; + *) echo "Invalid platform" && exit 1 ;; + esac + echo "runner=$runner" >> "$GITHUB_OUTPUT" + echo "target=$target" >> "$GITHUB_OUTPUT" + 
echo "run_tests=$run_tests" >> "$GITHUB_OUTPUT" + echo "container=$container" >> "$GITHUB_OUTPUT" + +# --------------------------------------------------------------------------- + + build-wheels: + name: Build Wheels + + needs: [set-config] + + runs-on: ${{ needs.set-config.outputs.RUNNER }} + container: ${{ fromJson(needs.set-config.outputs.CONTAINER) }} + + steps: + - name: Show context + run: | + echo "GITHUB_CONTEXT": $GITHUB_CONTEXT + echo "JOB_CONTEXT": $JOB_CONTEXT + echo "INPUTS_CONTEXT": $INPUTS_CONTEXT + echo "ENV_CONTEXT": $ENV_CONTEXT + env: + ENV_CONTEXT: ${{ toJson(env) }} + GITHUB_CONTEXT: ${{ toJson(github) }} + JOB_CONTEXT: ${{ toJson(job) }} + INPUTS_CONTEXT: ${{ toJson(inputs) }} + + - uses: actions/checkout@v3 + + # Rust-cache will cache our dependencies, which is a large chunk of the build + # See: https://github.com/Swatinem/rust-cache + - name: Set up cargo cache + uses: Swatinem/rust-cache@v2 + with: + shared-key: "build-${{ inputs.PLATFORM }}" + env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY + # Don't update the cache -- it will be updated by the lint job + # TODO(jleibs): this job will likely run before rust.yml updates + # the cache. Better cross-job sequencing would be nice here + save-if: ${{ inputs.SAVE_CACHE }} + + # Sccache will cache everything else + # See: https://github.com/marketplace/actions/sccache-action + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.3 + + # The pip-cache setup logic doesn't work in the ubuntu docker container + # That's probably fine since we bake these deps into the container already + - name: Setup python + if: ${{ inputs.PLATFORM != 'linux' }} + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: "pip" + cache-dependency-path: "rerun_py/requirements-build.txt" + + # These should already be in the docker container, but run for good measure. 
A no-op install + # should be fast, and this way things don't break if we add new packages without rebuilding + # docker + - run: pip install -r rerun_py/requirements-build.txt + + # We have a nice script for that: ./scripts/setup_web.sh + # Unfortunately, we can't run that on Windows, because Windows doesn't come with + # a package manager like grown-up OSes do (at least not the CI version of Windows). + # Also we can't run it on linux because the 20.04 Docker container will install + # an old version of binaryen/wasm-opt that barfs on the `--fast-math` flag + # So we only run the script on macos, and then on Windows we do the parts of the script manually. + # On ubuntu, the correct packages are pre-installed in our docker container. + + - name: Install prerequisites for building the web-viewer Wasm (non-Windows) + if: (inputs.PLATFORM == 'macos-intel') || (inputs.PLATFORM == 'macos-arm') + shell: bash + run: ./scripts/setup_web.sh + + # The first steps of setup_web.sh, for Windows: + - name: Install wasm32 and wasm-bindgen-cli for building the web-viewer Wasm on windows + if: inputs.platform == 'windows' + shell: bash + run: rustup target add wasm32-unknown-unknown && cargo install wasm-bindgen-cli --version 0.2.84 + + # The last step of setup_web.sh, for Windows. + # Since 'winget' is not available within the GitHub runner, we download the package directly: + # See: https://github.com/marketplace/actions/engineerd-configurator + - name: Install binaryen for building the web-viewer Wasm on windows + if: inputs.platform == 'windows' + uses: engineerd/configurator@v0.0.9 + with: + name: "wasm-opt.exe" + url: "https://github.com/WebAssembly/binaryen/releases/download/version_111/binaryen-version_111-x86_64-windows.tar.gz" + pathInArchive: "binaryen-version_111/bin/wasm-opt.exe" + + - name: Patch Cargo.toml for pre-release + if: ${{ inputs.RELEASE_VERSION == 'prerelease' }} + # After patching the pre-release version, run cargo update. 
+ # This updates the cargo.lock file with the new version numbers and keeps the wheel build from failing + run: | + python3 scripts/version_util.py --patch_prerelease + cargo update -w + + - name: Store the expected version + # Find the current cargo version and store it in the output: `expected_version` + shell: bash + id: expected_version + run: | + echo "EXPECTED_VERSION=$(python3 scripts/version_util.py --bare_cargo_version)" >> "$GITHUB_OUTPUT" + + - name: Check the expected version + if: ${{ inputs.RELEASE_VERSION != 'prerelease' }} + run: | + if [ "${{steps.expected_version.outputs.EXPECTED_VERSION}}" != "${{inputs.RELEASE_VERSION}}" ]; then + echo "Error: EXPECTED_VERSION (${{steps.expected_version.outputs.EXPECTED_VERSION}}) does not match RELEASE_VERSION (${{inputs.RELEASE_VERSION}})" + exit 1 + fi + + - name: Build Wheel + uses: PyO3/maturin-action@v1 + with: + maturin-version: "0.14.10" + manylinux: manylinux_2_31 + container: off + command: build + sccache: 'true' + args: | + --manifest-path rerun_py/Cargo.toml + --release + --target ${{ needs.set-config.outputs.TARGET }} + ${{ inputs.MATURIN_FEATURE_FLAGS }} + --out dist + + - name: Save wheel artifact + if: ${{ inputs.WHEEL_ARTIFACT_NAME != '' }} + uses: actions/upload-artifact@v3 + with: + name: ${{inputs.WHEEL_ARTIFACT_NAME}} + path: dist + + # --------------------------------------------------------------------------- + # Test the wheel + + - name: Install wheel dependencies + if: needs.set-config.outputs.RUN_TESTS == 'true' + # First we install the dependencies manually so we can use `--no-index` when installing the wheel. 
+ # This needs to be a separate step for some reason or the following step fails + # TODO(jleibs): pull these deps from pyproject.toml + # TODO(jleibs): understand why deps can't be installed in the same step as the wheel + shell: bash + run: | + pip install deprecated numpy>=1.23 pyarrow==10.0.1 pytest==7.1.2 + + - name: Install built wheel + if: needs.set-config.outputs.RUN_TESTS == 'true' + # Now install the wheel using a specific version and --no-index to guarantee we get the version from + # the pre-dist folder. Note we don't use --force-reinstall here because --no-index means it wouldn't + # find the dependencies to reinstall them. + shell: bash + run: | + pip uninstall rerun-sdk + pip install rerun-sdk==${{ steps.expected_version.outputs.EXPECTED_VERSION }} --no-index --find-links dist + + - name: Verify built wheel version + if: needs.set-config.outputs.RUN_TESTS == 'true' + shell: bash + run: | + python3 -m rerun --version + which rerun + rerun --version + + - name: Run unit tests + if: needs.set-config.outputs.RUN_TESTS == 'true' + shell: bash + run: cd rerun_py/tests && pytest + + - name: Run e2e test + if: needs.set-config.outputs.RUN_TESTS == 'true' + shell: bash + run: RUST_LOG=debug scripts/run_python_e2e_test.py --no-build # rerun-sdk is already built and installed + + - name: Cache RRD dataset + if: needs.set-config.outputs.RUN_TESTS == 'true' + id: dataset + uses: actions/cache@v3 + with: + path: examples/python/colmap/dataset/ + # TODO(jleibs): Derive this key from the invocation below + key: colmap-dataset-colmap-fiat-v0 + + - name: Generate Embedded RRD file + if: needs.set-config.outputs.RUN_TESTS == 'true' + shell: bash + # If you change the line below you should almost definitely change the `key:` line above by giving it a new, unique name + run: | + mkdir rrd + pip install -r examples/python/colmap/requirements.txt + python3 examples/python/colmap/main.py --dataset colmap_fiat --resize 800x600 --save rrd/colmap_fiat.rrd + + # All platforms 
are currently creating the same rrd file, upload one of them + - name: Save RRD artifact + if: ${{ (needs.set-config.outputs.RUN_TESTS == 'true') && (inputs.RRD_ARTIFACT_NAME != '') }} + uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.RRD_ARTIFACT_NAME }} + path: rrd + diff --git a/.github/workflows/reusable_build_web.yml b/.github/workflows/reusable_build_web.yml new file mode 100644 index 000000000000..21b085356689 --- /dev/null +++ b/.github/workflows/reusable_build_web.yml @@ -0,0 +1,94 @@ +name: Reusable Build Web + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + RELEASE_VERSION: + required: false + type: string + default: 'prerelease' + EXTRA_FLAGS: + required: false + type: string + default: '' + +concurrency: + group: ${{ inputs.CONCURRENCY }}-build-web + cancel-in-progress: true + +env: + # web_sys_unstable_apis is required to enable the web_sys clipboard API which egui_web uses + # https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Clipboard.html + # https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html + RUSTFLAGS: --cfg=web_sys_unstable_apis --deny warnings + + # See https://github.com/ericseppanen/cargo-cranky/issues/8 + RUSTDOCFLAGS: --deny warnings --deny rustdoc::missing_crate_level_docs + +jobs: + + rs-build-web-viewer: + name: Build Web (wasm32 + wasm-bindgen) + permissions: + contents: "read" + id-token: "write" + + runs-on: ubuntu-latest-16-cores + + container: + image: rerunio/ci_docker:0.6 + + steps: + - uses: actions/checkout@v3 + + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: 1.67.0 + target: wasm32-unknown-unknown + override: true + + - name: Set up cargo cache + uses: Swatinem/rust-cache@v2 + with: + env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY + # See: https://github.com/rerun-io/rerun/pull/497 + save-if: false + + - name: Patch Cargo.toml for pre-release + if: ${{ inputs.RELEASE_VERSION == 'prerelease' }} + # After patching the 
pre-release version, run cargo update. + # This updates the cargo.lock file with the new version numbers and keeps the wheel build from failing + run: | + python3 scripts/version_util.py --patch_prerelease + cargo update -w + + - name: Store the expected version + # Find the current cargo version and store it in the output: `expected_version` + shell: bash + id: expected_version + run: | + echo "EXPECTED_VERSION=$(python3 scripts/version_util.py --bare_cargo_version)" >> "$GITHUB_OUTPUT" + + - name: Check the expected version + if: ${{ inputs.RELEASE_VERSION != 'prerelease' }} + run: | + if [ "${{steps.expected_version.outputs.EXPECTED_VERSION}}" != "${{inputs.RELEASE_VERSION}}" ]; then + echo "Error: EXPECTED_VERSION (${{steps.expected_version.outputs.EXPECTED_VERSION}}) does not match RELEASE_VERSION (${{inputs.RELEASE_VERSION}})" + exit 1 + fi + + - name: Build web-viewer (release) + uses: actions-rs/cargo@v1 + with: + command: run + args: --locked -p re_build_web_viewer -- --release ${{ inputs.EXTRA_FLAGS }} + + - name: Upload web assets + uses: actions/upload-artifact@v3 + with: + name: web_viewer + path: web_viewer diff --git a/.github/workflows/reusable_checks.yml b/.github/workflows/reusable_checks.yml new file mode 100644 index 000000000000..5377077e2843 --- /dev/null +++ b/.github/workflows/reusable_checks.yml @@ -0,0 +1,328 @@ +name: 'Checks: Lints, Tests, Docs' + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + SAVE_CACHE: + required: false + type: boolean + default: false + SAVE_PY_DOCS: + required: false + type: boolean + default: false + SAVE_PY_DOCS_AS: + required: false + type: string + default: "" + SAVE_RUST_DOCS: + required: false + type: boolean + default: false + +concurrency: + group: ${{ inputs.CONCURRENCY }}-checks + cancel-in-progress: true + +env: + PYTHON_VERSION: "3.8" + # web_sys_unstable_apis is required to enable the web_sys clipboard API which egui_web uses + # 
https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Clipboard.html + # https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html + RUSTFLAGS: --cfg=web_sys_unstable_apis --deny warnings + + # See https://github.com/ericseppanen/cargo-cranky/issues/8 + RUSTDOCFLAGS: --deny warnings --deny rustdoc::missing_crate_level_docs + + # See: https://github.com/marketplace/actions/sccache-action + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + +jobs: + +# --------------------------------------------------------------------------- + + py-lints: + name: Python lints (black, mypy, flake8) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: extractions/setup-just@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + just-version: 1.5 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: "pip" + cache-dependency-path: "rerun_py/requirements-lint.txt" + + - name: Install Python dependencies + run: | + pip install --upgrade pip + pip install -r rerun_py/requirements-lint.txt + + - name: Lint Python + run: | + just py-lint + + - name: Check requirements + run: | + just py-requirements + +# --------------------------------------------------------------------------- + + py-test-docs: + name: Test Python Docs + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + cache: "pip" + cache-dependency-path: "rerun_py/requirements-doc.txt" + + - name: Install Python dependencies + run: | + pip install --upgrade pip + pip install -r rerun_py/requirements-doc.txt + + - name: Build via mkdocs + run: | + mkdocs build -f rerun_py/mkdocs.yml + +# --------------------------------------------------------------------------- + + rs-lints: + name: Rust lints (fmt, check, cranky, tests, doc) + runs-on: ubuntu-latest-16-cores + container: + image: rerunio/ci_docker:0.6 + steps: + - 
name: Show context + run: | + echo "GITHUB_CONTEXT": $GITHUB_CONTEXT + echo "JOB_CONTEXT": $JOB_CONTEXT + echo "INPUTS_CONTEXT": $INPUTS_CONTEXT + echo "ENV_CONTEXT": $ENV_CONTEXT + env: + ENV_CONTEXT: ${{ toJson(env) }} + GITHUB_CONTEXT: ${{ toJson(github) }} + JOB_CONTEXT: ${{ toJson(job) }} + INPUTS_CONTEXT: ${{ toJson(inputs) }} + + - uses: actions/checkout@v3 + + # Rust-cache will cache our dependencies, which is a large chunk of the build + # See: https://github.com/Swatinem/rust-cache + - name: Set up cargo cache + uses: Swatinem/rust-cache@v2 + with: + shared-key: "build-linux" + env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY + # Don't update the cache -- it will be updated by the lint job + # TODO(jleibs): this job will likely run before rust.yml updates + # the cache. Better cross-job sequencing would be nice here + save-if: ${{ inputs.SAVE_CACHE }} + + # Sccache will cache everything else + # See: https://github.com/marketplace/actions/sccache-action + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.3 + + + # First do our check with --locked to make sure `Cargo.lock` is up to date + - name: Check all features + uses: actions-rs/cargo@v1 + with: + command: check + args: --locked --all-features + + - name: Rustfmt + uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all -- --check + + - name: Cranky + uses: actions-rs/cargo@v1 + with: + command: cranky + args: --all-targets --all-features -- --deny warnings + + # Check a few important permutations of the feature flags for our `rerun` library: + - name: Check rerun with `--no-default-features`` + uses: actions-rs/cargo@v1 + with: + command: cranky + args: --locked -p rerun --no-default-features + + - name: Check rerun with `--features sdk` + uses: actions-rs/cargo@v1 + with: + command: cranky + args: --locked -p rerun --no-default-features --features sdk + + - name: Test doc-tests + uses: actions-rs/cargo@v1 + with: + command: test + args: --doc --all-features + + - name: 
cargo doc + uses: actions-rs/cargo@v1 + with: + command: doc + args: --no-deps --all-features + + - name: cargo doc --document-private-items + uses: actions-rs/cargo@v1 + with: + command: doc + args: --document-private-items --no-deps --all-features + + - name: Test + uses: actions-rs/cargo@v1 + with: + command: test + args: --all-targets --all-features + +# --------------------------------------------------------------------------- + + rs-check-wasm: + name: Check Rust web build (wasm32 + wasm-bindgen) + runs-on: ubuntu-latest-16-cores + container: + image: rerunio/ci_docker:0.6 + steps: + - uses: actions/checkout@v3 + + - uses: actions-rs/toolchain@v1 + with: + profile: default + toolchain: 1.67.1 + target: wasm32-unknown-unknown + override: true + + - name: Set up cargo cache + uses: Swatinem/rust-cache@v2 + with: + shared-key: "build-wasm" + env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY + # See: https://github.com/rerun-io/rerun/pull/497 + save-if: ${{ inputs.SAVE_CACHE }} + + # See: https://github.com/marketplace/actions/sccache-action + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.3 + + - name: clippy check re_viewer wasm32 + run: ./scripts/clippy_wasm.sh + + - name: Check re_renderer examples wasm32 + uses: actions-rs/cargo@v1 + with: + command: check + args: --locked --target wasm32-unknown-unknown --target-dir target_wasm -p re_renderer --examples + + - name: Build web-viewer (debug) + uses: actions-rs/cargo@v1 + with: + command: run + args: --locked -p re_build_web_viewer -- --debug + +# --------------------------------------------------------------------------- + + toml-lints: + name: Lint TOML files + runs-on: ubuntu-latest + steps: + + - uses: actions/checkout@v3 + + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: 1.67.1 + override: true + + - name: Set up cargo cache + uses: Swatinem/rust-cache@v2 + with: + # See: https://github.com/rerun-io/rerun/pull/497 + env-vars: CARGO CC CFLAGS CXX CMAKE RUST 
CACHE_KEY + # Don't update the cache -- it will be updated by the lint job + # TODO(jleibs): this job will likely run before rust.yml updates + # the cache. Better cross-job sequencing would be nice here + save-if: false + + - name: Install taplo-cli + uses: baptiste0928/cargo-install@v1 + with: + crate: taplo-cli + + - name: Taplo check + run: | + taplo fmt --check + +# --------------------------------------------------------------------------- + + misc-rerun-lints: + name: Rerun lints + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + + - name: Rerun lints + run: | + ./scripts/lint.py + + - name: Check for too large files + run: | + ./scripts/check_large_files.sh + +# --------------------------------------------------------------------------- + + spell-check: + name: Spell Check + runs-on: ubuntu-latest + steps: + - name: Checkout Actions Repository + uses: actions/checkout@v3 + + - name: Check spelling of entire workspace + uses: crate-ci/typos@master + +# --------------------------------------------------------------------------- + + rs-cargo-deny: + name: Cargo Deny ${{ matrix.platform }} + runs-on: ubuntu-latest + container: + image: rerunio/ci_docker:0.6 + + steps: + - uses: actions/checkout@v3 + + # See: https://github.com/marketplace/actions/sccache-action + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.3 + + - name: Cargo Deny + shell: bash + id: expected_version + run: ./scripts/cargo_deny.sh diff --git a/.github/workflows/reusable_deploy_docs.yml b/.github/workflows/reusable_deploy_docs.yml new file mode 100644 index 000000000000..7f65d55ed540 --- /dev/null +++ b/.github/workflows/reusable_deploy_docs.yml @@ -0,0 +1,158 @@ +name: 'Reusable Deploy Docs' + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + PY_DOCS_VERSION_NAME: + required: true + type: string + UPDATE_LATEST: + required: false + 
type: boolean + default: false + +concurrency: + group: ${{ inputs.CONCURRENCY }}-deploy-docs + cancel-in-progress: true + +env: + PYTHON_VERSION: "3.8" + # web_sys_unstable_apis is required to enable the web_sys clipboard API which egui_web uses + # https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Clipboard.html + # https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html + RUSTFLAGS: --cfg=web_sys_unstable_apis --deny warnings + + # See https://github.com/ericseppanen/cargo-cranky/issues/8 + RUSTDOCFLAGS: --deny warnings --deny rustdoc::missing_crate_level_docs + + # See: https://github.com/marketplace/actions/sccache-action + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + +jobs: + +# --------------------------------------------------------------------------- + + py-deploy-docs: + name: Python + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # Don't do a shallow clone + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + cache: "pip" + cache-dependency-path: "rerun_py/requirements-doc.txt" + + - name: Install Python dependencies + run: | + pip install --upgrade pip + pip install -r rerun_py/requirements-doc.txt + + - name: Set up git author + run: | + remote_repo="https://${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" + git config --global user.name "${GITHUB_ACTOR}" + git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Mike will incrementally update the existing gh-pages branch + # We then check it out, and reset it to a new orphaned branch, which we force-push to origin + # to make sure we don't accumulate unnecessary history in gh-pages branch + - name: Deploy via mike # https://github.com/jimporter/mike + if: ${{ inputs.UPDATE_LATEST }} + run: | + git fetch + mike deploy -F rerun_py/mkdocs.yml --rebase -b gh-pages --prefix docs/python -u 
${{inputs.PY_DOCS_VERSION_NAME}} latest + git checkout gh-pages + git checkout --orphan gh-pages-orphan + git commit -m "Update docs for ${GITHUB_SHA}" + git push origin gh-pages-orphan:gh-pages -f + + # Mike will incrementally update the existing gh-pages branch + # We then check it out, and reset it to a new orphaned branch, which we force-push to origin + # to make sure we don't accumulate unnecessary history in gh-pages branch + - name: Deploy tag via mike # https://github.com/jimporter/mike + if: ${{ ! inputs.UPDATE_LATEST }} + run: | + git fetch + mike deploy -F rerun_py/mkdocs.yml --rebase -b gh-pages --prefix docs/python ${{inputs.PY_DOCS_VERSION_NAME}} + git checkout gh-pages + git checkout --orphan gh-pages-orphan + git commit -m "Update docs for ${GITHUB_SHA}" + git push origin gh-pages-orphan:gh-pages -f + + +# --------------------------------------------------------------------------- + + rs-deploy-docs: + name: Rust + runs-on: ubuntu-latest-16-cores + container: + image: rerunio/ci_docker:0.6 + steps: + - name: Show context + run: | + echo "GITHUB_CONTEXT": $GITHUB_CONTEXT + echo "JOB_CONTEXT": $JOB_CONTEXT + echo "INPUTS_CONTEXT": $INPUTS_CONTEXT + echo "ENV_CONTEXT": $ENV_CONTEXT + env: + ENV_CONTEXT: ${{ toJson(env) }} + GITHUB_CONTEXT: ${{ toJson(github) }} + JOB_CONTEXT: ${{ toJson(job) }} + INPUTS_CONTEXT: ${{ toJson(inputs) }} + + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # Don't do a shallow clone since we need to push gh-pages + + # Rust-cache will cache our dependencies, which is a large chunk of the build + # See: https://github.com/Swatinem/rust-cache + - name: Set up cargo cache + uses: Swatinem/rust-cache@v2 + with: + shared-key: "build-linux" + env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY + save-if: false + + # Sccache will cache everything else + # See: https://github.com/marketplace/actions/sccache-action + - name: Run sccache-cache + uses: mozilla-actions/sccache-action@v0.0.3 + + - name: cargo doc 
--document-private-items + uses: actions-rs/cargo@v1 + with: + command: doc + args: --document-private-items --no-deps --all-features + + - name: Set up git author + run: | + remote_repo="https://${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" + git config --global user.name "${GITHUB_ACTOR}" + git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up ghp-import + run: pip install ghp-import + + - name: Patch in a redirect page + run: echo "" > target/doc/index.html + env: + REDIRECT_CRATE: rerun + + # See: https://github.com/c-w/ghp-import + - name: Deploy the docs + run: | + git fetch + python3 -m ghp_import -n -p -x docs/rust/head target/doc/ -m "Update the rust docs" diff --git a/.github/workflows/reusable_pip_index.yml b/.github/workflows/reusable_pip_index.yml new file mode 100644 index 000000000000..6e3203a011a0 --- /dev/null +++ b/.github/workflows/reusable_pip_index.yml @@ -0,0 +1,52 @@ +name: Reusable Pip Index + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + +concurrency: + group: ${{ inputs.CONCURRENCY }}-pip-index + cancel-in-progress: true + +jobs: + pr-summary: + name: Create a Pip Index file + + permissions: + contents: "read" + id-token: "write" + + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.x + + - id: "auth" + uses: google-github-actions/auth@v1 + with: + workload_identity_provider: ${{ secrets.GOOGLE_WORKLOAD_IDENTITY_PROVIDER }} + service_account: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }} + + - name: 'Set up Cloud SDK' + uses: 'google-github-actions/setup-gcloud@v1' + with: + version: '>= 363.0.0' + + - name: Install deps + run: pip install google-cloud-storage Jinja2 + + - name: Render pip index and upload to gcloud + run: | + python scripts/generate_prerelease_pip_index.py \ + --commit ${{ 
github.sha }} \ + --upload + diff --git a/.github/workflows/reusable_pr_summary.yml b/.github/workflows/reusable_pr_summary.yml new file mode 100644 index 000000000000..b74d8c0799a8 --- /dev/null +++ b/.github/workflows/reusable_pr_summary.yml @@ -0,0 +1,60 @@ +name: Reusable PR Summary + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + PR_NUMBER: + required: true + type: string + +concurrency: + group: ${{ inputs.CONCURRENCY }}-pr-summary + cancel-in-progress: true + +jobs: + pr-summary: + name: Create HTML summary for PR + + permissions: + contents: "read" + id-token: "write" + pull-requests: "write" + + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.x + + - id: "auth" + uses: google-github-actions/auth@v1 + with: + workload_identity_provider: ${{ secrets.GOOGLE_WORKLOAD_IDENTITY_PROVIDER }} + service_account: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }} + + - name: 'Set up Cloud SDK' + uses: 'google-github-actions/setup-gcloud@v1' + with: + version: '>= 363.0.0' + + - name: Install deps + run: pip install google-cloud-storage Jinja2 PyGithub # NOLINT + + - name: Render HTML template + run: | + python scripts/generate_pr_summary.py \ + --github-token ${{secrets.GITHUB_TOKEN}} \ + --github-repository ${GITHUB_REPOSITORY} \ + --pr-number ${{ inputs.PR_NUMBER }} \ + --upload + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ inputs.PR_NUMBER }} diff --git a/.github/workflows/reusable_run_notebook.yml b/.github/workflows/reusable_run_notebook.yml new file mode 100644 index 000000000000..c2ccd4377316 --- /dev/null +++ b/.github/workflows/reusable_run_notebook.yml @@ -0,0 +1,82 @@ +name: Reusable Build and Upload Notebook + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + WHEEL_ARTIFACT_NAME: + required: false + type: string + default: '' + # We need this because PRs 
use a merged commit but we really want + # to track uploads based on the source commit. + UPLOAD_COMMIT_OVERRIDE: + required: false + type: string + default: '' + +concurrency: + group: ${{ inputs.CONCURRENCY }}-run-notebook + cancel-in-progress: true + +jobs: + + run-notebook: + name: Run notebook + + permissions: + contents: "read" + id-token: "write" + + runs-on: ubuntu-latest + + container: + image: rerunio/ci_docker:0.6 + + steps: + - uses: actions/checkout@v3 + + - name: Download Wheel + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.WHEEL_ARTIFACT_NAME }} + path: wheel + + - name: Install built wheel + shell: bash + run: | + pip install --find-links wheel rerun-sdk + + - name: Install Deps + shell: bash + run: | + pip install -r examples/python/notebook/requirements.txt + + - name: Create notebook + shell: bash + run: | + jupyter nbconvert --to=html --ExecutePreprocessor.enabled=True examples/python/notebook/cube.ipynb --output /tmp/cube.html + + - id: "auth" + uses: google-github-actions/auth@v1 + with: + workload_identity_provider: ${{ secrets.GOOGLE_WORKLOAD_IDENTITY_PROVIDER }} + service_account: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }} + + - name: Add SHORT_SHA env property with commit short sha + run: | + if [ -z "${{ inputs.UPLOAD_COMMIT_OVERRIDE }}" ]; then + USED_SHA=${{ github.sha }} + else + USED_SHA=${{ inputs.UPLOAD_COMMIT_OVERRIDE }} + fi + echo "SHORT_SHA=$(echo $USED_SHA | cut -c1-7)" >> $GITHUB_ENV + + - name: "Upload Notebook" + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "/tmp/cube.html" + destination: "rerun-builds/commit/${{env.SHORT_SHA}}/notebooks" + parent: false diff --git a/.github/workflows/reusable_upload_web.yml b/.github/workflows/reusable_upload_web.yml new file mode 100644 index 000000000000..41bcd4896d11 --- /dev/null +++ b/.github/workflows/reusable_upload_web.yml @@ -0,0 +1,143 @@ +name: Reusable Upload Web + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + 
ADHOC_NAME: + type: string + required: false + default: '' + MARK_PRERELEASE_FOR_MAINLINE: + required: false + type: boolean + default: false + MARK_TAGGED_VERSION: + required: false + type: boolean + default: false + RELEASE_VERSION: + required: false + type: string + default: 'prerelease' + RRD_ARTIFACT_NAME: + required: false + type: string + default: '' + # We need this because PRs use a merged commit but we really want + # to track uploads based on the source commit. + UPLOAD_COMMIT_OVERRIDE: + required: false + type: string + default: '' + +concurrency: + group: ${{ inputs.CONCURRENCY }}-upload-web + cancel-in-progress: true + +jobs: + + upload-web: + name: Upload web build to google cloud (wasm32 + wasm-bindgen) + permissions: + contents: "read" + id-token: "write" + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Download RRD + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.RRD_ARTIFACT_NAME }} + path: rrd + + - name: Download Web Viewer + uses: actions/download-artifact@v3 + with: + name: web_viewer + path: web_viewer + + + # Upload the wasm, html etc to a Google cloud bucket: + - id: "auth" + uses: google-github-actions/auth@v1 + with: + workload_identity_provider: ${{ secrets.GOOGLE_WORKLOAD_IDENTITY_PROVIDER }} + service_account: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }} + + - name: Add SHORT_SHA env property with commit short sha + run: | + if [ -z "${{ inputs.UPLOAD_COMMIT_OVERRIDE }}" ]; then + USED_SHA=${{ github.sha }} + else + USED_SHA=${{ inputs.UPLOAD_COMMIT_OVERRIDE }} + fi + echo "SHORT_SHA=$(echo $USED_SHA | cut -c1-7)" >> $GITHUB_ENV + + - name: "Upload web-viewer (commit)" + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "web_viewer" + destination: "rerun-web-viewer/commit/${{env.SHORT_SHA}}" + parent: false + + - name: "Upload RRD (commit)" + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "rrd" + destination: "rerun-example-rrd/commit/${{env.SHORT_SHA}}" + 
parent: false + + - name: "Upload web-viewer (prerelease)" + if: inputs.MARK_PRERELEASE_FOR_MAINLINE + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "web_viewer" + destination: "rerun-web-viewer/prerelease" + parent: false + + + - name: "Upload RRD (prerelease)" + if: inputs.MARK_PRERELEASE_FOR_MAINLINE + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "rrd" + destination: "rerun-example-rrd/prerelease" + parent: false + + - name: "Upload web-viewer (tagged)" + if: inputs.MARK_TAGGED_VERSION + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "web_viewer" + destination: "rerun-web-viewer/version/${{inputs.RELEASE_VERSION}}" + parent: false + + - name: "Upload RRD (tagged)" + if: inputs.MARK_TAGGED_VERSION + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "rrd" + destination: "rerun-example-rrd/version/${{inputs.RELEASE_VERSION}}" + parent: false + + - name: "Upload web-viewer (adhoc)" + if: ${{ inputs.ADHOC_NAME != '' }} + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "web_viewer" + destination: "rerun-web-viewer/adhoc/${{inputs.ADHOC_NAME}}" + parent: false + + - name: "Upload RRD (adhoc)" + if: ${{ inputs.ADHOC_NAME != '' }} + uses: google-github-actions/upload-cloud-storage@v1 + with: + path: "rrd" + destination: "rerun-example-rrd/adhoc/${{inputs.ADHOC_NAME}}" + parent: false diff --git a/.github/workflows/reusable_upload_wheels.yml b/.github/workflows/reusable_upload_wheels.yml new file mode 100644 index 000000000000..4820598e00dd --- /dev/null +++ b/.github/workflows/reusable_upload_wheels.yml @@ -0,0 +1,103 @@ +name: Reusable Upload Wheels + +on: + workflow_call: + inputs: + CONCURRENCY: + required: true + type: string + RRD_ARTIFACT_NAME: + required: false + type: string + default: '' + WHEEL_ARTIFACT_NAME: + required: false + type: string + default: '' + +concurrency: + group: ${{ inputs.CONCURRENCY }}-upload-wheels + cancel-in-progress: true + +jobs: + + 
upload-wheel: + name: Upload Wheel to google cloud + + runs-on: ubuntu-latest + + container: + image: rerunio/ci_docker:0.6 + + permissions: + contents: "read" + id-token: "write" + + steps: + - name: Show context + run: | + echo "GITHUB_CONTEXT": $GITHUB_CONTEXT + echo "JOB_CONTEXT": $JOB_CONTEXT + echo "INPUTS_CONTEXT": $INPUTS_CONTEXT + echo "ENV_CONTEXT": $ENV_CONTEXT + echo "MATRIX_CONTEXT": $MATRIX_CONTEXT + env: + ENV_CONTEXT: ${{ toJson(env) }} + GITHUB_CONTEXT: ${{ toJson(github) }} + JOB_CONTEXT: ${{ toJson(job) }} + INPUTS_CONTEXT: ${{ toJson(inputs) }} + + - uses: actions/checkout@v3 + + - name: Download RRD + uses: actions/download-artifact@v3 + with: + name: ${{inputs.RRD_ARTIFACT_NAME}} + path: rrd + + - name: Download Wheel + uses: actions/download-artifact@v3 + with: + name: ${{inputs.WHEEL_ARTIFACT_NAME}} + path: pre-dist + + - name: Unpack the wheel + shell: bash + run: | + mkdir unpack-dist + wheel unpack pre-dist/*.whl --dest unpack-dist + + - name: Get the folder name + shell: bash + id: get_folder_name + run: | + echo "PKG_FOLDER=$(ls unpack-dist)" >> "$GITHUB_OUTPUT" + + - name: Insert the rrd + shell: bash + # If you change the line below you should almost definitely change the `key:` line + # in 'Cache RRD dataset' in reusable_build_and_test.yml + run: | + cp rrd/colmap_fiat.rrd unpack-dist/${{ steps.get_folder_name.outputs.PKG_FOLDER }}/rerun_sdk/rerun_demo/colmap_fiat.rrd + + - name: Repack the wheel + shell: bash + run: | + mkdir dist + wheel pack unpack-dist/${{ steps.get_folder_name.outputs.PKG_FOLDER }} --dest dist/ + + - id: "auth" + uses: google-github-actions/auth@v1 + with: + workload_identity_provider: ${{ secrets.GOOGLE_WORKLOAD_IDENTITY_PROVIDER }} + service_account: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }} + + - name: Add SHORT_SHA env property with commit short sha + run: echo "SHORT_SHA=`echo ${{github.sha}} | cut -c1-7`" >> $GITHUB_ENV + + - name: "Upload Wheel" + uses: google-github-actions/upload-cloud-storage@v1 + with: + 
path: "dist" + destination: "rerun-builds/commit/${{env.SHORT_SHA}}/wheels" + parent: false diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml deleted file mode 100644 index f48703b53395..000000000000 --- a/.github/workflows/rust.yml +++ /dev/null @@ -1,368 +0,0 @@ -name: CI (Rust) - -on: - push: - branches: - - "main" - pull_request: - workflow_dispatch: - inputs: - force_update_web_build: - description: "Upload web build to google cloud" - required: true - default: false - type: boolean - force_run_benchmarks: - description: "Run the rust benchmarks" - required: true - default: false - type: boolean - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number }} # Cancel previous CI jobs on the same branch - cancel-in-progress: true - -env: - # web_sys_unstable_apis is required to enable the web_sys clipboard API which egui_web uses - # https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Clipboard.html - # https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html - RUSTFLAGS: --cfg=web_sys_unstable_apis --deny warnings - - # See https://github.com/ericseppanen/cargo-cranky/issues/8 - RUSTDOCFLAGS: --deny warnings --deny rustdoc::missing_crate_level_docs - -permissions: - # deployments permission to deploy GitHub pages website - deployments: write - # contents permission to update benchmark contents in gh-pages branch - contents: write - -jobs: - # See the benchmarks at https://ref.rerun.io/dev/bench/ - rs-benchmarks: - name: Rust Criterion benchmarks - - # Only run the rs-benchmarks job when a commit gets pushed to main or we manually specify it - if: ${{ github.ref == 'refs/heads/main' || github.event.inputs.force_run_benchmarks }} - - runs-on: ubuntu-latest-16-cores - container: - image: rerunio/ci_docker:0.6 - env: - RUSTFLAGS: ${{env.RUSTFLAGS}} - RUSTDOCFLAGS: ${{env.RUSTDOCFLAGS}} - steps: - - uses: actions/checkout@v3 - - - name: Set up cargo cache - uses: Swatinem/rust-cache@v2 - with: - 
env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY - # Don't update the cache -- it will be updated by the lint job - # TODO(jleibs): this job will likely run before rust.yml updates - # the cache. Better cross-job sequencing would be nice here - save-if: false - - - name: Run benchmark - # Use bash shell so we get pipefail behavior with tee - shell: bash - run: | - cargo bench \ - --all-features \ - -p re_arrow_store \ - -p re_data_store \ - -p re_log_encoding \ - -p re_query \ - -p re_tuid \ - -- --output-format=bencher | tee output.txt - - - name: Store benchmark result - # https://github.com/benchmark-action/github-action-benchmark - uses: benchmark-action/github-action-benchmark@v1 - with: - name: Rust Benchmark - tool: "cargo" - output-file-path: output.txt - github-token: ${{ secrets.GITHUB_TOKEN }} - - # Show alert with commit comment on detecting possible performance regression - comment-on-alert: true - alert-threshold: "150%" - fail-on-alert: true - comment-always: false # Generates too much GitHub notification spam - - # Save, results and push to GitHub only on main - save-data-file: ${{ github.ref == 'refs/heads/main' }} - auto-push: ${{ github.ref == 'refs/heads/main' }} - gh-pages-branch: gh-pages - benchmark-data-dir-path: dev/bench - max-items-in-chart: 30 - - # --------------------------------------------------------------------------- - - rs-lints: - name: Rust lints (fmt, check, cranky, tests, doc) - runs-on: ubuntu-latest-16-cores - container: - image: rerunio/ci_docker:0.6 - env: - RUSTFLAGS: ${{env.RUSTFLAGS}} - RUSTDOCFLAGS: ${{env.RUSTDOCFLAGS}} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # Don't do a shallow clone since we need to push gh-pages - - - name: Set up cargo cache - uses: Swatinem/rust-cache@v2 - with: - env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY - # See: https://github.com/rerun-io/rerun/pull/497 - save-if: ${{ github.event_name == 'push'}} - - # First do our check with --locked to make sure 
`Cargo.lock` is up to date - - name: Check all features - uses: actions-rs/cargo@v1 - with: - command: check - args: --locked --all-features - - - name: Rustfmt - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --all -- --check - - - name: Cranky - uses: actions-rs/cargo@v1 - with: - command: cranky - args: --all-targets --all-features -- --deny warnings - - # -------------------------------------------------------------------------------- - # Check a few important permutations of the feature flags for our `rerun` library: - - name: Check rerun with `--no-default-features`` - uses: actions-rs/cargo@v1 - with: - command: cranky - args: --locked -p rerun --no-default-features - - - name: Check rerun with `--features sdk` - uses: actions-rs/cargo@v1 - with: - command: cranky - args: --locked -p rerun --no-default-features --features sdk - # -------------------------------------------------------------------------------- - - - name: Test doc-tests - uses: actions-rs/cargo@v1 - with: - command: test - args: --doc --all-features - - - name: cargo doc - uses: actions-rs/cargo@v1 - with: - command: doc - args: --no-deps --all-features - - - name: cargo doc --document-private-items - uses: actions-rs/cargo@v1 - with: - command: doc - args: --document-private-items --no-deps --all-features - - - name: Test - uses: actions-rs/cargo@v1 - with: - command: test - args: --all-targets --all-features - - - name: Set up git author - if: ${{ github.event_name == 'push' }} - run: | - remote_repo="https://${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" - git config --global user.name "${GITHUB_ACTOR}" - git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Set up ghp-import - if: ${{ github.event_name == 'push' }} - run: pip install ghp-import - - - name: Patch in a redirect page - if: ${{ github.event_name == 'push' }} - run: echo "" > target/doc/index.html - env: - REDIRECT_CRATE: rerun - 
- # See: https://github.com/c-w/ghp-import - - name: Deploy the docs - if: ${{ github.event_name == 'push' }} - run: | - git fetch - python3 -m ghp_import -n -p -x docs/rust/head target/doc/ -m "Update the rust docs" - - # --------------------------------------------------------------------------- - - rs-check-wasm: - name: Check Rust web build (wasm32 + wasm-bindgen) - runs-on: ubuntu-latest-16-cores - container: - image: rerunio/ci_docker:0.6 - env: - RUSTFLAGS: ${{env.RUSTFLAGS}} - RUSTDOCFLAGS: ${{env.RUSTDOCFLAGS}} - steps: - - uses: actions/checkout@v2 - - - uses: actions-rs/toolchain@v1 - with: - profile: default - toolchain: 1.67.1 - target: wasm32-unknown-unknown - override: true - - - name: Set up cargo cache - uses: Swatinem/rust-cache@v2 - with: - env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY - # See: https://github.com/rerun-io/rerun/pull/497 - save-if: ${{ github.event_name == 'push'}} - - - name: clippy check re_viewer wasm32 - run: ./scripts/clippy_wasm.sh - - - name: Check re_renderer examples wasm32 - uses: actions-rs/cargo@v1 - with: - command: check - args: --locked --target wasm32-unknown-unknown --target-dir target_wasm -p re_renderer --examples - - - name: Build web-viewer (debug) - uses: actions-rs/cargo@v1 - with: - command: run - args: --locked -p re_build_web_viewer -- --debug - - # --------------------------------------------------------------------------- - - rs-build-web-viewer: - name: Upload web build to google cloud (wasm32 + wasm-bindgen) - permissions: - contents: "read" - id-token: "write" - - if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') || github.event.inputs.force_update_web_build - runs-on: ubuntu-latest-16-cores - container: - image: rerunio/ci_docker:0.6 - env: - RUSTFLAGS: ${{env.RUSTFLAGS}} - RUSTDOCFLAGS: ${{env.RUSTDOCFLAGS}} - steps: - - uses: actions/checkout@v2 - - - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: 1.67.0 - target: wasm32-unknown-unknown - 
override: true - - - name: Set up cargo cache - uses: Swatinem/rust-cache@v2 - with: - env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY - # See: https://github.com/rerun-io/rerun/pull/497 - save-if: ${{ github.event_name == 'push'}} - - - name: Patch Cargo.toml for pre-release - if: "!startsWith(github.ref , 'refs/tags/v')" - # After patching the pre-release version, run cargo update. - # This updates the cargo.lock file with the new version numbers and keeps the wheel build from failing - run: | - python3 scripts/version_util.py --patch_prerelease - cargo update -w - - - name: Build web-viewer (release) - uses: actions-rs/cargo@v1 - with: - command: run - args: --locked -p re_build_web_viewer -- --release - - # Upload the wasm, html etc to a Google cloud bucket: - - id: "auth" - uses: google-github-actions/auth@v1 - with: - workload_identity_provider: ${{ secrets.GOOGLE_WORKLOAD_IDENTITY_PROVIDER }} - service_account: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }} - - - name: Add SHORT_SHA env property with commit short sha - run: echo "SHORT_SHA=`echo ${{github.sha}} | cut -c1-7`" >> $GITHUB_ENV - - - name: "Upload web-viewer (commit)" - uses: google-github-actions/upload-cloud-storage@v1 - with: - path: "web_viewer" - destination: "rerun-web-viewer/commit/${{env.SHORT_SHA}}" - parent: false - - - name: "Upload web-viewer (prerelease)" - if: github.ref == 'refs/heads/main' - uses: google-github-actions/upload-cloud-storage@v1 - with: - path: "web_viewer" - destination: "rerun-web-viewer/prerelease" - parent: false - - - name: "Upload web-viewer (tagged)" - if: startsWith(github.ref, 'refs/tags/v') - uses: google-github-actions/upload-cloud-storage@v1 - with: - path: "web_viewer" - destination: "rerun-web-viewer/version/${{github.ref_name}}" - parent: false - - - name: "Upload web-viewer (latest release)" - if: github.ref == 'latest' - uses: google-github-actions/upload-cloud-storage@v1 - with: - path: "web_viewer" - destination: "rerun-web-viewer/latest" - parent: false 
- - # --------------------------------------------------------------------------- - - rs-cargo-deny: - name: Check Rust dependencies (cargo-deny) - runs-on: ubuntu-latest-16-cores - container: - image: rerunio/ci_docker:0.6 - env: - RUSTFLAGS: ${{env.RUSTFLAGS}} - RUSTDOCFLAGS: ${{env.RUSTDOCFLAGS}} - - # TODO(emilk): remove this matrix when https://github.com/EmbarkStudios/cargo-deny/issues/324 is resolved - strategy: - fail-fast: false - matrix: - platform: - - i686-pc-windows-gnu - - i686-pc-windows-msvc - - i686-unknown-linux-gnu - - wasm32-unknown-unknown - - x86_64-apple-darwin - - x86_64-pc-windows-gnu - - x86_64-pc-windows-msvc - - x86_64-unknown-linux-gnu - - x86_64-unknown-redox - - steps: - - uses: actions/checkout@v3 - - uses: EmbarkStudios/cargo-deny-action@v1 - with: - command: check - log-level: error - arguments: --all-features --target ${{ matrix.platform }} diff --git a/.github/workflows/toml.yml b/.github/workflows/toml.yml deleted file mode 100644 index f3a60ec74885..000000000000 --- a/.github/workflows/toml.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: CI (TOML) - -on: - pull_request: - push: - branches: - - "main" - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number }} # Cancel previous CI jobs on the same branch - cancel-in-progress: true - -jobs: - toml-lints: - name: Lint TOML files - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: 1.67.1 - override: true - - - name: Set up cargo cache - uses: Swatinem/rust-cache@v2 - with: - # See: https://github.com/rerun-io/rerun/pull/497 - env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY - # Don't update the cache -- it will be updated by the lint job - # TODO(jleibs): this job will likely run before rust.yml updates - # the cache. 
Better cross-job sequencing would be nice here - save-if: false - - - name: Install taplo-cli - uses: baptiste0928/cargo-install@v1 - with: - crate: taplo-cli - - - name: Taplo check - run: | - taplo fmt --check diff --git a/.github/workflows/typos.yml b/.github/workflows/typos.yml deleted file mode 100644 index c3bc84125f51..000000000000 --- a/.github/workflows/typos.yml +++ /dev/null @@ -1,17 +0,0 @@ -# https://github.com/crate-ci/typos -# Add exceptions to _typos.toml -# install and run locally: cargo install typos-cli && typos - -name: Spell Check -on: [pull_request] - -jobs: - run: - name: Spell Check - runs-on: ubuntu-latest - steps: - - name: Checkout Actions Repository - uses: actions/checkout@v2 - - - name: Check spelling of entire workspace - uses: crate-ci/typos@master diff --git a/.vscode/extensions.json b/.vscode/extensions.json index aae305c13ea6..32731cdfadeb 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -2,17 +2,18 @@ // See https://go.microsoft.com/fwlink/?LinkId=827846 // for the documentation about the extensions.json format "recommendations": [ - "charliermarsh.ruff", // Ruff for linting + "charliermarsh.ruff", + "github.vscode-github-actions", "ms-python.python", - "ms-vsliveshare.vsliveshare", // Live Share + "ms-vsliveshare.vsliveshare", "polymeilex.wgsl", - "rust-lang.rust-analyzer", // Rust-analyzer + "rust-lang.rust-analyzer", "serayuzgur.crates", "streetsidesoftware.code-spell-checker", - "tamasfe.even-better-toml", // TOML LSP - "vadimcn.vscode-lldb", // CodeLLDB - "wayou.vscode-todo-highlight", // TODO Highlight - "webfreak.debug", // Native Debug - "zxh404.vscode-proto3", // vscode-proto3 + "tamasfe.even-better-toml", + "vadimcn.vscode-lldb", + "wayou.vscode-todo-highlight", + "webfreak.debug", + "zxh404.vscode-proto3", ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index c52ea2c7178d..1722a88e03b3 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,82 +1,75 @@ { - 
"editor.formatOnSave": true, - "editor.semanticTokenColorCustomizations": { - "rules": { - "*.unsafe:rust": "#eb5046" - } - }, - "files.insertFinalNewline": true, - "files.trimTrailingWhitespace": true, - "files.exclude": { - "env/**": true, - "target/**": true, - "target_ra/**": true, - "target_wasm/**": true, - }, - "files.autoGuessEncoding": true, - "python.formatting.provider": "black", - "python.formatting.blackArgs": [ - "--config", - "rerun_py/pyproject.toml" - ], - "python.linting.mypyEnabled": true, - "python.linting.enabled": true, - "cSpell.words": [ - "andreas", - "bbox", - "bindgroup", - "colormap", - "emath", - "framebuffer", - "hoverable", - "ilog", - "jumpflooding", - "Keypoint", - "memoffset", - "nyud", - "objectron", - "Readback", - "readbacks", - "Skybox", - "smallvec", - "swapchain", - "texcoords", - "Tonemapper", - "tonemapping", - "voronoi", - "vram", - "Wgsl" - ], - // don't share a cargo lock with rust-analyzer. - // see https://github.com/rerun-io/rerun/pull/519 for rationale - "rust-analyzer.checkOnSave.overrideCommand": [ - "cargo", - "cranky", - "--target-dir=target_ra", - "--workspace", - "--message-format=json", - "--all-targets", - "--all-features", // --all-features will set the `__ci` feature flag, which stops crates/re_web_viewer_server/build.rs from building the web viewer - ], - "rust-analyzer.cargo.buildScripts.overrideCommand": [ - "cargo", - "check", - "--quiet", - "--target-dir=target_ra", - "--workspace", - "--message-format=json", - "--all-targets", - "--all-features", // --all-features will set the `__ci` feature flag, which stops crates/re_web_viewer_server/build.rs from building the web viewer - ], - // Our build scripts are generating code. - // Having Rust Analyzer do this while doing other builds can lead to catastrophic failures. - // INCLUDING attempts to publish a new release! 
- "rust-analyzer.cargo.buildScripts.enable": false, - "python.analysis.extraPaths": [ - "rerun_py/rerun_sdk" - ], - "ruff.args": [ - "--config", - "rerun_py/pyproject.toml" - ], + "editor.formatOnSave": true, + "editor.semanticTokenColorCustomizations": { + "rules": { + "*.unsafe:rust": "#eb5046" + } + }, + "files.insertFinalNewline": true, + "files.trimTrailingWhitespace": true, + "files.exclude": { + "env/**": true, + "target/**": true, + "target_ra/**": true, + "target_wasm/**": true + }, + "files.autoGuessEncoding": true, + "python.formatting.provider": "black", + "python.formatting.blackArgs": ["--config", "rerun_py/pyproject.toml"], + "python.linting.mypyEnabled": true, + "python.linting.enabled": true, + "cSpell.words": [ + "andreas", + "bbox", + "bindgroup", + "colormap", + "emath", + "framebuffer", + "hoverable", + "ilog", + "jumpflooding", + "Keypoint", + "memoffset", + "nyud", + "objectron", + "Readback", + "readbacks", + "Skybox", + "smallvec", + "swapchain", + "texcoords", + "Tonemapper", + "tonemapping", + "unsmoothed", + "voronoi", + "vram", + "Wgsl" + ], + // don't share a cargo lock with rust-analyzer. + // see https://github.com/rerun-io/rerun/pull/519 for rationale + "rust-analyzer.checkOnSave.overrideCommand": [ + "cargo", + "cranky", + "--target-dir=target_ra", + "--workspace", + "--message-format=json", + "--all-targets", + "--all-features" // --all-features will set the `__ci` feature flag, which stops crates/re_web_viewer_server/build.rs from building the web viewer + ], + "rust-analyzer.cargo.buildScripts.overrideCommand": [ + "cargo", + "check", + "--quiet", + "--target-dir=target_ra", + "--workspace", + "--message-format=json", + "--all-targets", + "--all-features" // --all-features will set the `__ci` feature flag, which stops crates/re_web_viewer_server/build.rs from building the web viewer + ], + // Our build scripts are generating code. + // Having Rust Analyzer do this while doing other builds can lead to catastrophic failures. 
+ // INCLUDING attempts to publish a new release! + "rust-analyzer.cargo.buildScripts.enable": false, + "python.analysis.extraPaths": ["rerun_py/rerun_sdk"], + "ruff.args": ["--config", "rerun_py/pyproject.toml"] } diff --git a/BUILD.md b/BUILD.md index 011e4d29b023..433d64953270 100644 --- a/BUILD.md +++ b/BUILD.md @@ -1,22 +1,22 @@ # Building Rerun -This is a guide to how to build Rerun. +This is a guide to how to build Rerun. ## See also -* [`rerun_py/README.md`](rerun_py/README.md) - build instructions for Python SDK -* [`ARCHITECTURE.md`](ARCHITECTURE.md) -* [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md) -* [`CODE_STYLE.md`](CODE_STYLE.md) -* [`CONTRIBUTING.md`](CONTRIBUTING.md) -* [`RELEASES.md`](RELEASES.md) +- [`rerun_py/README.md`](rerun_py/README.md) - build instructions for Python SDK +- [`ARCHITECTURE.md`](ARCHITECTURE.md) +- [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md) +- [`CODE_STYLE.md`](CODE_STYLE.md) +- [`CONTRIBUTING.md`](CONTRIBUTING.md) +- [`RELEASES.md`](RELEASES.md) ## Getting started with the repository. -* Install the Rust toolchain: -* `git clone git@github.com:rerun-io/rerun.git && cd rerun` -* Run `./scripts/setup_dev.sh`. -* Make sure `cargo --version` prints `1.67.1` once you are done +- Install the Rust toolchain: +- `git clone git@github.com:rerun-io/rerun.git && cd rerun` +- Run `./scripts/setup_dev.sh`. +- Make sure `cargo --version` prints `1.67.1` once you are done ### Apple-silicon Macs @@ -35,7 +35,8 @@ Python API docs can be found at and are built Rust documentation is hosted on . You can build them locally with: `cargo doc --all-features --no-deps --open` ## Build and install the Rerun Python SDK -Rerun is available as a package on PyPi and can be installed with `pip install rerun-sdk` + +Rerun is available as a package on PyPi and can be installed with `pip install depthai-viewer` Additionally, prebuilt dev wheels from head of main are available at . 
@@ -64,18 +65,19 @@ From here on out, we assume you have this virtualenv activated. ### Build and install You need to setup your build environment once with + ```sh ./scripts/setup.sh ``` Then install the Rerun SDK with: + ``` pip install ./rerun_py ``` > Note: If you are unable to upgrade pip to version `>=21.3`, you need to pass `--use-feature=in-tree-build` to the `pip install` command. - ## Improving compile times As of today, we link everything statically in both debug and release builds, which makes custom linkers and split debuginfo the two most impactful tools we have at our disposal in order to improve compile times. @@ -87,9 +89,11 @@ These tools can configured through your `Cargo` configuration, available at `$HO On x64 macOS, use the [zld](https://github.com/michaeleisel/zld) linker and keep debuginfo in a single separate file. Pre-requisites: + - Install [zld](https://github.com/michaeleisel/zld): `brew install michaeleisel/zld/zld`. `config.toml` (x64): + ```toml [target.x86_64-apple-darwin] rustflags = [ @@ -103,6 +107,7 @@ rustflags = [ On Apple-silicon Mac (M1, M2), the default settings are already pretty good. The default linker is just as good as `zld`. Do NOT set `split-debuginfo=packed`, as that will make linking a lot slower. You can set `split-debuginfo=unpacked` for a small improvement. `config.toml` (M1, M2): + ```toml [target.aarch64-apple-darwin] rustflags = [ @@ -116,9 +121,11 @@ rustflags = [ On Linux, use the [mold](https://github.com/rui314/mold) linker and keep DWARF debuginfo in separate files. Pre-requisites: + - Install [mold](https://github.com/rui314/mold) through your package manager. `config.toml`: + ```toml [target.x86_64-unknown-linux-gnu] linker = "clang" @@ -135,13 +142,16 @@ rustflags = [ On Windows, use LLVM's `lld` linker and keep debuginfo in a single separate file. 
Pre-requisites: + - Install `lld`: + ``` cargo install -f cargo-binutils rustup component add llvm-tools-preview ``` `config.toml`: + ```toml [target.x86_64-pc-windows-msvc] linker = "rust-lld.exe" diff --git a/CHANGELOG.md b/CHANGELOG.md index 1a7093067a19..5d2060d7981b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,183 @@ ## [Unreleased](https://github.com/rerun-io/rerun/compare/latest...HEAD) … +## [0.5.0](https://github.com/rerun-io/rerun/compare/v0.4.0...v0.5.0) - Jupyter MVP, GPU-based picking & colormapping, new datastore! + +### Overview & Highlights + +This new release adds MVP support for embedding Rerun in Jupyter notebooks, and brings significant performance improvements across all layers of the stack. + +* Rerun can now be embedded in Jupyter notebooks + * Tested with Jupyter Notebook Classic, Jupyter Lab, VSCode & Google Colab; checkout our [How-to guide](https://www.rerun.io/docs/howto/notebook) + * Try it out live on [Google Colab](https://colab.research.google.com/drive/1R9I7s4o6wydQC_zkybqaSRFTtlEaked_?usp=sharing) +* All colormapping tasks are now done directly on the GPU + * This yields _very significant_ performance improvements for colormapping heavy workload (e.g. 
segmentation) + * Try it out in our new [`segment_anything` example](https://www.rerun.io/docs/getting-started/examples#segment-anything) that shows off the latest models from Meta AI +* GPU picking & hovering now works with all of our primitives, including meshes & depth clouds + * This fixes all the shortcomings of the previous CPU-based system + * Rerun's automatic backprojection of depth textures ("depth clouds") is now feature complete + * Try it out in our updated [`nyud` example](https://www.rerun.io/docs/getting-started/examples#nyud) +* Our datastore has been completely revamped to more closely match our latest data model + * This yields _very significant_ performance improvements for workloads with many events + * Checkout [this post](https://github.com/rerun-io/rerun/issues/1619#issuecomment-1511046649) for a detailed walkthrough of the changes + +### In Detail + +#### 🐍 Python SDK +- Document that we also accept colors in 0-1 floats [#1740](https://github.com/rerun-io/rerun/pull/1740) +- Don't initialize an SDK session if we are only going to be launching the app [#1768](https://github.com/rerun-io/rerun/pull/1768) +- Allow torch tensors for `log_rigid3` [#1769](https://github.com/rerun-io/rerun/pull/1769) +- Always send `recording_id` as part of `LogMsg` [#1778](https://github.com/rerun-io/rerun/pull/1778) +- New `reset_time` API [#1826](https://github.com/rerun-io/rerun/pull/1826) [#1854](https://github.com/rerun-io/rerun/pull/1854) +- Always flush when we remove a sink [#1830](https://github.com/rerun-io/rerun/pull/1830) +- More robust wait for exit condition during .serve() [#1939](https://github.com/rerun-io/rerun/pull/1939) + +#### 🪳 Bug Fixes +- Fix broken outlines (hover/select effect) for lines [#1724](https://github.com/rerun-io/rerun/pull/1724) +- Fix logged obb being displayed with half of the requested size [#1749](https://github.com/rerun-io/rerun/pull/1749) (thanks [@BenjaminDev](https://github.com/BenjaminDev)!) 
+- Fix `log_obb` usage [#1761](https://github.com/rerun-io/rerun/pull/1761) +- Always create the `log_time` timeline [#1763](https://github.com/rerun-io/rerun/pull/1763) +- Fix undo/redo selection shortcut/action changing selection history without changing selection [#1765](https://github.com/rerun-io/rerun/pull/1765) +- Fix various crashes [#1780](https://github.com/rerun-io/rerun/pull/1780) +- Fix crash when trying to do picking on depth clouds [d94ca3dd35e73e1984ccb969d0c7abd0d3e0faa9](https://github.com/rerun-io/rerun/commit/d94ca3dd35e73e1984ccb969d0c7abd0d3e0faa9) +- ci: fix benchmarks [#1799](https://github.com/rerun-io/rerun/pull/1799) +- ci: fix `cargo deny` [#1806](https://github.com/rerun-io/rerun/pull/1806) +- Fix "too many points" crash [#1822](https://github.com/rerun-io/rerun/pull/1822) +- Allow re-use of `RowId`s if no conflict is possible [#1832](https://github.com/rerun-io/rerun/pull/1832) +- Reduce memory used by staging belts on Web [#1836](https://github.com/rerun-io/rerun/pull/1836) +- Test and handle all tensor dtypes as images [#1840](https://github.com/rerun-io/rerun/pull/1840) +- Fix the python build when running without `web_viewer` enabled [#1856](https://github.com/rerun-io/rerun/pull/1856) +- Error instead of `expect` inside `msg_encode` [#1857](https://github.com/rerun-io/rerun/pull/1857) +- Fix shutdown race condition in `re_sdk_comms` client [#1861](https://github.com/rerun-io/rerun/pull/1861) +- Fix broken instance picking in presence of images [#1876](https://github.com/rerun-io/rerun/pull/1876) +- Make sure JPEGs are always decoded [#1884](https://github.com/rerun-io/rerun/pull/1884) +- Fix crash when saving store to file [#1909](https://github.com/rerun-io/rerun/pull/1909) +- Don't clean up `LogDb`s that only contain a `BeginRecordingMsg` [#1914](https://github.com/rerun-io/rerun/pull/1914) +- Fix picking entities with image + another object (or label) twice [#1908](https://github.com/rerun-io/rerun/pull/1908) +- Fix double 
clicking camera no longer focusing on said camera [#1911](https://github.com/rerun-io/rerun/pull/1911) +- Fix annotation images sometimes drawn in the background [#1933](https://github.com/rerun-io/rerun/pull/1933) +- Use `zipfile` python library instead of `unzip` command in `arkitscene` demo [#1936](https://github.com/rerun-io/rerun/pull/1936) +- Fix backslashes in `arkitscene` rigid transformation path [#1938](https://github.com/rerun-io/rerun/pull/1938) +- Fix hover/select highlights when picking single points in a scene with multiple point clouds [#1942](https://github.com/rerun-io/rerun/pull/1942) +- Fix hovering depth clouds [#1943](https://github.com/rerun-io/rerun/pull/1943) + +#### 🚀 Performance Improvements +- batching 4: retire `MsgBundle` + batching support in transport layer [#1679](https://github.com/rerun-io/rerun/pull/1679) +- Optimize the depth-cloud shader when `depth=0` [#1729](https://github.com/rerun-io/rerun/pull/1729) +- `arrow2_convert` primitive (de)serialization benchmarks [#1742](https://github.com/rerun-io/rerun/pull/1742) +- `arrow2` `estimated_bytes_size` benchmarks [#1743](https://github.com/rerun-io/rerun/pull/1743) +- `arrow2` erased refcounted clones benchmarks [#1745](https://github.com/rerun-io/rerun/pull/1745) +- benchmarks for common vector ops across `smallvec`/`tinyvec`/std [#1747](https://github.com/rerun-io/rerun/pull/1747) +- Columnar `TimePoint`s in data tables and during transport [#1767](https://github.com/rerun-io/rerun/pull/1767) +- Compile with `panic = "abort"` [#1813](https://github.com/rerun-io/rerun/pull/1813) +- Process 2D points per entities like 3D points [#1820](https://github.com/rerun-io/rerun/pull/1820) +- re_query: use latest data types (`DataRow`/`DataCell`) [#1828](https://github.com/rerun-io/rerun/pull/1828) +- Depth cloud textures are now cached frame-to-frame [#1913](https://github.com/rerun-io/rerun/pull/1913) + +#### 🧑‍🏫 Examples +- Add new `ARKitScenes` example 
[#1538](https://github.com/rerun-io/rerun/pull/1538) (thanks [@pablovela5620](https://github.com/pablovela5620)!) +- New example code for Facebook research's `segment-anything` [#1788](https://github.com/rerun-io/rerun/pull/1788) +- Add `minimal_options` example for Rust SDK [#1773](https://github.com/rerun-io/rerun/pull/1773) (thanks [@h3mosphere](https://github.com/h3mosphere)!) +- Remove manual depth projection from `car` and `nyud` examples [#1869](https://github.com/rerun-io/rerun/pull/1869) +- Always spawn instead of fork in multiprocessing example [#1922](https://github.com/rerun-io/rerun/pull/1922) +- Add `--num-frames` arg to canny (webcam) example [#1923](https://github.com/rerun-io/rerun/pull/1923) +- Add argument parsing to `rerun_demo` [#1925](https://github.com/rerun-io/rerun/pull/1925) +- Join threads at end of `multithreading` example [#1934](https://github.com/rerun-io/rerun/pull/1934) + +#### 📚 Docs +- Add `typing_extensions` to `requirements-doc.txt` [#1786](https://github.com/rerun-io/rerun/pull/1786) +- Fix typos in notebook readme [#1852](https://github.com/rerun-io/rerun/pull/1852) +- Update docs related to notebook [#1915](https://github.com/rerun-io/rerun/pull/1915) + +#### 🖼 UI Improvements +- Hover rays for tracked 3D cameras [#1751](https://github.com/rerun-io/rerun/pull/1751) +- Collapse space-view by default if there is only one child [#1762](https://github.com/rerun-io/rerun/pull/1762) +- Option to show scene bounding box [#1770](https://github.com/rerun-io/rerun/pull/1770) +- Assign default colors to class-ids when annotation context is missing [#1783](https://github.com/rerun-io/rerun/pull/1783) +- Add Restart command and keyboard shortcut for moving time to start of timeline [#1802](https://github.com/rerun-io/rerun/pull/1802) (thanks [@h3mosphere](https://github.com/h3mosphere)!) 
+- New option to disable persistent storage [#1825](https://github.com/rerun-io/rerun/pull/1825) +- Show previews of colormaps when selecting them [#1846](https://github.com/rerun-io/rerun/pull/1846) +- Smooth out scroll wheel input for camera zooming [#1920](https://github.com/rerun-io/rerun/pull/1920) + +#### 🤷‍♂️ Other Viewer Improvements +- Change `EntityPathHash` to be 64 bit [#1723](https://github.com/rerun-io/rerun/pull/1723) +- Central `GpuReadback` handling for re_viewer, experimental space view screenshots [#1717](https://github.com/rerun-io/rerun/pull/1717) +- Readback depth from GPU picking [#1752](https://github.com/rerun-io/rerun/pull/1752) +- Use GPU picking for points, streamline/share picking code some more [#1814](https://github.com/rerun-io/rerun/pull/1814) +- Use GPU picking for line(like) primitives, fix `interactive` flags [#1829](https://github.com/rerun-io/rerun/pull/1829) +- Use GPU colormapping when showing images in the GUI [#1865](https://github.com/rerun-io/rerun/pull/1865) + +#### 🕸️ Web +- Make CI publish `latest` tagged web-viewer to `app.rerun.io` [#1725](https://github.com/rerun-io/rerun/pull/1725) +- Implement `re_tuid::Tuid::random()` on web [#1796](https://github.com/rerun-io/rerun/pull/1796) +- Refactor the relationship between the assorted web / websocket servers [#1844](https://github.com/rerun-io/rerun/pull/1844) +- Notebooks: make `presentation_id` consistent and use data-attribute for rrd [#1881](https://github.com/rerun-io/rerun/pull/1881) +- 2.5GB before GC kick in on web [#1944](https://github.com/rerun-io/rerun/pull/1944) + +#### 🎨 Renderer Improvements +- GPU based picking with points [#1721](https://github.com/rerun-io/rerun/pull/1721) +- improved renderer label handling [#1731](https://github.com/rerun-io/rerun/pull/1731) +- Improved readback data handling [#1734](https://github.com/rerun-io/rerun/pull/1734) +- GPU based mesh picking [#1737](https://github.com/rerun-io/rerun/pull/1737) +- Improve dealing with raw 
buffers for texture read/write [#1744](https://github.com/rerun-io/rerun/pull/1744) +- GPU colormapping, first step [#1835](https://github.com/rerun-io/rerun/pull/1835) +- GPU tensor colormapping [#1841](https://github.com/rerun-io/rerun/pull/1841) +- GPU picking for depth clouds [#1849](https://github.com/rerun-io/rerun/pull/1849) +- Implement billinear filtering of textures [#1850](https://github.com/rerun-io/rerun/pull/1850) [#1859](https://github.com/rerun-io/rerun/pull/1859) [#1860](https://github.com/rerun-io/rerun/pull/1860) +- Refactor: remove `GpuTexture2DHandle::invalid` [#1866](https://github.com/rerun-io/rerun/pull/1866) +- Fix filtering artifact for non-color images [#1886](https://github.com/rerun-io/rerun/pull/1886) +- Refactor: Add helper functions to `GpuTexture2DHandle` [#1900](https://github.com/rerun-io/rerun/pull/1900) + +#### 🛢 Datastore Improvements +- Datastore: revamp bench suite [#1733](https://github.com/rerun-io/rerun/pull/1733) +- Datastore revamp 1: new indexing model & core datastructures [#1727](https://github.com/rerun-io/rerun/pull/1727) +- Datastore revamp 2: serialization & formatting [#1735](https://github.com/rerun-io/rerun/pull/1735) +- Datastore revamp 3: efficient incremental stats [#1739](https://github.com/rerun-io/rerun/pull/1739) +- Datastore revamp 4: sunset `MsgId` [#1785](https://github.com/rerun-io/rerun/pull/1785) +- Datastore revamp 5: `DataStore::to_data_tables()` [#1791](https://github.com/rerun-io/rerun/pull/1791) +- Datastore revamp 6: sunset `LogMsg` storage + save store to disk [#1795](https://github.com/rerun-io/rerun/pull/1795) +- Datastore revamp 7: garbage collection [#1801](https://github.com/rerun-io/rerun/pull/1801) +- Incremental metadata registry stats [#1833](https://github.com/rerun-io/rerun/pull/1833) + +#### ✨ Other Enhancement + +#### 🗣 Merged RFCs +- RFC: datastore state of the union & end-to-end batching [#1610](https://github.com/rerun-io/rerun/pull/1610) + +#### 🧑‍💻 Dev-experience +- 
Post-release cleanup [#1726](https://github.com/rerun-io/rerun/pull/1726) +- Remove unnecessary dependencies [#1711](https://github.com/rerun-io/rerun/pull/1711) (thanks [@vsuryamurthy](https://github.com/vsuryamurthy)!) +- Use copilot markers in PR template [#1784](https://github.com/rerun-io/rerun/pull/1784) +- re_format: barebone support for custom formatting [#1776](https://github.com/rerun-io/rerun/pull/1776) +- Refactor: Add new helper crate `re_log_encoding` [#1772](https://github.com/rerun-io/rerun/pull/1772) +- `setup_web.sh` supports pacman package manager [#1797](https://github.com/rerun-io/rerun/pull/1797) (thanks [@urholaukkarinen](https://github.com/urholaukkarinen)!) +- Add `rerun --strict`: crash if any warning or error is logged [#1812](https://github.com/rerun-io/rerun/pull/1812) +- End-to-end testing of python logging -> store ingestion [#1817](https://github.com/rerun-io/rerun/pull/1817) +- Fix e2e test on CI: Don't try to re-build `rerun-sdk` [#1821](https://github.com/rerun-io/rerun/pull/1821) +- Install the rerun-sdk in CI using `--no-index` and split out linux wheel build to run first [#1838](https://github.com/rerun-io/rerun/pull/1838) +- Remove more unused dependencies [#1863](https://github.com/rerun-io/rerun/pull/1863) +- Improve end-to-end testing slightly [#1862](https://github.com/rerun-io/rerun/pull/1862) +- Turn off benchmarks comment in each PR [#1872](https://github.com/rerun-io/rerun/pull/1872) +- Fix double-negation in `scripts/run_python_e2e_test.py` [#1896](https://github.com/rerun-io/rerun/pull/1896) +- Improve PR template with better comment, and no copilot by default [#1901](https://github.com/rerun-io/rerun/pull/1901) +- Optimize `generate_changelog.py` [#1912](https://github.com/rerun-io/rerun/pull/1912) + +#### 🤷‍♂️ Other +- Fix videos for GitHub in `CHANGELOG.md` [af7d3b192157f942e35f64d3561a9a8dbcc18bfa](https://github.com/rerun-io/rerun/commit/af7d3b192157f942e35f64d3561a9a8dbcc18bfa) +- Don't run 3rd party bench 
suites on CI [#1787](https://github.com/rerun-io/rerun/pull/1787) +- Remove `TensorTrait` [#1819](https://github.com/rerun-io/rerun/pull/1819) +- Disable wheel tests for `x86_64-apple-darwin` [#1853](https://github.com/rerun-io/rerun/pull/1853) +- Update `enumflags2` to non-yanked version [#1874](https://github.com/rerun-io/rerun/pull/1874) +- Collect extra egui features into the main `Cargo.toml` [#1926](https://github.com/rerun-io/rerun/pull/1926) +- `just rs-run-all` [b14087b40bd805c95f030a4c7d3fb7a0482e13f4](https://github.com/rerun-io/rerun/commit/b14087b40bd805c95f030a4c7d3fb7a0482e13f4) +- `just py-run-all-{native|web|rrd}` [#1927](https://github.com/rerun-io/rerun/pull/1927) + ## [0.4.0](https://github.com/rerun-io/rerun/compare/v0.3.1...v0.4.0) - Outlines, web viewer and performance improvements https://user-images.githubusercontent.com/1220815/228241887-03b311e2-80e9-4541-9281-6d334a15ab04.mp4 -## Overview & Highlights +### Overview & Highlights * Add support for mesh vertex colors [#1671](https://github.com/rerun-io/rerun/pull/1671) * Lower memory use [#1535](https://github.com/rerun-io/rerun/pull/1535) * Improve garbage collection [#1560](https://github.com/rerun-io/rerun/pull/1560) diff --git a/Cargo.lock b/Cargo.lock index 60f09380c940..23241a28a3c6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "ab_glyph" -version = "0.2.18" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcdbc68024b653943864d436fe8a24b028095bc1cf91a8926f8241e4aaffe59" +checksum = "fe21446ad43aa56417a767f3e2f3d7c4ca522904de1dd640529a76e9c5c3b33c" dependencies = [ "ab_glyph_rasterizer", "owned_ttf_parser", @@ -14,15 +14,15 @@ dependencies = [ [[package]] name = "ab_glyph_rasterizer" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "330223a1aecc308757b9926e9391c9b47f8ef2dbd8aea9df88312aea18c5e8d6" +checksum = 
"c71b1793ee61086797f5c80b6efa2b8ffa6d5dd703f118545808a7f2e27f7046" [[package]] name = "accesskit" -version = "0.9.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4803cf8c252f374ae6bfbb341e49e5a37f7601f2ce74a105927a663eba952c67" +checksum = "704d532b1cd3d912bb37499c55a81ac748cc1afa737eedd100ba441acdd47d38" dependencies = [ "enumn", "serde", @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.17.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" dependencies = [ "gimli", ] @@ -56,9 +56,9 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if", "const-random", @@ -79,12 +79,12 @@ dependencies = [ [[package]] name = "android-activity" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4165a1aef703232031b40a6e8908c2f9e314d495f11aa7f98db75d39a497cc6a" +checksum = "7c77a0045eda8b888c76ea473c2b0515ba6f471d318f8927c5c72240937035a6" dependencies = [ "android-properties", - "bitflags", + "bitflags 1.3.2", "cc", "jni-sys", "libc", @@ -116,18 +116,58 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" +[[package]] +name = "anstream" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "342258dd14006105c2b75ab1bd7543a03bdf0cfc94383303ac212a04939dff6f" +dependencies = [ + "anstyle", + "anstyle-parse", + 
"anstyle-wincon", + "concolor-override", + "concolor-query", + "is-terminal", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" + +[[package]] +name = "anstyle-parse" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7d1bb534e9efed14f3e5f44e7dd1a4f709384023a4165199a4241e18dff0116" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-wincon" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3127af6145b149f3287bb9a0d10ad9c5692dba8c53ad48285e5bec4063834fa" +dependencies = [ + "anstyle", + "windows-sys 0.45.0", +] + [[package]] name = "anyhow" -version = "1.0.66" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6" +checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4" [[package]] name = "api_demo" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", - "clap 4.1.4", + "clap 4.2.1", "glam", "itertools", "ndarray", @@ -170,9 +210,9 @@ checksum = "bf7d0a018de4f6aa429b9d33d69edf69072b1c5b1cb8d3e4a5f7ef898fc3eb76" [[package]] name = "arrayref" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" [[package]] name = "arrayvec" @@ -196,7 +236,7 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a4c5b03335bc1cb0fd9f5297f8fd3bbfd6fb04f3cb0bc7d6c91b7128cb8336a" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "arrow-format", "bytemuck", "chrono", @@ -236,7 +276,7 @@ dependencies = [ 
"proc-macro-error", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -298,6 +338,21 @@ dependencies = [ "slab", ] +[[package]] +name = "async-global-executor" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +dependencies = [ + "async-channel", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + [[package]] name = "async-io" version = "1.13.0" @@ -312,7 +367,7 @@ dependencies = [ "log", "parking", "polling", - "rustix 0.37.3", + "rustix", "slab", "socket2", "waker-fn", @@ -320,12 +375,11 @@ dependencies = [ [[package]] name = "async-lock" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685" +checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" dependencies = [ "event-listener", - "futures-lite", ] [[package]] @@ -336,52 +390,85 @@ checksum = "d7d78656ba01f1b93024b7c3a0467f1608e4be67d725749fdcd7d2c7678fd7a2" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel", + "async-global-executor", + "async-io", + "async-lock", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", ] [[package]] name = "async-stream" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" 
+checksum = "ad445822218ce64be7a341abfb0b1ea43b5c23aa83902542a4542e78309d8e5e" dependencies = [ "async-stream-impl", "futures-core", + "pin-project-lite", ] [[package]] name = "async-stream-impl" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" +checksum = "e4655ae1a7b0cdf149156f780c5bf3f1352bc53cbd9e0a361a7ef7b22947e965" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] name = "async-task" -version = "4.3.0" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" +checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae" [[package]] name = "async-trait" -version = "0.1.66" +version = "0.1.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b84f9ebcc6c1f5b8cb160f6990096a5c127f423fcb6e1ccc46c370cbdfb75dfc" +checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] +[[package]] +name = "atomic-waker" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3" + [[package]] name = "atomic_refcell" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b5e5f48b927f04e952dedc932f31995a65a0bf65ec971c74436e51bf6e970d" +checksum = "857253367827bd9d0fd973f0ef15506a96e79e41b0ad7aa691203a4e3214f6c8" [[package]] name = "atty" @@ -408,9 +495,9 @@ checksum = "7b7e4c2464d97fe331d41de9d5db0def0a96f4d823b8b32a2efd503578988973" [[package]] name = "backtrace" -version = "0.3.66" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7" +checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" dependencies = [ "addr2line", "cc", @@ -467,6 +554,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c70beb79cbb5ce9c4f8e20849978f34225931f665bb49efa6982875a4d5facb3" + [[package]] name = "block" version = "0.1.6" @@ -475,9 +568,9 @@ checksum = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" [[package]] name = "block-buffer" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] @@ -501,30 +594,45 @@ dependencies = [ "objc2-encode", ] +[[package]] +name = "blocking" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77231a1c8f801696fc0123ec6150ce92cffb8e164a02afb9c8ddee0e9b65ad65" +dependencies = [ + "async-channel", + "async-lock", + "async-task", + "atomic-waker", + "fastrand", + "futures-lite", + "log", +] + [[package]] name = "bumpalo" -version = "3.11.1" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bytemuck" -version = "1.12.3" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaa3a8d9a1ca92e282c96a32d6511b695d7d994d1d102ba85d279f9b2756947f" +checksum = 
"17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fe233b960f12f8007e3db2d136e3cb1c291bfd7396e384ee76025fc1a3932b4" +checksum = "fdde5c9cd29ebd706ce1b35600920a33550e402fc998a2e53ad3b42c3c47a192" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] @@ -535,18 +643,18 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" [[package]] name = "calloop" -version = "0.10.2" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595eb0438b3c6d262395fe30e6de9a61beb57ea56290b00a07f227fe6e20cbf2" +checksum = "1a59225be45a478d772ce015d9743e49e92798ece9e34eda9a6aa2a6a7f40192" dependencies = [ "log", - "nix 0.24.2", + "nix 0.25.1", "slotmap", "thiserror", "vec_map", @@ -554,9 +662,9 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.1" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e" +checksum = "c530edf18f37068ac2d977409ed5cd50d53d73bc653c7647b48eb78976ac9ae2" dependencies = [ "serde", ] @@ -572,20 +680,21 @@ dependencies = [ [[package]] name = "cargo-run-wasm" -version = "0.2.0" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "611b811fad83eebfcdcf47ae1e425c82d1249608bc571d537448d706be08cf27" +checksum = "cc1e37cf14ef470ed74ec2a8b95e51b8623bcf6f76d24f233ebaeb209f766230" dependencies = [ "devserver_lib", "pico-args", 
+ "serde_json", "wasm-bindgen-cli-support", ] [[package]] name = "cargo_metadata" -version = "0.15.2" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a" +checksum = "08a1ec454bc3eead8719cb56e15dbbfecdbc14e4b3a3ae4936cc6e31f5fc0d07" dependencies = [ "camino", "cargo-platform", @@ -603,9 +712,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.77" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" dependencies = [ "jobserver", ] @@ -630,15 +739,15 @@ checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" [[package]] name = "chrono" -version = "0.4.23" +version = "0.4.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" +checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" dependencies = [ "iana-time-zone", "js-sys", "num-integer", "num-traits", - "time 0.1.44", + "time 0.1.45", "wasm-bindgen", "winapi", ] @@ -676,7 +785,7 @@ version = "3.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" dependencies = [ - "bitflags", + "bitflags 1.3.2", "clap_lex 0.2.4", "indexmap", "textwrap", @@ -684,30 +793,38 @@ dependencies = [ [[package]] name = "clap" -version = "4.1.4" +version = "4.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f13b9c79b5d1dd500d20ef541215a6423c75829ef43117e1b4d17fd8af0b5d76" +checksum = "046ae530c528f252094e4a77886ee1374437744b2bff1497aa898bbddbbb29b3" dependencies = [ - "bitflags", + "clap_builder", "clap_derive", 
- "clap_lex 0.3.0", - "is-terminal", "once_cell", +] + +[[package]] +name = "clap_builder" +version = "4.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "223163f58c9a40c3b0a43e1c4b50a9ce09f007ea2cb1ec258a687945b4b7929f" +dependencies = [ + "anstream", + "anstyle", + "bitflags 1.3.2", + "clap_lex 0.4.1", "strsim", - "termcolor", ] [[package]] name = "clap_derive" -version = "4.1.0" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "684a277d672e91966334af371f1a7b5833f9aa00b07c84e92fbce95e00208ce8" +checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4" dependencies = [ - "heck 0.4.0", - "proc-macro-error", + "heck 0.4.1", "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] @@ -721,12 +838,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8" -dependencies = [ - "os_str_bytes", -] +checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1" [[package]] name = "clean-path" @@ -736,9 +850,9 @@ checksum = "aaa6b4b263a5d737e9bf6b7c09b72c41a5480aec4d7219af827f6564e950b6a5" [[package]] name = "clipboard-win" -version = "4.4.2" +version = "4.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4ab1b92798304eedc095b53942963240037c0516452cb11aeba709d420b2219" +checksum = "7191c27c2357d9b7ef96baac1773290d4ca63b24205b82a3fd8a0637afcf0362" dependencies = [ "error-code", "str-buf", @@ -751,7 +865,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f425db7937052c684daec3bd6375c8abe2d146dca4b8b143d6db777c39138f3a" dependencies = [ - "bitflags", + "bitflags 1.3.2", "block", "cocoa-foundation", "core-foundation", @@ -763,11 +877,11 @@ dependencies = [ [[package]] name = "cocoa-foundation" 
-version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ade49b65d560ca58c403a479bb396592b155c0185eada742ee323d1d68d6318" +checksum = "931d3837c286f56e3c58423ce4eba12d08db2374461a785c86f672b08b5650d6" dependencies = [ - "bitflags", + "bitflags 1.3.2", "block", "core-foundation", "core-graphics-types", @@ -831,26 +945,40 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "concolor-override" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a855d4a1978dc52fb0536a04d384c2c0c1aa273597f08b77c8c4d3b2eec6037f" + +[[package]] +name = "concolor-query" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88d11d52c3d7ca2e6d0040212be9e4dbbcd78b6447f535b6b561f449427944cf" +dependencies = [ + "windows-sys 0.45.0", +] + [[package]] name = "concurrent-queue" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd7bef69dc86e3c610e4e7aed41035e2a7ed12e72dd7530f61327a6579a4390b" +checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e" dependencies = [ "crossbeam-utils", ] [[package]] name = "console" -version = "0.15.2" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c050367d967ced717c04b65d8c619d863ef9292ce0c5760028655a2fb298718c" +checksum = "c3d79fbe8970a77e3e34151cc13d3b3e248aa0faaecb9f6091fa07ebefe5ad60" dependencies = [ "encode_unicode", "lazy_static", "libc", - "terminal_size", - "winapi", + "windows-sys 0.42.0", ] [[package]] @@ -907,7 +1035,7 @@ version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2581bbab3b8ffc6fcbd550bf46c355135d16e9ff2a6ea032ad6b9bf1d7efe4fb" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-graphics-types", "foreign-types", @@ -920,7 +1048,7 @@ version = "0.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3a68b68b3446082644c91ac778bf50cd4104bfb002b5a6a7c44cca5a2c70788b" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "foreign-types", "libc", @@ -928,9 +1056,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +checksum = "280a9f2d8b3a38871a3c8a46fb80db65e5e5ed97da80c4d08bf27fb63e35e181" dependencies = [ "libc", ] @@ -996,9 +1124,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.8" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "cf2b3e8478797446514c91ef04bafcb59faba183e621ad488df88983cc14128c" dependencies = [ "cfg-if", "crossbeam-utils", @@ -1006,9 +1134,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ "cfg-if", "crossbeam-epoch", @@ -1017,14 +1145,14 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.13" +version = "0.9.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset 0.7.1", + "memoffset 0.8.0", "scopeguard", ] @@ -1040,9 +1168,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.14" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" dependencies = [ "cfg-if", ] @@ -1053,7 +1181,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e64e6c0fbe2c17357405f7c758c1ef960fce08bdfb2c03d88d2a18d7e09c4b67" dependencies = [ - "bitflags", + "bitflags 1.3.2", "crossterm_winapi", "libc", "mio", @@ -1089,26 +1217,30 @@ dependencies = [ ] [[package]] -name = "ctrlc" -version = "3.2.2" +name = "ctor" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b37feaa84e6861e00a1f5e5aa8da3ee56d605c9992d33e082786754828e20865" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" dependencies = [ - "nix 0.24.2", - "winapi", + "quote", + "syn 1.0.109", ] [[package]] -name = "cty" -version = "0.2.2" +name = "ctrlc" +version = "3.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b365fabc795046672053e29c954733ec3b05e4be654ab130fe8f1f94d7051f35" +checksum = "bbcf33c2a618cbe41ee43ae6e9f2e48368cd9f9db2896f10167d8d762679f639" +dependencies = [ + "nix 0.26.2", + "windows-sys 0.45.0", +] [[package]] name = "cxx" -version = "1.0.82" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a41a86530d0fe7f5d9ea779916b7cadd2d4f9add748b99c2c029cbbdfaf453" +checksum = "f61f1b6389c3fe1c316bf8a4dccc90a38208354b330925bce1f74a6c4756eb93" dependencies = [ "cc", "cxxbridge-flags", @@ -1118,9 +1250,9 @@ dependencies = [ [[package]] name = "cxx-build" -version = "1.0.82" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06416d667ff3e3ad2df1cd8cd8afae5da26cf9cec4d0825040f88b5ca659a2f0" +checksum = "12cee708e8962df2aeb38f594aae5d827c022b6460ac71a7a3e2c3c2aae5a07b" dependencies = [ "cc", "codespan-reporting", @@ -1128,42 +1260,41 @@ dependencies = [ 
"proc-macro2", "quote", "scratch", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] name = "cxxbridge-flags" -version = "1.0.82" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "820a9a2af1669deeef27cb271f476ffd196a2c4b6731336011e0ba63e2c7cf71" +checksum = "7944172ae7e4068c533afbb984114a56c46e9ccddda550499caa222902c7f7bb" [[package]] name = "cxxbridge-macro" -version = "1.0.82" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a08a6e2fcc370a089ad3b4aaf54db3b1b4cee38ddabce5896b33eb693275f470" +checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] name = "d3d12" version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8f0de2f5a8e7bd4a9eec0e3c781992a4ce1724f68aec7d7a3715344de8b39da" +source = "git+https://github.com/gfx-rs/d3d12-rs?rev=b940b1d71#b940b1d71ab7083ae80eec697872672dc1f2bd32" dependencies = [ - "bitflags", + "bitflags 1.3.2", "libloading", "winapi", ] [[package]] name = "darling" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0dd3cd20dc6b5a876612a6e5accfe7f3dd883db6d07acfbf14c128f61550dfa" +checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" dependencies = [ "darling_core", "darling_macro", @@ -1171,26 +1302,26 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f" +checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] name = "darling_macro" -version = "0.14.2" +version = "0.14.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e" +checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ "darling_core", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -1201,14 +1332,14 @@ checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] name = "devserver_lib" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb4b71acc1405be2431a93892a79a0d82ed5ba6885649ddbdfc62caa4d67b1c" +checksum = "edf215dbb8cb1409cca7645aaed35f9e39fb0a21855bba1ac48bc0334903bf66" [[package]] name = "digest" @@ -1278,7 +1409,7 @@ dependencies = [ [[package]] name = "dna" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "itertools", "rand", @@ -1287,9 +1418,9 @@ dependencies = [ [[package]] name = "document-features" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3267e1ade4f1f6ddd35fed44a04b6514e244ffeda90c6a14a9ee30f9c9fd7a1" +checksum = "e493c573fce17f00dcab13b6ac057994f3ce17d1af4dc39bfd482b83c6eb6157" dependencies = [ "litrs", ] @@ -1302,15 +1433,14 @@ checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" [[package]] name = "dyn-clone" -version = "1.0.9" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f94fa09c2aeea5b8839e414b7b841bf429fd25b9c522116ac97ee87856d88b2" +checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30" [[package]] name = "ecolor" version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f99fe3cac305af9d6d92971af60d0f7ea4d783201ef1673571567b6699964d9" +source = 
"git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ "bytemuck", "serde", @@ -1319,16 +1449,19 @@ dependencies = [ [[package]] name = "eframe" version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3df3ce60931e5f2d83bab4484d1a283908534d5308cc6b0c5c22c59cd15ee7cc" +source = "git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ "bytemuck", + "cocoa", "directories-next", "egui", "egui-wgpu", "egui-winit", "egui_glow", + "image", "js-sys", + "log", + "objc", "percent-encoding", "pollster", "puffin", @@ -1336,39 +1469,38 @@ dependencies = [ "ron", "serde", "thiserror", - "tracing", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "wgpu", + "winapi", "winit", ] [[package]] name = "egui" version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6412a21e0bde7c0918f7fb44bbbb86b5e1f88e63c026a4e747cc7af02f76dfbe" +source = "git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ "accesskit", - "ahash 0.8.2", + "ahash 0.8.3", "epaint", + "log", "nohash-hasher", "ron", "serde", - "tracing", ] [[package]] name = "egui-wgpu" version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1678d8f30181193e78c15dddae16e6027c6058fdda9631950ade511b8a4b26f5" +source = "git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ "bytemuck", "epaint", + "log", "puffin", - "tracing", + "thiserror", "type-map", "wgpu", "winit", @@ -1377,26 +1509,25 @@ dependencies = [ [[package]] name = "egui-winit" version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ab43597ba41f0ce39a364ad83185594578bfd8b3409b99dbcbb01df23afc3dbb" +source = "git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ - "android-activity", "arboard", "egui", "instant", + "log", "puffin", + "raw-window-handle", "serde", "smithay-clipboard", - "tracing", "webbrowser", "winit", ] [[package]] name = "egui_dock" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f86bdfe987f753ffcdf896932f09babfc63580b21e1899c304166f0befc85c8" +checksum = "be7e6eb63cb936413bd2a4f54be4a9ef53a48252f25864f5f946d4954d7332bd" dependencies = [ "egui", "serde", @@ -1405,27 +1536,25 @@ dependencies = [ [[package]] name = "egui_extras" version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f051342e97dfa2445107cb7d2e720617f5c840199b5cb4fe0ffcf481fcf5cce" +source = "git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ "egui", + "log", "serde", - "tracing", ] [[package]] name = "egui_glow" version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8257332fb168a965b3dca81d6a344e053153773c889cabdba0b3b76f1629ae81" +source = "git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ "bytemuck", "egui", "egui-winit", "glow", + "log", "memoffset 0.6.5", "puffin", - "tracing", "wasm-bindgen", "web-sys", ] @@ -1445,15 +1574,14 @@ dependencies = [ [[package]] name = "either" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" [[package]] name = "emath" version = "0.21.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ecd80612937e0267909d5351770fe150004e24dab93954f69ca62eecd3f77e" +source = "git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ "bytemuck", "serde", @@ -1483,18 +1611,18 @@ checksum = "5e9a1f9f7d83e59740248a6e14ecf93929ade55027844dfcea78beafccc15745" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.12", ] [[package]] name = "enumn" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88bcb3a067a6555d577aba299e75eff9942da276e6506fc6274327daa026132" +checksum = "48016319042fb7c87b78d2993084a831793a897a5cd1a2a67cab9d1eeb4b7d76" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] @@ -1515,7 +1643,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -1534,15 +1662,15 @@ dependencies = [ [[package]] name = "epaint" version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12e78b5c58a1f7f621f9d546add2adce20636422c9b251e29f749e8a2f713c95" +source = "git+https://github.com/emilk/egui?rev=f76eefb98d23cbf71989255aafe75a07d343f6ed#f76eefb98d23cbf71989255aafe75a07d343f6ed" dependencies = [ "ab_glyph", - "ahash 0.8.2", + "ahash 0.8.3", "atomic_refcell", "bytemuck", "ecolor", "emath", + "log", "nohash-hasher", "parking_lot 0.12.1", "serde", @@ -1558,21 +1686,10 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 1.0.103", + "syn 1.0.109", "synstructure", ] -[[package]] -name = "errno" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" -dependencies = [ - "errno-dragonfly", - "libc", - "winapi", -] - [[package]] name = "errno" version = "0.3.0" @@ -1637,30 +1754,30 @@ dependencies = [ 
[[package]] name = "fastrand" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" dependencies = [ "instant", ] [[package]] name = "filetime" -version = "0.2.18" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3" +checksum = "8a3de6e8d11b22ff9edc6d916f890800597d60f8b2da1caf2955c274638d6412" dependencies = [ "cfg-if", "libc", "redox_syscall 0.2.16", - "windows-sys 0.42.0", + "windows-sys 0.45.0", ] [[package]] name = "fixed" -version = "1.20.0" +version = "1.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418922d2c280b8c68f82699494cc8c48f392233233a9a8b9a48a57a36c0ad0ef" +checksum = "79386fdcec5e0fde91b1a6a5bcd89677d1f9304f7f986b154a1b9109038854d9" dependencies = [ "az", "bytemuck", @@ -1677,9 +1794,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" +checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" dependencies = [ "crc32fast", "miniz_oxide", @@ -1732,9 +1849,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" dependencies = [ "futures-channel", "futures-core", @@ -1747,9 +1864,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.25" +version = "0.3.28" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" dependencies = [ "futures-core", "futures-sink", @@ -1757,15 +1874,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" [[package]] name = "futures-executor" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" dependencies = [ "futures-core", "futures-task", @@ -1774,9 +1891,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" [[package]] name = "futures-lite" @@ -1795,32 +1912,32 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] name = "futures-sink" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9" +checksum = 
"f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" [[package]] name = "futures-task" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" [[package]] name = "futures-util" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ "futures-channel", "futures-core", @@ -1834,20 +1951,11 @@ dependencies = [ "slab", ] -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", @@ -1878,9 +1986,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.26.2" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" +checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" [[package]] name = "glam" @@ -1899,11 +2007,23 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "glow" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8edf6019dff2d92ad27c1e3ff82ad50a0aea5b01370353cc928bfdc33e95925c" +checksum = "4e007a07a24de5ecae94160f141029e9a347282cfe25d1d58d85d845cf3130f1" dependencies = [ "js-sys", "slotmap", @@ -1934,7 +2054,7 @@ dependencies = [ "inflections", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -1955,7 +2075,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fc59e5f710e310e76e6707f86c561dd646f69a8876da9131703b2f717de818d" dependencies = [ - "bitflags", + "bitflags 1.3.2", "gpu-alloc-types", ] @@ -1965,7 +2085,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54804d0d6bc9d7f26db4eaec1ad10def69b599315f487d32c334a80d1efe67a5" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -1978,7 +2098,7 @@ dependencies = [ "log", "thiserror", "winapi", - "windows", + "windows 0.44.0", ] [[package]] @@ -1987,7 +2107,7 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b0c02e1ba0bdb14e965058ca34e09c020f8e507a760df1121728e0aef68d57a" dependencies = [ - "bitflags", + "bitflags 1.3.2", "gpu-descriptor-types", "hashbrown 0.12.3", ] @@ -1998,14 +2118,14 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "363e3677e55ad168fef68cf9de3a4a310b53124c5e784c53a1d70e92d23f2126" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = "h2" -version = "0.3.15" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +checksum = 
"17f8a914c2987b688368b5138aa05321db91f4090cf26118185672ad588bce21" dependencies = [ "bytes", "fnv", @@ -2054,21 +2174,21 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "rayon", ] [[package]] name = "hassle-rs" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90601c6189668c7345fc53842cb3f3a3d872203d523be1b3cb44a36a3e62fb85" +checksum = "1397650ee315e8891a0df210707f0fc61771b0cc518c3023896064c5407cb3b0" dependencies = [ - "bitflags", + "bitflags 1.3.2", "com-rs", "libc", "libloading", @@ -2088,9 +2208,9 @@ dependencies = [ [[package]] name = "heck" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" @@ -2110,6 +2230,12 @@ dependencies = [ "libc", ] +[[package]] +name = "hermit-abi" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + [[package]] name = "hex" version = "0.4.3" @@ -2124,9 +2250,9 @@ checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df" [[package]] name = "http" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ "bytes", "fnv", @@ -2164,9 +2290,9 @@ checksum = 
"9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.23" +version = "0.14.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "034711faac9d2166cb1baf1a2fb0b60b1f277f8492fd72176c17f3515e1abd3c" +checksum = "ab302d72a6f11a3b910431ff93aae7e773078c769f0a3ef15fb9ec692ed147d4" dependencies = [ "bytes", "futures-channel", @@ -2188,16 +2314,16 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.53" +version = "0.1.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" +checksum = "0c17cc76786e99f8d2f055c11159e7f0091c42474dcc3189fbab96072e873e6d" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "winapi", + "windows 0.46.0", ] [[package]] @@ -2234,9 +2360,9 @@ dependencies = [ [[package]] name = "image" -version = "0.24.5" +version = "0.24.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69b7ea949b537b0fd0af141fff8c77690f2ce96f4f41f042ccb6c69c6c965945" +checksum = "527909aa81e20ac3a44803521443a765550f09b5130c2c2fa1ea59c2f8f50a3a" dependencies = [ "bytemuck", "byteorder", @@ -2256,9 +2382,9 @@ checksum = "d9f1a0777d972970f204fdf8ef319f1f4f8459131636d7e3c96c5d59570d0fa6" [[package]] name = "indexmap" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", @@ -2266,9 +2392,9 @@ dependencies = [ [[package]] name = "indoc" -version = "1.0.7" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3" +checksum = 
"bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306" [[package]] name = "inflections" @@ -2282,7 +2408,7 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" dependencies = [ - "bitflags", + "bitflags 1.3.2", "inotify-sys", "libc", ] @@ -2298,9 +2424,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.23.0" +version = "1.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48b08a091dfe5b09a6a9688c468fdd5b4396e92ce09e2eb932f0884b02788a4" +checksum = "9a28d25139df397cbca21408bb742cf6837e04cdbebf1b07b760caf971d6a972" dependencies = [ "console", "lazy_static", @@ -2323,24 +2449,25 @@ dependencies = [ [[package]] name = "io-lifetimes" -version = "1.0.4" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d6c6f8c91b4b9ed43484ad1a938e393caf35960fce7f82a040497207bd8e9e" +checksum = "09270fd4fa1111bc614ed2246c7ef56239a3063d5be0d1ec3b589c505d400aeb" dependencies = [ + "hermit-abi 0.3.1", "libc", - "windows-sys 0.42.0", + "windows-sys 0.45.0", ] [[package]] name = "is-terminal" -version = "0.4.2" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28dfb6c8100ccc63462345b67d1bbc3679177c75ee4bf59bf29c8b1d110b8189" +checksum = "256017f749ab3117e93acb91063009e1f1bb56d03965b14c2c8df4eb02c524d8" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi 0.3.1", "io-lifetimes", - "rustix 0.36.7", - "windows-sys 0.42.0", + "rustix", + "windows-sys 0.45.0", ] [[package]] @@ -2354,22 +2481,24 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.4" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "jni" -version = 
"0.20.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "039022cdf4d7b1cf548d31f60ae783138e5fd42013f6271049d7df7afadef96c" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" dependencies = [ "cesu8", + "cfg-if", "combine", "jni-sys", "log", "thiserror", "walkdir", + "windows-sys 0.45.0", ] [[package]] @@ -2380,9 +2509,9 @@ checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" -version = "0.1.25" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" +checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" dependencies = [ "libc", ] @@ -2429,10 +2558,19 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587" dependencies = [ - "bitflags", + "bitflags 1.3.2", "libc", ] +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -2511,9 +2649,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.137" +version = "0.2.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89" +checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" [[package]] name = "libloading" @@ -2533,9 +2671,9 @@ checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" [[package]] name = "libmimalloc-sys" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"04d1c67deb83e6b75fa4fe3309e09cfeade12e7721d95322af500d3814ea60c9" +checksum = "dd8c7cbf8b89019683667e347572e6d55a7df7ea36b0c4ce69961b0cde67b174" dependencies = [ "cc", "libc", @@ -2543,9 +2681,9 @@ dependencies = [ [[package]] name = "link-cplusplus" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" dependencies = [ "cc", ] @@ -2558,15 +2696,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" - -[[package]] -name = "linux-raw-sys" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd550e73688e6d578f0ac2119e32b797a327631a42f9433e59d02e139c8df60d" +checksum = "d59d8c75012853d2e872fb56bc8a2e53718e2cafe1a4c823143141c6d90c322f" [[package]] name = "litrs" @@ -2591,6 +2723,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if", + "value-bag", ] [[package]] @@ -2604,9 +2737,9 @@ dependencies = [ [[package]] name = "macaw" -version = "0.18.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f5e66b08df1bc3a0249cadf7f2802ea7c09dc906af44c62b2889d721df1e252" +checksum = "ed1f4d97d94b7d276711c5b2a74f91c2a90ab532198683bb648240ddc0488cf7" dependencies = [ "glam", "num-traits", @@ -2639,9 +2772,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" -version = "0.5.8" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" dependencies = [ "libc", ] @@ -2655,15 +2788,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" -dependencies = [ - "autocfg", -] - [[package]] name = "memoffset" version = "0.8.0" @@ -2675,9 +2799,9 @@ dependencies = [ [[package]] name = "memory-stats" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d3f458a3076e337e027943b8cb5f4f72a6651714f9258fc7dcd3ae3b51f0276" +checksum = "34f79cf9964c5c9545493acda1263f1912f8d2c56c8a2ffee2606cb960acaacc" dependencies = [ "libc", "winapi", @@ -2689,7 +2813,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de11355d1f6781482d027a3b4d4de7825dcedb197bf573e0596d00008402d060" dependencies = [ - "bitflags", + "bitflags 1.3.2", "block", "core-graphics-types", "foreign-types", @@ -2699,16 +2823,16 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.32" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2374e2999959a7b583e1811a1ddbf1d3a4b9496eceb9746f1192a59d871eca" +checksum = "9dcb174b18635f7561a0c6c9fc2ce57218ac7523cf72c50af80e2d79ab8f3ba1" dependencies = [ "libmimalloc-sys", ] [[package]] name = "minimal" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "rerun", ] @@ -2721,19 +2845,19 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "minimal_options" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", - "clap 4.1.4", + "clap 4.2.1", "glam", "rerun", ] [[package]] name = "miniz_oxide" -version = "0.5.4" +version = "0.6.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" dependencies = [ "adler", ] @@ -2746,14 +2870,14 @@ checksum = "e53debba6bda7a793e5f99b8dacf19e626084f525f7829104ba9898f367d85ff" [[package]] name = "mio" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.42.0", + "windows-sys 0.45.0", ] [[package]] @@ -2779,17 +2903,16 @@ checksum = "a8a3e2bde382ebf960c1f3e79689fa5941625fe9bf694a1cb64af3e85faff3af" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] name = "naga" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eafe22a23b797c9bc227c6c896419b26b5bb88fa903417a3adaed08778850d5" +version = "0.12.0" +source = "git+https://github.com/gfx-rs/naga?rev=b99d58ea435090e561377949f428bce2c18451bb#b99d58ea435090e561377949f428bce2c18451bb" dependencies = [ "bit-set", - "bitflags", + "bitflags 1.3.2", "codespan-reporting", "hexf-parse", "indexmap", @@ -2832,7 +2955,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "451422b7e4718271c8b5b3aadf5adedba43dc76312454b387e98fae0fc951aa0" dependencies = [ - "bitflags", + "bitflags 1.3.2", "jni-sys", "ndk-sys", "num_enum", @@ -2855,13 +2978,19 @@ dependencies = [ "jni-sys", ] +[[package]] +name = "never" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c96aba5aa877601bb3f6dd6a63a969e1f82e60646e81e71b14496995e9853c91" + [[package]] name = "nix" version = "0.23.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c" dependencies = [ - "bitflags", + "bitflags 1.3.2", "cc", "cfg-if", "libc", @@ -2870,16 +2999,41 @@ dependencies = [ [[package]] name = "nix" -version = "0.24.2" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", + "memoffset 0.6.5", +] + +[[package]] +name = "nix" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "195cdbc1741b8134346d515b3a56a1c94b0912758009cfd53f99ea0f57b065fc" +checksum = "f346ff70e7dbfd675fe90590b92d59ef2de15a8779ae305ebcbfd3f0caf59be4" dependencies = [ - "bitflags", + "autocfg", + "bitflags 1.3.2", "cfg-if", "libc", "memoffset 0.6.5", ] +[[package]] +name = "nix" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", + "static_assertions", +] + [[package]] name = "nohash-hasher" version = "0.2.0" @@ -2888,9 +3042,9 @@ checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" [[package]] name = "nom" -version = "7.1.1" +version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8903e5a29a317527874d0402f867152a3d21c908bb0b933e416c65e301d4c36" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", @@ -2898,11 +3052,11 @@ dependencies = [ [[package]] name = "notify" -version = "5.0.0" +version = "5.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2c66da08abae1c024c01d635253e402341b4060a12e99b31c7594063bf490a" +checksum = 
"58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9" dependencies = [ - "bitflags", + "bitflags 1.3.2", "crossbeam-channel", "filetime", "fsevent-sys", @@ -2911,7 +3065,7 @@ dependencies = [ "libc", "mio", "walkdir", - "winapi", + "windows-sys 0.42.0", ] [[package]] @@ -2950,9 +3104,9 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ae39348c8bc5fbd7f40c727a9925f03517afd2ab27d46702108b6a7e5414c19" +checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" dependencies = [ "num-traits", ] @@ -2965,7 +3119,7 @@ checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -3013,33 +3167,33 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" dependencies = [ - "hermit-abi 0.1.19", + "hermit-abi 0.2.6", "libc", ] [[package]] name = "num_enum" -version = "0.5.7" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf5395665662ef45796a4ff5486c5d41d29e0c09640af4c5f17fd94ee2c119c9" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.5.7" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0498641e53dd6ac1a4f22547548caa6864cc4933784319cd1775271c5a46ce" +checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -3087,9 +3241,9 
@@ checksum = "df3b9834c1e95694a05a828b59f55fa2afec6288359cda67146126b3f90a55d7" [[package]] name = "objc2" -version = "0.3.0-beta.3" +version = "0.3.0-beta.3.patch-leaks.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe31e5425d3d0b89a15982c024392815da40689aceb34bad364d58732bcfd649" +checksum = "e7d9bb2ee6b71d02b1b3554ed600d267ee9a2796acc9fa43fb7748e13fe072dd" dependencies = [ "block2", "objc-sys", @@ -3125,19 +3279,19 @@ dependencies = [ [[package]] name = "object" -version = "0.29.0" +version = "0.30.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" +checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" dependencies = [ "memchr", ] [[package]] name = "objectron" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", - "clap 4.1.4", + "clap 4.2.1", "glam", "prost", "prost-build", @@ -3146,9 +3300,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.17.0" +version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "oorandom" @@ -3158,9 +3312,9 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] name = "orbclient" -version = "0.3.42" +version = "0.3.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba683f1641c11041c59d5d93689187abcab3c1349dc6d9d70c550c9f9360802f" +checksum = "974465c5e83cf9df05c1e4137b271d29035c902e39e5ad4c1939837e22160af8" dependencies = [ "cfg-if", "redox_syscall 0.2.16", @@ -3170,9 +3324,9 @@ dependencies = [ [[package]] name = "ordered-float" -version = "3.4.0" +version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d84eb1409416d254e4a9c8fa56cc24701755025b458f0fcd8e59e1f5f40c23bf" +checksum = "13a384337e997e6860ffbaa83708b2ef329fd8c54cb67a5f64d421e0f943254f" dependencies = [ "num-traits", ] @@ -3189,15 +3343,15 @@ dependencies = [ [[package]] name = "os_str_bytes" -version = "6.4.1" +version = "6.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" +checksum = "ceedf44fb00f2d1984b0bc98102627ce622e083e49a5bacdb3e514fa4238e267" [[package]] name = "owned_ttf_parser" -version = "0.17.1" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18904d3c65493a9f0d7542293d1a7f69bfdc309a6b9ef4f46dc3e58b0577edc5" +checksum = "e25e9fb15717794fae58ab55c26e044103aad13186fbb625893f9a3bbcc24228" dependencies = [ "ttf-parser", ] @@ -3226,7 +3380,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.4", + "parking_lot_core 0.9.7", ] [[package]] @@ -3245,9 +3399,9 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.4" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" dependencies = [ "backtrace", "cfg-if", @@ -3256,14 +3410,14 @@ dependencies = [ "redox_syscall 0.2.16", "smallvec", "thread-id", - "windows-sys 0.42.0", + "windows-sys 0.45.0", ] [[package]] name = "paste" -version = "1.0.9" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" +checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" [[package]] name = "pathdiff" @@ -3279,9 +3433,9 @@ checksum = 
"478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" [[package]] name = "petgraph" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" dependencies = [ "fixedbitset", "indexmap", @@ -3350,11 +3504,11 @@ dependencies = [ [[package]] name = "png" -version = "0.17.6" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f0e7f4c94ec26ff209cee506314212639d6c91b80afb82984819fafce9df01c" +checksum = "5d708eaf860a19b19ce538740d2b4bdeeb8337fa53f7738455e706623ad5c638" dependencies = [ - "bitflags", + "bitflags 1.3.2", "crc32fast", "flate2", "miniz_oxide", @@ -3362,29 +3516,29 @@ dependencies = [ [[package]] name = "polars-arrow" -version = "0.27.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38fad08b4d4d3e21e8935d5d3d4dfcbb5ca65ffc5fa19364c254751bc1d62f93" +checksum = "06e57a7b929edf6c73475dbc3f63d35152f14f4a9455476acc6127d770daa0f6" dependencies = [ "arrow2", - "hashbrown 0.13.1", + "hashbrown 0.13.2", "num", "thiserror", ] [[package]] name = "polars-core" -version = "0.27.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2256086865cfa7db31af5e66a088f0089bff1ba9692f7195d1661497cebdca05" +checksum = "5a440cd53916f1a87fac1fda36cd7cc2d226247b4d4570d96242da5fa7f07b2a" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "anyhow", "arrow2", - "bitflags", + "bitflags 1.3.2", "chrono", "comfy-table 6.1.4", - "hashbrown 0.13.1", + "hashbrown 0.13.2", "indexmap", "num", "once_cell", @@ -3400,9 +3554,9 @@ dependencies = [ [[package]] name = "polars-ops" -version = "0.27.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"31168b91a6edddb8dc4dcc4b53af15e9ffefc23f0bff054c1975423e01f3ae15" +checksum = "36638340fd9f4377dab11f307877ebb5bdac3bc9b25ea32a771584de76e5280a" dependencies = [ "arrow2", "polars-arrow", @@ -3412,9 +3566,9 @@ dependencies = [ [[package]] name = "polars-utils" -version = "0.27.1" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda7fb126f8c77d0a106620fd525bc4fdd1c7e32cb100aa9a82ba3f7c969485a" +checksum = "a741a3325c544c97c7a9ff57d857f089b60041bd92b06c41582df6940ffaa05b" dependencies = [ "once_cell", "rayon", @@ -3431,16 +3585,18 @@ dependencies = [ [[package]] name = "polling" -version = "2.5.2" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6" +checksum = "7e1f879b2998099c2d69ab9605d145d5b661195627eccc680002c4918a7fb6fa" dependencies = [ "autocfg", + "bitflags 1.3.2", "cfg-if", + "concurrent-queue", "libc", "log", - "wepoll-ffi", - "windows-sys 0.42.0", + "pin-project-lite", + "windows-sys 0.45.0", ] [[package]] @@ -3457,23 +3613,22 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "prettyplease" -version = "0.1.23" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e97e3215779627f01ee256d2fad52f3d95e8e1c11e9fc6fd08f7cd455d5d5c78" +checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" dependencies = [ "proc-macro2", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] name = "proc-macro-crate" -version = "1.2.1" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ "once_cell", - "thiserror", - "toml", + "toml_edit", ] [[package]] @@ -3485,7 +3640,7 @@ dependencies = [ 
"proc-macro-error-attr", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", "version_check", ] @@ -3508,9 +3663,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.56" +version = "1.0.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +checksum = "e472a104799c74b514a57226160104aa483546de37e839ec50e3c2e41dd87534" dependencies = [ "unicode-ident", ] @@ -3523,9 +3678,9 @@ checksum = "74605f360ce573babfe43964cbe520294dcb081afbf8c108fc6e23036b4da2df" [[package]] name = "prost" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21dc42e00223fc37204bd4aa177e69420c604ca4a183209a8f9de30c6d934698" +checksum = "e48e50df39172a3e7eb17e14642445da64996989bc212b583015435d39a58537" dependencies = [ "bytes", "prost-derive", @@ -3533,12 +3688,12 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f8ad728fb08fe212df3c05169e940fbb6d9d16a877ddde14644a983ba2012e" +checksum = "2c828f93f5ca4826f97fedcbd3f9a536c16b12cff3dbbb4a007f932bbad95b12" dependencies = [ "bytes", - "heck 0.4.0", + "heck 0.4.1", "itertools", "lazy_static", "log", @@ -3548,43 +3703,43 @@ dependencies = [ "prost", "prost-types", "regex", - "syn 1.0.103", + "syn 1.0.109", "tempfile", "which", ] [[package]] name = "prost-derive" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda8c0881ea9f722eb9629376db3d0b903b462477c1aafcb0566610ac28ac5d" +checksum = "4ea9b0f8cbe5e15a8a042d030bd96668db28ecb567ec37d691971ff5731d2b1b" dependencies = [ "anyhow", "itertools", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] name = "prost-types" -version = "0.11.6" +version = "0.11.8" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e0526209433e96d83d750dd81a99118edbc55739e7e61a46764fd2ad537788" +checksum = "379119666929a1afd7a043aa6cf96fa67a6dce9af60c88095a4686dbce4c9c88" dependencies = [ - "bytes", "prost", ] [[package]] name = "puffin" -version = "0.14.0" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "796a1b7d7d0ec984dde24615178cbd14dc697ea4cdcddfd1fee9a5f87135f9e8" +checksum = "a7b2c7a01f569fb03e2ff1f5376537f294001447bd23ce75ca51054fcd223fe4" dependencies = [ "anyhow", "bincode", "byteorder", + "instant", "once_cell", "parking_lot 0.12.1", "ruzstd", @@ -3594,9 +3749,9 @@ dependencies = [ [[package]] name = "puffin_http" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77dae1a51a8887f161ae17809ec4159bb3fbc8be60d12947039fa063cf211f1b" +checksum = "dd0248f6b7425d45ac24cecefc9272d679b8fc18e82c5d3777e90146eb9a9f85" dependencies = [ "anyhow", "crossbeam-channel", @@ -3606,9 +3761,9 @@ dependencies = [ [[package]] name = "pyo3" -version = "0.18.0" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccd4149c8c3975099622b4e1962dac27565cf5663b76452c3e2b66e0b6824277" +checksum = "cfb848f80438f926a9ebddf0a539ed6065434fd7aae03a89312a9821f81b8501" dependencies = [ "cfg-if", "indoc", @@ -3622,10 +3777,35 @@ dependencies = [ ] [[package]] -name = "pyo3-build-config" +name = "pyo3-asyncio" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3564762e37035cfc486228e10b0528460fa026d681b5763873c693aa0d5c260" +dependencies = [ + "futures", + "once_cell", + "pin-project-lite", + "pyo3", + "pyo3-asyncio-macros", + "tokio", +] + +[[package]] +name = "pyo3-asyncio-macros" version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9cd09fe469834db21ee60e0051030339e5d361293d8cb5ec02facf7fdcf52dbf" +checksum = "be72d4cd43a27530306bd0d20d3932182fbdd072c6b98d3638bc37efb9d559dd" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pyo3-build-config" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98a42e7f42e917ce6664c832d5eee481ad514c98250c49e0b03b20593e2c7ed0" dependencies = [ "once_cell", "target-lexicon", @@ -3633,9 +3813,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.18.0" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c427c9a96b9c5b12156dbc11f76b14f49e9aae8905ca783ea87c249044ef137" +checksum = "a0707f0ab26826fe4ccd59b69106e9df5e12d097457c7b8f9c0fd1d2743eec4d" dependencies = [ "libc", "pyo3-build-config", @@ -3643,25 +3823,25 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.18.0" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b822bbba9d60630a44d2109bc410489bb2f439b33e3a14ddeb8a40b378a7c4" +checksum = "978d18e61465ecd389e1f235ff5a467146dc4e3c3968b90d274fe73a5dd4a438" dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] name = "pyo3-macros-backend" -version = "0.18.0" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84ae898104f7c99db06231160770f3e40dad6eb9021daddc0fedfa3e41dff10a" +checksum = "8e0e1128f85ce3fca66e435e08aa2089a2689c1c48ce97803e13f63124058462" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -3715,26 +3895,23 @@ dependencies = [ [[package]] name = "range-alloc" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63e935c45e09cc6dcf00d2f0b2d630a58f4095320223d47fc68918722f0538b6" +checksum = 
"9c8a99fddc9f0ba0a85884b8d14e3592853e787d581ca1816c91349b10e4eeab" [[package]] name = "raw-window-handle" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed7e3d950b66e19e0c372f3fa3fbbcf85b1746b571f74e0c2af6042a5c93420a" -dependencies = [ - "cty", -] +checksum = "f2ff9a1f06a88b01621b7ae906ef0211290d1c8a168a15542486a8f61c0833b9" [[package]] name = "raw_mesh" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", "bytes", - "clap 4.1.4", + "clap 4.2.1", "gltf", "mimalloc", "rerun", @@ -3770,7 +3947,7 @@ dependencies = [ [[package]] name = "re_analytics" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", "crossbeam", @@ -3791,9 +3968,9 @@ dependencies = [ [[package]] name = "re_arrow_store" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "anyhow", "arrow2", "arrow2_convert", @@ -3818,7 +3995,7 @@ dependencies = [ [[package]] name = "re_build_build_info" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", "time 0.3.20", @@ -3826,20 +4003,20 @@ dependencies = [ [[package]] name = "re_build_info" -version = "0.4.0" +version = "0.6.0-alpha.0" [[package]] name = "re_build_web_viewer" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "cargo_metadata", ] [[package]] name = "re_data_store" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "criterion", "document-features", "itertools", @@ -3860,14 +4037,14 @@ dependencies = [ [[package]] name = "re_error" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", ] [[package]] name = "re_format" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "arrow2", "arrow2_convert", @@ -3877,7 +4054,7 @@ dependencies = [ [[package]] name = "re_int_histogram" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "criterion", "insta", @@ -3888,7 +4065,7 @@ dependencies = 
[ [[package]] name = "re_log" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "env_logger", "js-sys", @@ -3901,7 +4078,7 @@ dependencies = [ [[package]] name = "re_log_encoding" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "criterion", "ehttp", @@ -3926,9 +4103,9 @@ dependencies = [ [[package]] name = "re_log_types" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "array-init", "arrow2", "arrow2_convert", @@ -3964,9 +4141,9 @@ dependencies = [ [[package]] name = "re_memory" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "backtrace", "emath", "instant", @@ -3984,7 +4161,7 @@ dependencies = [ [[package]] name = "re_query" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "arrow2", "criterion", @@ -4002,13 +4179,13 @@ dependencies = [ [[package]] name = "re_renderer" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "anyhow", "arrow2", "async-executor", - "bitflags", + "bitflags 1.3.2", "bytemuck", "clean-path", "console_error_panic_hook", @@ -4026,6 +4203,7 @@ dependencies = [ "itertools", "log", "macaw", + "never", "notify", "ordered-float", "parking_lot 0.12.1", @@ -4048,14 +4226,13 @@ dependencies = [ "web-sys", "wgpu", "wgpu-core", - "wgpu-hal", "winit", "zip", ] [[package]] name = "re_sdk" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "arrow2_convert", "document-features", @@ -4075,9 +4252,9 @@ dependencies = [ [[package]] name = "re_sdk_comms" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "anyhow", "bincode", "crossbeam", @@ -4091,7 +4268,7 @@ dependencies = [ [[package]] name = "re_smart_channel" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "crossbeam", "instant", @@ -4099,9 +4276,9 @@ dependencies = [ [[package]] name = "re_string_interner" -version = "0.4.0" +version = "0.6.0-alpha.0" 
dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "nohash-hasher", "once_cell", "parking_lot 0.12.1", @@ -4110,9 +4287,9 @@ dependencies = [ [[package]] name = "re_tensor_ops" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "ndarray", "re_log_types", "serde", @@ -4120,7 +4297,7 @@ dependencies = [ [[package]] name = "re_tuid" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "arrow2", "arrow2_convert", @@ -4134,7 +4311,7 @@ dependencies = [ [[package]] name = "re_ui" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "eframe", "egui", @@ -4152,20 +4329,23 @@ dependencies = [ [[package]] name = "re_viewer" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ - "ahash 0.8.2", + "ahash 0.8.3", "anyhow", "arboard", + "async-std", "bytemuck", "cocoa", "console_error_panic_hook", + "crossbeam-channel", "eframe", "egui", "egui-wgpu", "egui_dock", "egui_extras", "enumset", + "ewebsock", "glam", "half 2.2.1", "image", @@ -4179,6 +4359,8 @@ dependencies = [ "poll-promise", "puffin", "puffin_http", + "pyo3", + "pyo3-asyncio", "re_analytics", "re_arrow_store", "re_build_build_info", @@ -4198,10 +4380,15 @@ dependencies = [ "re_ws_comms", "rfd", "serde", + "serde_json", "slotmap", "smallvec", + "strum 0.24.1", + "strum_macros 0.24.3", "thiserror", "time 0.3.20", + "tokio", + "url", "uuid", "vec1", "wasm-bindgen-futures", @@ -4212,7 +4399,7 @@ dependencies = [ [[package]] name = "re_web_viewer_server" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "cargo_metadata", "ctrlc", @@ -4229,7 +4416,7 @@ dependencies = [ [[package]] name = "re_ws_comms" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", "bincode", @@ -4253,16 +4440,16 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = 
"redox_syscall" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb02a9aee8e8c7ad8d86890f1e16b49e0bbbffc9961ff3788c31d57c98bcbf03" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -4278,9 +4465,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.7.0" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a" +checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" dependencies = [ "aho-corasick", "memchr", @@ -4289,23 +4476,23 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.28" +version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "renderdoc-sys" -version = "0.7.1" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1382d1f0a252c4bf97dc20d979a2fdd05b024acd7c2ed0f7595d7817666a157" +checksum = "216080ab382b992234dda86873c18d4c48358f5cfcb70fd693d7f6f2131b628b" [[package]] name = "rerun" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "anyhow", "backtrace", - "clap 4.1.4", + "clap 4.2.1", "ctrlc", "document-features", "itertools", @@ -4333,7 +4520,7 @@ dependencies = [ [[package]] name = "rerun_py" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "arrow2", "document-features", @@ -4384,7 +4571,7 @@ dependencies = [ "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "windows", + "windows 0.44.0", ] [[package]] @@ -4431,13 +4618,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"300a51053b1cb55c80b7a9fde4120726ddf25ca241a1cbb926626f62fb136bff" dependencies = [ "base64 0.13.1", - "bitflags", + "bitflags 1.3.2", "serde", ] [[package]] name = "run_wasm" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "cargo-run-wasm", "pico-args", @@ -4446,9 +4633,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" +checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b" [[package]] name = "rustc-hash" @@ -4467,37 +4654,23 @@ dependencies = [ [[package]] name = "rustix" -version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fdebc4b395b7fbb9ab11e462e20ed9051e7b16e42d24042c776eca0ac81b03" -dependencies = [ - "bitflags", - "errno 0.2.8", - "io-lifetimes", - "libc", - "linux-raw-sys 0.1.4", - "windows-sys 0.42.0", -] - -[[package]] -name = "rustix" -version = "0.37.3" +version = "0.37.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b24138615de35e32031d041a09032ef3487a616d901ca4db224e7d557efae2" +checksum = "0e78cc525325c06b4a7ff02db283472f3c042b7ff0c391f96c6d5ac6f4f91b75" dependencies = [ - "bitflags", - "errno 0.3.0", + "bitflags 1.3.2", + "errno", "io-lifetimes", "libc", - "linux-raw-sys 0.3.0", + "linux-raw-sys", "windows-sys 0.45.0", ] [[package]] name = "rustls" -version = "0.20.7" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "539a2bfe908f471bfa933876bd1eb6a19cf2176d375f82ef7f99530a40e48c2c" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" dependencies = [ "log", "ring", @@ -4507,15 +4680,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.9" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" +checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06" [[package]] name = "ruzstd" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffae8df4aa221781b715c27bbed0fac16b6f1e2643efb7af8a24dfc78d444493" +checksum = "9a15e661f0f9dac21f3494fe5d23a6338c0ac116a2d22c2b63010acd89467ffe" dependencies = [ "byteorder", "thiserror", @@ -4524,9 +4697,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.11" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" [[package]] name = "safemem" @@ -4557,9 +4730,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "scratch" -version = "1.0.2" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898" +checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" [[package]] name = "sct" @@ -4586,47 +4759,47 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.14" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.147" +version = "1.0.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965" +checksum = "3c04e8343c3daeec41f58990b9d77068df31209f2af111e059e9fe9646693065" dependencies = [ "serde_derive", ] [[package]] name = "serde_bytes" -version = "0.11.7" +version = 
"0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfc50e8183eeeb6178dcb167ae34a8051d63535023ae38b5d8d12beae193d37b" +checksum = "416bda436f9aab92e02c8e10d49a15ddd339cea90b6e340fe51ed97abb548294" dependencies = [ "serde", ] [[package]] name = "serde_derive" -version = "1.0.147" +version = "1.0.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852" +checksum = "4c614d17805b093df4b147b51339e7e44bf05ef59fba1e45d83500bcfb4d8585" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] name = "serde_json" -version = "1.0.89" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db" +checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744" dependencies = [ "itoa", "ryu", @@ -4635,29 +4808,29 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395627de918015623b32e7669714206363a7fc00382bf477e72c1f7533e8eafc" +checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] name = "serde_test" -version = "1.0.149" +version = "1.0.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3434c4787dcd7c8c0837ffbb01e6e34091f8983b2df9655e66393a867f99f7aa" +checksum = "f259aa64e48efaf5a4fea11f97cacb109f7fc3ae9db7244cbb40c01c7faf42bc" dependencies = [ "serde", ] [[package]] name = "sha-1" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" 
dependencies = [ "cfg-if", "cpufeatures", @@ -4692,9 +4865,9 @@ dependencies = [ [[package]] name = "signal-hook" -version = "0.3.14" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" +checksum = "732768f1176d21d09e076c23a93123d40bba92d50c4058da34d45c8de8e682b9" dependencies = [ "libc", "signal-hook-registry", @@ -4713,9 +4886,9 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" dependencies = [ "libc", ] @@ -4734,9 +4907,9 @@ checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf" [[package]] name = "slab" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" dependencies = [ "autocfg", ] @@ -4777,13 +4950,13 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f307c47d32d2715eb2e0ece5589057820e0e5e70d07c247d1063e844e107f454" dependencies = [ - "bitflags", + "bitflags 1.3.2", "calloop", "dlib", "lazy_static", "log", "memmap2", - "nix 0.24.2", + "nix 0.24.3", "pkg-config", "wayland-client", "wayland-cursor", @@ -4802,9 +4975,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" dependencies = [ "libc", "winapi", @@ -4822,7 +4995,7 @@ version = "0.2.0+1.5.4" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "246bfa38fe3db3f1dfc8ca5a2cdeb7348c78be2112740cc0ec8ef18b6d94f830" dependencies = [ - "bitflags", + "bitflags 1.3.2", "num-traits", ] @@ -4881,7 +5054,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -4890,11 +5063,11 @@ version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ - "heck 0.4.0", + "heck 0.4.1", "proc-macro2", "quote", "rustversion", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -4905,9 +5078,9 @@ checksum = "fa7986063f7c0ab374407e586d7048a3d5aac94f103f751088bf398e07cd5400" [[package]] name = "syn" -version = "1.0.103" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", @@ -4916,9 +5089,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.15" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +checksum = "79d9531f94112cfc3e4c8f5f02cb2b58f72c97b7efd85f70203cc6d8efda5927" dependencies = [ "proc-macro2", "quote", @@ -4933,15 +5106,15 @@ checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", "unicode-xid", ] [[package]] name = "sysinfo" -version = "0.28.3" +version = "0.28.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f69e0d827cce279e61c2f3399eb789271a8f136d8245edef70f06e3c9601a670" +checksum = "b4c2f3ca6693feb29a89724516f016488e9aafc7f37264f898593ee4b942f31b" dependencies = [ "cfg-if", "core-foundation-sys", @@ -4953,45 
+5126,35 @@ dependencies = [ [[package]] name = "target-lexicon" -version = "0.12.5" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9410d0f6853b1d94f0e519fb95df60f29d2c1eff2d921ffdf01a4c8a3b54f12d" +checksum = "8ae9980cab1db3fceee2f6c6f643d5d8de2997c58ee8d25fb0cc8a9e9e7348e5" [[package]] name = "tempfile" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" +checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" dependencies = [ "cfg-if", "fastrand", - "redox_syscall 0.2.16", - "rustix 0.36.7", - "windows-sys 0.42.0", + "redox_syscall 0.3.5", + "rustix", + "windows-sys 0.45.0", ] [[package]] name = "termcolor" -version = "1.1.3" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" dependencies = [ "winapi-util", ] -[[package]] -name = "terminal_size" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "test_image_memory" -version = "0.4.0" +version = "0.6.0-alpha.0" dependencies = [ "mimalloc", "re_format", @@ -5006,22 +5169,22 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.37" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.37" +version = "1.0.40" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] @@ -5037,9 +5200,9 @@ dependencies = [ [[package]] name = "tiff" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f17def29300a156c19ae30814710d9c63cd50288a49c6fd3a10ccfbe4cf886fd" +checksum = "7449334f9ff2baf290d55d73983a7d6fa15e01198faef72af07e2a8db851e471" dependencies = [ "flate2", "jpeg-decoder", @@ -5048,9 +5211,9 @@ dependencies = [ [[package]] name = "time" -version = "0.1.44" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" dependencies = [ "libc", "wasi 0.10.0+wasi-snapshot-preview1", @@ -5096,9 +5259,9 @@ dependencies = [ [[package]] name = "tiny-skia" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ae12c22601b6853f4d93abb178e13bf0e1cc8e2454100c85d4d3a59ac71b3f7" +checksum = "bfef3412c6975196fdfac41ef232f910be2bb37b9dd3313a49a1a6bc815a5bdb" dependencies = [ "arrayref", "arrayvec", @@ -5110,9 +5273,9 @@ dependencies = [ [[package]] name = "tiny-skia-path" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd665853ce64402daabef6edda442dbb4f8ee93ea80957b66ba1af419f11a104" +checksum = "a4b5edac058fc98f51c935daea4d805b695b38e2f151241cad125ade2a2ac20d" dependencies = [ "arrayref", "bytemuck", @@ -5140,29 +5303,28 @@ dependencies = [ [[package]] name = "tinyvec_macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tobj" -version = "3.2.3" +version = "3.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deacee3abcc4fd8ff3f0f7c08d4583ab51753ed1d5a3acacd6d5773f640c27d6" +checksum = "2d0bde887a49e2e09f30ba3b454cdba3fbc971703e436c527f9f69a035b45b9b" dependencies = [ - "ahash 0.7.6", + "ahash 0.8.3", ] [[package]] name = "tokio" -version = "1.26.0" +version = "1.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64" +checksum = "d0de47a4eecbe11f498978a9b29d792f0d2692d1dd003650c24c76510e3bc001" dependencies = [ "autocfg", "bytes", "libc", - "memchr", "mio", "num_cpus", "pin-project-lite", @@ -5173,13 +5335,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "1.8.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" +checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 2.0.12", ] [[package]] @@ -5211,9 +5373,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.4" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740" +checksum = "5427d89453009325de0d8f342c9490009f76e999cb7672d77e46267448f7e6b2" dependencies = [ "bytes", "futures-core", @@ -5224,12 +5386,20 @@ dependencies = [ ] [[package]] -name = "toml" -version = "0.5.9" +name = "toml_datetime" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" + +[[package]] +name = "toml_edit" 
+version = "0.19.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" +checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" dependencies = [ - "serde", + "indexmap", + "toml_datetime", + "winnow", ] [[package]] @@ -5259,7 +5429,7 @@ checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -5273,15 +5443,15 @@ dependencies = [ [[package]] name = "try-lock" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" [[package]] name = "ttf-parser" -version = "0.17.1" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "375812fa44dab6df41c195cd2f7fecb488f6c09fbaafb62807488cefab642bff" +checksum = "0609f771ad9c6155384897e1df4d948e692667cc0588548b68eb44d052b27633" [[package]] name = "tungstenite" @@ -5326,9 +5496,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" [[package]] name = "uds_windows" @@ -5342,15 +5512,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.8" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.5" +version = "1.0.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" [[package]] name = "unicode-normalization" @@ -5363,9 +5533,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" @@ -5381,9 +5551,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "unindent" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112" +checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" [[package]] name = "untrusted" @@ -5432,17 +5602,33 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + [[package]] name = "uuid" -version = "1.2.2" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c" +checksum = "1674845326ee10d37ca60470760d4288a6f80f304007d92e5c53bab78c9cfd79" dependencies = [ "getrandom", "serde", "wasm-bindgen", ] +[[package]] +name = "value-bag" +version = "1.0.0-alpha.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55" 
+dependencies = [ + "ctor", + "version_check", +] + [[package]] name = "vec1" version = "1.10.1" @@ -5469,12 +5655,11 @@ checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" [[package]] name = "walkdir" -version = "2.3.2" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" dependencies = [ "same-file", - "winapi", "winapi-util", ] @@ -5501,7 +5686,7 @@ dependencies = [ "heck 0.3.3", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -5547,7 +5732,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", "wasm-bindgen-shared", ] @@ -5616,7 +5801,7 @@ checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5712,10 +5897,10 @@ version = "0.29.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f3b068c05a039c9f755f881dc50f01732214f5685e379829759088967c46715" dependencies = [ - "bitflags", + "bitflags 1.3.2", "downcast-rs", "libc", - "nix 0.24.2", + "nix 0.24.3", "scoped-tls", "wayland-commons", "wayland-scanner", @@ -5728,7 +5913,7 @@ version = "0.29.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8691f134d584a33a6606d9d717b95c4fa20065605f798a3f350d78dced02a902" dependencies = [ - "nix 0.24.2", + "nix 0.24.3", "once_cell", "smallvec", "wayland-sys", @@ -5740,7 +5925,7 @@ version = "0.29.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6865c6b66f13d6257bef1cd40cbfe8ef2f150fb8ebbdb1e8e873455931377661" dependencies = [ - "nix 0.24.2", + "nix 0.24.3", "wayland-client", "xcursor", ] @@ -5751,7 +5936,7 @@ version = "0.29.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b950621f9354b322ee817a23474e479b34be96c2e909c14f7bc0100e9a970bc6" dependencies = [ - "bitflags", + "bitflags 1.3.2", "wayland-client", "wayland-commons", "wayland-scanner", @@ -5791,9 +5976,9 @@ dependencies = [ [[package]] name = "webbrowser" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "769f1a8831de12cad7bd6f9693b15b1432d93a151557810f617f626af823acae" +checksum = "579cc485bd5ce5bfa0d738e4921dd0b956eca9800be1fd2e5257ebe95bc4617e" dependencies = [ "core-foundation", "dirs", @@ -5818,9 +6003,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.5" +version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368bfe657969fb01238bb756d351dcade285e0f6fcbd36dcb23359a5169975be" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" dependencies = [ "webpki", ] @@ -5831,20 +6016,10 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" -[[package]] -name = "wepoll-ffi" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" -dependencies = [ - "cc", -] - [[package]] name = "wgpu" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d745a1b6d91d85c33defbb29f0eee0450e1d2614d987e14bf6baf26009d132d7" +version = "0.16.0" +source = "git+https://github.com/rerun-io/wgpu?rev=de497aeda152a3515bac5eb4bf1b17f1757b9dac#de497aeda152a3515bac5eb4bf1b17f1757b9dac" dependencies = [ "arrayvec", "cfg-if", @@ -5866,20 +6041,19 @@ dependencies = [ [[package]] name = "wgpu-core" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7131408d940e335792645a98f03639573b0480e9e2e7cddbbab74f7c6d9f3fff" +version = "0.16.0" +source = "git+https://github.com/rerun-io/wgpu?rev=de497aeda152a3515bac5eb4bf1b17f1757b9dac#de497aeda152a3515bac5eb4bf1b17f1757b9dac" dependencies = [ "arrayvec", "bit-vec", - "bitflags", + "bitflags 2.1.0", "codespan-reporting", - "fxhash", "log", "naga", "parking_lot 0.12.1", "profiling", "raw-window-handle", + "rustc-hash", "smallvec", "thiserror", "web-sys", @@ -5889,20 +6063,18 @@ dependencies = [ [[package]] name = "wgpu-hal" -version = "0.15.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdcf61a283adc744bb5453dd88ea91f3f86d5ca6b027661c6c73c7734ae0288b" +version = "0.16.0" +source = "git+https://github.com/rerun-io/wgpu?rev=de497aeda152a3515bac5eb4bf1b17f1757b9dac#de497aeda152a3515bac5eb4bf1b17f1757b9dac" dependencies = [ "android_system_properties", "arrayvec", "ash", "bit-set", - "bitflags", + "bitflags 2.1.0", "block", "core-graphics-types", "d3d12", "foreign-types", - "fxhash", "glow", "gpu-alloc", "gpu-allocator", @@ -5921,6 +6093,7 @@ dependencies = [ "range-alloc", "raw-window-handle", "renderdoc-sys", + "rustc-hash", "smallvec", "thiserror", "wasm-bindgen", @@ -5931,11 +6104,10 @@ dependencies = [ [[package]] name = "wgpu-types" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32444e121b0bd00cb02c0de32fde457a9491bd44e03e7a5db6df9b1da2f6f110" +version = "0.16.0" +source = "git+https://github.com/rerun-io/wgpu?rev=de497aeda152a3515bac5eb4bf1b17f1757b9dac#de497aeda152a3515bac5eb4bf1b17f1757b9dac" dependencies = [ - "bitflags", + "bitflags 2.1.0", "js-sys", "web-sys", ] @@ -5953,9 +6125,9 @@ dependencies = [ [[package]] name = "widestring" -version = "0.5.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983" +checksum = 
"653f141f39ec16bba3c5abe400a0c60da7468261cc2cbf36805022876bc721a8" [[package]] name = "winapi" @@ -6006,6 +6178,15 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-sys" version = "0.42.0" @@ -6032,9 +6213,9 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", @@ -6047,54 +6228,54 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_msvc" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_i686_gnu" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_msvc" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_x86_64_gnu" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_msvc" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "winit" -version = "0.28.1" +version = "0.28.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4755d4ba0e3d30fc7beef2095e246b1e6a6fad0717608bcb87a2df4b003bcf" +checksum = "4f504e8c117b9015f618774f8d58cd4781f5a479bc41079c064f974cbb253874" dependencies = [ "android-activity", - "bitflags", + "bitflags 1.3.2", "cfg_aliases", "core-foundation", "core-graphics", @@ -6109,7 +6290,7 @@ dependencies = [ "orbclient", "percent-encoding", "raw-window-handle", - "redox_syscall 0.3.4", + "redox_syscall 0.3.5", "sctk-adwaita", "smithay-client-toolkit", "wasm-bindgen", @@ -6122,6 +6303,15 @@ dependencies = [ "x11-dl", ] +[[package]] +name = "winnow" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae8970b36c66498d8ff1d66685dc86b91b29db0c7739899012f63a63814b4b28" +dependencies = [ + "memchr", +] + [[package]] name = "wit-parser" version = 
"0.2.0" @@ -6188,12 +6378,12 @@ dependencies = [ [[package]] name = "x11-dl" -version = "2.20.0" +version = "2.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c83627bc137605acc00bb399c7b908ef460b621fc37c953db2b09f88c449ea6" +checksum = "38735924fedd5314a6e548792904ed8c6de6636285cb9fec04d5b1db85c1516f" dependencies = [ - "lazy_static", "libc", + "once_cell", "pkg-config", ] @@ -6204,7 +6394,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "592b4883219f345e712b3209c62654ebda0bb50887f330cbd018d0f654bfd507" dependencies = [ "gethostname", - "nix 0.24.2", + "nix 0.24.3", "winapi", "winapi-wsapoll", "x11rb-protocol", @@ -6216,7 +6406,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56b245751c0ac9db0e006dc812031482784e434630205a93c73cfefcaabeac67" dependencies = [ - "nix 0.24.2", + "nix 0.24.3", ] [[package]] @@ -6299,7 +6489,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 1.0.103", + "syn 1.0.109", ] [[package]] @@ -6315,9 +6505,9 @@ dependencies = [ [[package]] name = "zip" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "537ce7411d25e54e8ae21a7ce0b15840e7bfcff15b51d697ec3266cc76bdf080" +checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef" dependencies = [ "byteorder", "crc32fast", @@ -6346,12 +6536,13 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "2.0.3+zstd.1.5.2" +version = "2.0.7+zstd.1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44ccf97612ac95f3ccb89b2d7346b345e52f1c3019be4984f0455fb4ba991f8a" +checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5" dependencies = [ "cc", "libc", + "pkg-config", ] [[package]] @@ -6377,7 +6568,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", "zvariant_utils", ] @@ -6389,5 +6580,5 @@ 
checksum = "53b22993dbc4d128a17a3b6c92f1c63872dd67198537ee728d8b5d7c40640a8b" dependencies = [ "proc-macro2", "quote", - "syn 1.0.103", + "syn 1.0.109", ] diff --git a/Cargo.toml b/Cargo.toml index 6a433c6954ee..955953fc76c3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,39 +16,39 @@ include = ["../../LICENSE-APACHE", "../../LICENSE-MIT", "**/*.rs", "Cargo.toml"] license = "MIT OR Apache-2.0" repository = "https://github.com/rerun-io/rerun" rust-version = "1.67" -version = "0.4.0" +version = "0.6.0-alpha.0" [workspace.dependencies] # When using alpha-release, always use exact version, e.g. `version = "=0.x.y-alpha.z" # This is because we treat alpha-releases as incompatible, but semver doesn't. # In particular: if we compile rerun 0.3.0-alpha.0 we only want it to use # re_log_types 0.3.0-alpha.0, NOT 0.3.0-alpha.4 even though it is newer and semver-compatible. -re_analytics = { path = "crates/re_analytics", version = "0.4.0" } -re_arrow_store = { path = "crates/re_arrow_store", version = "0.4.0" } -re_build_build_info = { path = "crates/re_build_build_info", version = "0.4.0" } -re_build_info = { path = "crates/re_build_info", version = "0.4.0" } -re_build_web_viewer = { path = "crates/re_build_web_viewer", version = "0.4.0" } -re_data_store = { path = "crates/re_data_store", version = "0.4.0" } -re_error = { path = "crates/re_error", version = "0.4.0" } -re_format = { path = "crates/re_format", version = "0.4.0" } -re_int_histogram = { path = "crates/re_int_histogram", version = "0.4.0" } -re_log = { path = "crates/re_log", version = "0.4.0" } -re_log_encoding = { path = "crates/re_log_encoding", version = "0.4.0" } -re_log_types = { path = "crates/re_log_types", version = "0.4.0" } -re_memory = { path = "crates/re_memory", version = "0.4.0" } -re_query = { path = "crates/re_query", version = "0.4.0" } -re_renderer = { path = "crates/re_renderer", version = "0.4.0" } -re_sdk = { path = "crates/re_sdk", version = "0.4.0" } -re_sdk_comms = { path = 
"crates/re_sdk_comms", version = "0.4.0" } -re_smart_channel = { path = "crates/re_smart_channel", version = "0.4.0" } -re_string_interner = { path = "crates/re_string_interner", version = "0.4.0" } -re_tensor_ops = { path = "crates/re_tensor_ops", version = "0.4.0" } -re_tuid = { path = "crates/re_tuid", version = "0.4.0" } -re_ui = { path = "crates/re_ui", version = "0.4.0" } -re_viewer = { path = "crates/re_viewer", version = "0.4.0" } -re_web_viewer_server = { path = "crates/re_web_viewer_server", version = "0.4.0" } -re_ws_comms = { path = "crates/re_ws_comms", version = "0.4.0" } -rerun = { path = "crates/rerun", version = "0.4.0" } +re_sdk_comms = { path = "crates/re_sdk_comms", version = "=0.6.0-alpha.0" } +re_analytics = { path = "crates/re_analytics", version = "=0.6.0-alpha.0" } +re_arrow_store = { path = "crates/re_arrow_store", version = "=0.6.0-alpha.0" } +re_build_build_info = { path = "crates/re_build_build_info", version = "=0.6.0-alpha.0" } +re_build_info = { path = "crates/re_build_info", version = "=0.6.0-alpha.0" } +re_build_web_viewer = { path = "crates/re_build_web_viewer", version = "=0.6.0-alpha.0" } +re_data_store = { path = "crates/re_data_store", version = "=0.6.0-alpha.0" } +re_error = { path = "crates/re_error", version = "=0.6.0-alpha.0" } +re_format = { path = "crates/re_format", version = "=0.6.0-alpha.0" } +re_int_histogram = { path = "crates/re_int_histogram", version = "=0.6.0-alpha.0" } +re_log = { path = "crates/re_log", version = "=0.6.0-alpha.0" } +re_log_encoding = { path = "crates/re_log_encoding", version = "=0.6.0-alpha.0" } +re_log_types = { path = "crates/re_log_types", version = "=0.6.0-alpha.0" } +re_memory = { path = "crates/re_memory", version = "=0.6.0-alpha.0" } +re_query = { path = "crates/re_query", version = "=0.6.0-alpha.0" } +re_renderer = { path = "crates/re_renderer", version = "=0.6.0-alpha.0", default-features = false } +re_sdk = { path = "crates/re_sdk", version = "=0.6.0-alpha.0" } +re_smart_channel = { 
path = "crates/re_smart_channel", version = "=0.6.0-alpha.0" } +re_string_interner = { path = "crates/re_string_interner", version = "=0.6.0-alpha.0" } +re_tensor_ops = { path = "crates/re_tensor_ops", version = "=0.6.0-alpha.0" } +re_tuid = { path = "crates/re_tuid", version = "=0.6.0-alpha.0" } +re_ui = { path = "crates/re_ui", version = "=0.6.0-alpha.0" } +re_viewer = { path = "crates/re_viewer", version = "=0.6.0-alpha.0", default-features = false } +re_web_viewer_server = { path = "crates/re_web_viewer_server", version = "=0.6.0-alpha.0" } +re_ws_comms = { path = "crates/re_ws_comms", version = "=0.6.0-alpha.0" } +rerun = { path = "crates/rerun", version = "=0.6.0-alpha.0" } ahash = "0.8" anyhow = "1.0" @@ -59,10 +59,10 @@ comfy-table = { version = "6.1", default-features = false } ctrlc = { version = "3.0", features = ["termination"] } ecolor = "0.21.0" eframe = { version = "0.21.3", default-features = false } -egui = "0.21.0" +egui = { version = "0.21.0", features = ["extra_debug_asserts", "log"] } egui-wgpu = "0.21.0" egui_dock = "0.4" -egui_extras = "0.21.0" +egui_extras = { version = "0.21.0", features = ["log"] } emath = "0.21.0" enumset = "1.0.12" epaint = "0.21.0" @@ -85,9 +85,8 @@ thiserror = "1.0" time = { version = "0.3", features = ["wasm-bindgen"] } tinyvec = { version = "1.6", features = ["alloc", "rustc_1_55"] } tokio = "1.24" -wgpu = { version = "0.15.1", default-features = false } -wgpu-core = { version = "0.15.1", default-features = false } -wgpu-hal = { version = "0.15.4", default-features = false } +wgpu = { version = "0.16" } +wgpu-core = { version = "0.16" } [profile.dev] @@ -112,3 +111,17 @@ debug = true # If that is not possible, patch to a branch that has a PR open on the upstream repo. # As a last resport, patch with a commit to our own repository. # ALWAYS document what PR the commit hash is part of, or when it was merged into the upstream trunk. 
+ +# TODO(andreas/emilk): Update to a stable egui version +# wgpu 0.16 support, device configuration dependent on adapter +ecolor = { git = "https://github.com/emilk/egui", rev = "f76eefb98d23cbf71989255aafe75a07d343f6ed" } +eframe = { git = "https://github.com/emilk/egui", rev = "f76eefb98d23cbf71989255aafe75a07d343f6ed" } +egui = { git = "https://github.com/emilk/egui", rev = "f76eefb98d23cbf71989255aafe75a07d343f6ed" } +egui-wgpu = { git = "https://github.com/emilk/egui", rev = "f76eefb98d23cbf71989255aafe75a07d343f6ed" } +egui_extras = { git = "https://github.com/emilk/egui", rev = "f76eefb98d23cbf71989255aafe75a07d343f6ed" } +emath = { git = "https://github.com/emilk/egui", rev = "f76eefb98d23cbf71989255aafe75a07d343f6ed" } + +# TODO(andreas): Either work around this issue in wgpu-egui (never discard command buffers) or wait for wgpu patch release. +# Fix for command buffer dropping crash https://github.com/gfx-rs/wgpu/pull/3726 +wgpu = { git = "https://github.com/rerun-io/wgpu", rev = "de497aeda152a3515bac5eb4bf1b17f1757b9dac" } +wgpu-core = { git = "https://github.com/rerun-io/wgpu", rev = "de497aeda152a3515bac5eb4bf1b17f1757b9dac" } diff --git a/README.md b/README.md index bae9d9f0f30c..ee844ca6a8bf 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@

- PyPi + PyPi crates.io MIT Apache @@ -32,57 +32,59 @@ rr.log_rect("car", bbox)

## Getting started -* **Python**: `pip install rerun-sdk` -* **Rust**: `cargo add rerun` -* **C / C++**: Coming soon + +- **Python**: `pip install depthai-viewer` +- **Rust**: `cargo add rerun` +- **C / C++**: Coming soon ### Rerun Viewer binary + Both the Python and Rust library can start the Rerun Viewer, but to stream log data over the network or load our `.rrd` data files you also need the `rerun` binary. -It can be installed with `pip install rerun-sdk` or with `cargo install rerun`. +It can be installed with `pip install depthai-viewer` or with `cargo install rerun`. You should now be able to run `rerun --help` in any terminal. - ### Documentation + - 📚 [High-level docs](http://rerun.io/docs) - ⚙️ [Examples](examples) - 🐍 [Python API docs](https://ref.rerun.io/docs/python) - 🦀 [Rust API docs](https://docs.rs/rerun/) - ⁉️ [Troubleshooting](https://www.rerun.io/docs/getting-started/troubleshooting) - ## Status + We are in early beta. There are many features we want to add, and the API is still evolving. _Expect breaking changes!_ Some shortcomings: -* Big points clouds (1M+) are slow ([#1136](https://github.com/rerun-io/rerun/issues/1136)) -* The data you want to visualize must fit in RAM. + +- Big points clouds (1M+) are slow ([#1136](https://github.com/rerun-io/rerun/issues/1136)) +- The data you want to visualize must fit in RAM. - See for how to bound memory use - We plan on having a disk-based data store some time in the future -* The Rust library takes a long time to compile +- The Rust library takes a long time to compile - We have way too many big dependencies, and we are planning on improving the situation ([#1316](https://github.com/rerun-io/rerun/pull/1316)) - ## Business model + Rerun uses an open-core model. Everything in this repository will stay open source and free (both as in beer and as in freedom). In the future, Rerun will offer a commercial product that builds on top of the core free project. 
The Rerun open source project targets the needs of individual developers. The commercial product targets the needs specific to teams that build and run computer vision and robotics products. - # Development -* [`ARCHITECTURE.md`](ARCHITECTURE.md) -* [`BUILD.md`](BUILD.md) -* [`rerun_py/README.md`](rerun_py/README.md) - build instructions for Python SDK -* [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md) -* [`CODE_STYLE.md`](CODE_STYLE.md) -* [`CONTRIBUTING.md`](CONTRIBUTING.md) -* [`RELEASES.md`](RELEASES.md) +- [`ARCHITECTURE.md`](ARCHITECTURE.md) +- [`BUILD.md`](BUILD.md) +- [`rerun_py/README.md`](rerun_py/README.md) - build instructions for Python SDK +- [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md) +- [`CODE_STYLE.md`](CODE_STYLE.md) +- [`CONTRIBUTING.md`](CONTRIBUTING.md) +- [`RELEASES.md`](RELEASES.md) ## Installing a pre-release Python SDK diff --git a/RELEASES.md b/RELEASES.md index 6ab1bded58fd..c83448189c04 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,24 +1,26 @@ # Releases and versioning -This document describes the current release and versioning strategy. This strategy is likely to change as Rerun matures. +This document describes the current release and versioning strategy. This strategy is likely to change as Rerun matures. ## See also -* [`ARCHITECTURE.md`](ARCHITECTURE.md) -* [`BUILD.md`](BUILD.md) -* [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md) -* [`CODE_STYLE.md`](CODE_STYLE.md) -* [`CONTRIBUTING.md`](CONTRIBUTING.md) +- [`ARCHITECTURE.md`](ARCHITECTURE.md) +- [`BUILD.md`](BUILD.md) +- [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md) +- [`CODE_STYLE.md`](CODE_STYLE.md) +- [`CONTRIBUTING.md`](CONTRIBUTING.md) ## Release Cadence -New Rerun versions are released every two weeks. Sometimes we do out-of-schedule patch releases. +New Rerun versions are released every two weeks. Sometimes we do out-of-schedule patch releases. 
## Library versioning and release cadence + Each release include new versions of: -* The Python SDK -* The Rust SDK -* All rust crates + +- The Python SDK +- The Rust SDK +- All rust crates We use semantic versioning. All versions are increased in lockstep, with a minor version bump each time (`0.1.0`, `0.2.0`, `0.3.0`, …). @@ -28,121 +30,127 @@ In rare cases we will do patch releases, e.g. `0.3.1`, when there is a critical We sometimes do pre-releases. Then we use the versioning `0.2.0-alpha.0` etc. - ## Data and communication versioning + We have not yet committed to any backwards or forwards compatibility. We tag all data files (`.rrd` files) and communication protocols with the rerun version number. If there is a version mismatch, a warning is logged, but an attempt is still made to load the older or newer data. - ## Releases + Release builds of the Python Wheels are triggered by pushing a release tag to GitHub in the form `v0.2.0`. If we are doing a patch release, we do a branch off of the latest release tag (e.g. `v0.3.0`) and cherry-pick any fixes we want into that branch. ### Release checklist + Go through this checklist from top to bottom, and check each item before moving onto the next. This is a living document. Strive to improve it on each new release. -* [ ] Create a release branch called `release-0.x.y` -* [ ] If it is a patch release branch off `latest` and cherry-pick the commits that should be included -* [ ] Update `CHANGELOG.md` with the new version number with: - * [ ] A one-line summary of the release - * [ ] A multi-line summary of the release - * [ ] A gif showing a major new feature - * [ ] Run `pip install GitPython && scripts/generate_changelog.py` - * [ ] Edit PR descriptions/labels to improve the generated changelog - * [ ] Copy-paste the results into `CHANGELOG.md`. 
- * [ ] Editorialize the changelog if necessary - * [ ] Make sure the changelog includes instructions for handling any breaking changes - * [ ] Commit and push the changelog -* [ ] Create a draft PR containing: - * [ ] One-line summary of the release - * [ ] A multi-line summary of the release - * [ ] A gif showing a major new feature -* [ ] Test the branch ([see below](#testing-a-release)) -* [ ] Open the PR up for review with the `⛴ release` label -* [ ] Bump version number in root `Cargo.toml`. -* [ ] Check that CI is green -* [ ] Publish the crates (see below) -* [ ] `git tag -a v0.x.y -m 'Release 0.x.y - summary'` - * `git push --tags` - * This will trigger a PyPI release when pushed -* [ ] `git pull --tags && git tag -d latest && git tag -a latest -m 'Latest release' && git push --tags origin latest --force` -* [ ] Manually trigger a new web viewer build and upload at https://github.com/rerun-io/rerun/actions/workflows/rust.yml -* [ ] Wait for CI to build release artifacts and publish them on GitHub and PyPI. -* [ ] Merge PR -* [ ] Edit the GitHub release at https://github.com/rerun-io/rerun/releases/edit/v0.x.0 - * [ ] Mark it as as the latest release - * [ ] Paste in the `CHANGELOG.md` -* [ ] Wait for wheel to appear on https://pypi.org/project/rerun-sdk/ -* [ ] Test the released Python and Rust libraries (see below) -* [ ] Wait for documentation to build: https://docs.rs/releases/queue -* [ ] Point to the latest release via instructions in . -* [ ] Post on: - * [ ] Community Discord - * [ ] Rerun Twitter - * [ ] Reddit? 
- +- [ ] Create a release branch called `release-0.x.y` +- [ ] If it is a patch release branch off `latest` and cherry-pick the commits that should be included +- [ ] Update `CHANGELOG.md` with the new version number with: + - [ ] A one-line summary of the release + - [ ] A multi-line summary of the release + - [ ] A gif showing a major new feature + - [ ] Run `pip install GitPython && scripts/generate_changelog.py` + - [ ] Edit PR descriptions/labels to improve the generated changelog + - [ ] Copy-paste the results into `CHANGELOG.md`. + - [ ] Editorialize the changelog if necessary + - [ ] Make sure the changelog includes instructions for handling any breaking changes + - [ ] Commit and push the changelog +- [ ] Create a draft PR containing: + - [ ] One-line summary of the release + - [ ] A multi-line summary of the release + - [ ] A gif showing a major new feature +- [ ] Test the branch ([see below](#testing-a-release)) +- [ ] Open the PR up for review with the `⛴ release` label +- [ ] Bump version number in root `Cargo.toml`. +- [ ] Check that CI is green +- [ ] Publish the crates (see below) +- [ ] `git tag -a v0.x.y -m 'Release 0.x.y - summary'` + - `git push --tags` + - This will trigger a PyPI release when pushed +- [ ] `git pull --tags && git tag -d latest && git tag -a latest -m 'Latest release' && git push --tags origin latest --force` +- [ ] Manually trigger a new web viewer build and upload at https://github.com/rerun-io/rerun/actions/workflows/rust.yml +- [ ] Wait for CI to build release artifacts and publish them on GitHub and PyPI. 
+- [ ] Merge PR +- [ ] Edit the GitHub release at https://github.com/rerun-io/rerun/releases/edit/v0.x.0 + - [ ] Mark it as as the latest release + - [ ] Paste in the `CHANGELOG.md` +- [ ] Wait for wheel to appear on https://pypi.org/project/depthai-viewer/ +- [ ] Test the released Python and Rust libraries (see below) +- [ ] Wait for documentation to build: https://docs.rs/releases/queue +- [ ] Point to the latest release via instructions in . +- [ ] Post on: + - [ ] Community Discord + - [ ] Rerun Twitter + - [ ] Reddit? ### Testing a release + Before pushing the release tag: - * [ ] `just py-run-all` - * [ ] Test the web viewer: - * [ ] `cargo run -p rerun --features web_viewer -- --web-viewer ../nyud.rrd` - * [ ] Test on: - * [ ] Chromium - * [ ] Firefox - * [ ] Mobile + +- [ ] `just py-run-all` +- [ ] Test the web viewer: + - [ ] `cargo run -p rerun --features web_viewer -- --web-viewer ../nyud.rrd` + - [ ] Test on: + - [ ] Chromium + - [ ] Firefox + - [ ] Mobile After tagging and the CI has published: - * [ ] Test the Python packages from PyPI: `pip install rerun_sdk==0.x.0a1` - * [ ] Test rust install version: `cargo install -f rerun@0.x.0-alpha.1 -F web_viewer && rerun --web-viewer api.rrd` - * [ ] Test rust crate: Modify Cargo.toml of any example to not point to the workspace - * [ ] run with `--serve` to test web player + +- [ ] Test the Python packages from PyPI: `pip install rerun_sdk==0.x.0a1` +- [ ] Test rust install version: `cargo install -f rerun@0.x.0-alpha.1 -F web_viewer && rerun --web-viewer api.rrd` +- [ ] Test rust crate: Modify Cargo.toml of any example to not point to the workspace + - [ ] run with `--serve` to test web player Checklist for testing alpha releases: -* Windows - * [ ] Python Wheel - * [ ] Web - * [ ] Native - * [ ] Rust crate - * [ ] Web - * [ ] Native - * [ ] Rust install - * [ ] Web - * [ ] Native -* Linux - * [ ] Python Wheel - * [ ] Web - * [ ] Native - * [ ] Rust crate - * [ ] Web - * [ ] Native - * [ ] Rust install - * 
[ ] Web - * [ ] Native -* Mac - * [ ] Python Wheel - * [ ] Web - * [ ] Native - * [ ] Rust crate - * [ ] Web - * [ ] Native - * [ ] Rust install - * [ ] Web - * [ ] Native +- Windows + - [ ] Python Wheel + - [ ] Web + - [ ] Native + - [ ] Rust crate + - [ ] Web + - [ ] Native + - [ ] Rust install + - [ ] Web + - [ ] Native +- Linux + - [ ] Python Wheel + - [ ] Web + - [ ] Native + - [ ] Rust crate + - [ ] Web + - [ ] Native + - [ ] Rust install + - [ ] Web + - [ ] Native +- Mac + - [ ] Python Wheel + - [ ] Web + - [ ] Native + - [ ] Rust crate + - [ ] Web + - [ ] Native + - [ ] Rust install + - [ ] Web + - [ ] Native ## Publishing + First login as https://crates.io/users/rerunio with and API key you get from Emil: ```bash cargo login $API_KEY ``` ------------------------------------------------------------------------------------------------ -!! IMPORTANT !! Shut off VSCode, and don't touch anything while `publish_crates.sh` is running! -!! IMPORTANT !! Read `publish_crates.sh` for details ------------------------------------------------------------------------------------------------ +--- + +!! IMPORTANT !! Shut off VSCode, and don't touch anything while `publish_crates.sh` is running! +!! IMPORTANT !! 
Read `publish_crates.sh` for details + +--- ./scripts/publish_crates.sh --execute diff --git a/ci_docker/Dockerfile b/ci_docker/Dockerfile index 80c2f73bf820..f3fa89019ce5 100644 --- a/ci_docker/Dockerfile +++ b/ci_docker/Dockerfile @@ -58,7 +58,8 @@ RUN set -eux; \ # Install some cargo tools we know we'll always need # We can't do this until after we've installed rust / cargo above RUN cargo install cargo-deny && \ - cargo install cargo-cranky + cargo install cargo-cranky && \ + cargo install cargo-benchcmp # Install the python build dependencies ADD rerun_py/requirements-build.txt requirements-build.txt diff --git a/clippy.toml b/clippy.toml index 4da41d009fbc..bd8f68dcbf99 100644 --- a/clippy.toml +++ b/clippy.toml @@ -1,4 +1,4 @@ -# There is also a clippy_wasm/clippy.toml which forbids some mthods that are not available in wasm. +# There is also a scripts/clippy_wasm/clippy.toml which forbids some methods that are not available in wasm. msrv = "1.67" @@ -47,7 +47,7 @@ disallowed-types = [ # Allow-list of words for markdown in dosctrings https://rust-lang.github.io/rust-clippy/master/index.html#doc_markdown doc-valid-idents = [ - # You must also update the same list in `clippy_wasm/clippy.toml`! + # You must also update the same list in `scripts/clippy_wasm/clippy.toml`! "GitHub", "GLB", "GLTF", diff --git a/crates/re_analytics/src/cli.rs b/crates/re_analytics/src/cli.rs index 7055563dcd92..4b146daa6996 100644 --- a/crates/re_analytics/src/cli.rs +++ b/crates/re_analytics/src/cli.rs @@ -83,7 +83,7 @@ const DETAILS: &str = " What data is collected? - The exact set of analytics events and parameters can be found here: https://github.com/rerun-io/rerun/blob/GIT_HASH/crates/re_viewer/src/viewer_analytics.rs - - We collect high level events about the usage of the Rerun Viewer. For example: + - We collect high level events about the usage of the Depthai Viewer. For example: - The event 'Viewer Opened' helps us estimate how often Rerun is used. 
- The event 'Data Source Connected' helps us understand if users tend to use live data sources or recordings most, which helps us prioritize features. diff --git a/crates/re_analytics/src/lib.rs b/crates/re_analytics/src/lib.rs index 9938887a5f55..8f346c3ea3df 100644 --- a/crates/re_analytics/src/lib.rs +++ b/crates/re_analytics/src/lib.rs @@ -211,7 +211,7 @@ const DISCLAIMER: &str = " help the Rerun team improve the library. Summary: - - We only collect high level events about the features used within the Rerun Viewer. + - We only collect high level events about the features used within the Depthai Viewer. - The actual data you log to Rerun, such as point clouds, images, or text logs, will never be collected. - We don't log IP addresses. diff --git a/crates/re_build_info/src/crate_version.rs b/crates/re_build_info/src/crate_version.rs index 9a29e8e734a7..a4c3ee5e04bf 100644 --- a/crates/re_build_info/src/crate_version.rs +++ b/crates/re_build_info/src/crate_version.rs @@ -55,7 +55,7 @@ impl CrateVersion { pub fn from_bytes([major, minor, patch, suffix_byte]: [u8; 4]) -> Self { let is_alpha = (suffix_byte & IS_ALPHA_BIT) != 0; let is_prerelease = (suffix_byte & IS_PRERELEASE_BIT) != 0; - let alpha_version = suffix_byte & 0b0111_1111; + let alpha_version = suffix_byte & !(IS_ALPHA_BIT | IS_PRERELEASE_BIT); Self { major, @@ -273,6 +273,22 @@ fn test_format_parse_roundtrip() { } } +#[test] +fn test_format_parse_roundtrip_bytes() { + let parse = CrateVersion::parse; + for version in [ + "0.2.0", + "1.2.3", + "12.23.24", + "12.23.24-alpha.31", + "12.23.24-alpha.31+foo", + ] { + let version = parse(version); + let bytes = version.to_bytes(); + assert_eq!(CrateVersion::from_bytes(bytes), version); + } +} + #[test] fn test_compatibility() { fn are_compatible(a: &str, b: &str) -> bool { diff --git a/crates/re_build_web_viewer/src/lib.rs b/crates/re_build_web_viewer/src/lib.rs index 8ec1d53f0a88..c5f1c179f10a 100644 --- a/crates/re_build_web_viewer/src/lib.rs +++ 
b/crates/re_build_web_viewer/src/lib.rs @@ -12,7 +12,7 @@ fn target_directory() -> Utf8PathBuf { } /// Build `re_viewer` as Wasm, generate .js bindings for it, and place it all into the `./web_viewer` folder. -pub fn build(release: bool) { +pub fn build(release: bool, webgpu: bool) { eprintln!("Building web viewer wasm…"); eprintln!("We assume you've already run ./scripts/setup_web.sh"); @@ -63,7 +63,13 @@ pub fn build(release: bool) { "wasm32-unknown-unknown", "--target-dir", target_wasm_dir.as_str(), + "--no-default-features", ]); + if webgpu { + cmd.arg("--features=analytics"); + } else { + cmd.arg("--features=analytics,webgl"); + } if release { cmd.arg("--release"); } @@ -71,6 +77,7 @@ pub fn build(release: bool) { // This is required to enable the web_sys clipboard API which egui_web uses // https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.Clipboard.html // https://rustwasm.github.io/docs/wasm-bindgen/web-sys/unstable-apis.html + // Furthermore, it's necessary for unstable WebGPU apis to work. 
cmd.env("RUSTFLAGS", "--cfg=web_sys_unstable_apis"); // When executing this script from a Rust build script, do _not_, under any circumstances, diff --git a/crates/re_build_web_viewer/src/main.rs b/crates/re_build_web_viewer/src/main.rs index 137cd401a742..1c3d2249d26f 100644 --- a/crates/re_build_web_viewer/src/main.rs +++ b/crates/re_build_web_viewer/src/main.rs @@ -2,6 +2,7 @@ use std::process::ExitCode; fn main() -> ExitCode { let mut release = None; + let mut webgpu = false; for arg in std::env::args().skip(1) { match arg.as_str() { @@ -17,6 +18,9 @@ fn main() -> ExitCode { assert!(release.is_none(), "Can't set both --release and --debug"); release = Some(true); } + "--webgpu" => { + webgpu = true; + } _ => { print_help(); return ExitCode::FAILURE; @@ -29,7 +33,7 @@ fn main() -> ExitCode { return ExitCode::FAILURE; }; - re_build_web_viewer::build(release); + re_build_web_viewer::build(release, webgpu); ExitCode::SUCCESS } @@ -41,6 +45,7 @@ fn print_help() { --debug: Build a debug binary --release: Compile for release, and run wasm-opt. NOTE: --release also removes debug symbols which are otherwise useful for in-browser profiling. + --webgpu: Enable WebGPU support (experimental). If not set the viewer will use WebGL instead. " ); } diff --git a/crates/re_data_store/src/entity_properties.rs b/crates/re_data_store/src/entity_properties.rs index 9929ca4c13c3..889a0a8cc2cd 100644 --- a/crates/re_data_store/src/entity_properties.rs +++ b/crates/re_data_store/src/entity_properties.rs @@ -44,8 +44,14 @@ pub struct EntityProperties { pub visible_history: ExtraQueryHistory, pub interactive: bool, + /// Enable color mapping? + /// /// What kind of color mapping should be applied (none, map, texture, transfer..)? pub color_mapper: EditableAutoValue, + /// Points to an entity with an albedo texture. + /// + /// Only relevant if [`Self::color_mapper`] is set to `AlbedoTexture`. + pub albedo_texture: Option, /// Distance of the projection plane (frustum far plane). 
/// @@ -80,6 +86,7 @@ impl Default for EntityProperties { backproject_depth: EditableAutoValue::Auto(true), depth_from_world_scale: EditableAutoValue::default(), backproject_radius_scale: EditableAutoValue::Auto(1.0), + albedo_texture: None, } } } @@ -94,7 +101,7 @@ impl EntityProperties { interactive: self.interactive && child.interactive, color_mapper: self.color_mapper.or(&child.color_mapper).clone(), - + albedo_texture: self.albedo_texture.clone().or(child.albedo_texture.clone()), pinhole_image_plane_distance: self .pinhole_image_plane_distance .or(&child.pinhole_image_plane_distance) @@ -170,25 +177,27 @@ impl std::fmt::Display for Colormap { pub enum ColorMapper { /// Use a well-known color map, pre-implemented as a wgsl module. Colormap(Colormap), - // TODO(cmc): support textures. + /// Point to an entity with an albedo texture. + AlbedoTexture, // TODO(cmc): support custom transfer functions. } +impl Default for ColorMapper { + #[inline] + fn default() -> Self { + Self::AlbedoTexture + } +} + impl std::fmt::Display for ColorMapper { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - ColorMapper::Colormap(colormap) => colormap.fmt(f), + ColorMapper::Colormap(colormap) => write!(f, "Map:{colormap}"), + ColorMapper::AlbedoTexture => write!(f, "Albedo texture"), } } } -impl Default for ColorMapper { - #[inline] - fn default() -> Self { - Self::Colormap(Colormap::default()) - } -} - // ---------------------------------------------------------------------------- /// Get the latest value for a given [`re_log_types::Component`]. 
diff --git a/crates/re_format/Cargo.toml b/crates/re_format/Cargo.toml index 198768511bc0..b502f8f6eb4e 100644 --- a/crates/re_format/Cargo.toml +++ b/crates/re_format/Cargo.toml @@ -19,4 +19,4 @@ all-features = true arrow2.workspace = true arrow2_convert.workspace = true comfy-table.workspace = true -re_tuid.workspace = true +re_tuid = { workspace = true, features = ["arrow2_convert"] } diff --git a/crates/re_log_types/src/arrow_msg.rs b/crates/re_log_types/src/arrow_msg.rs index 54e5c01b68e5..b83e24266d29 100644 --- a/crates/re_log_types/src/arrow_msg.rs +++ b/crates/re_log_types/src/arrow_msg.rs @@ -42,13 +42,13 @@ impl serde::Serialize for ArrowMsg { let mut writer = StreamWriter::new(&mut buf, Default::default()); writer .start(&self.schema, None) - .map_err(|e| serde::ser::Error::custom(e.to_string()))?; + .map_err(|err| serde::ser::Error::custom(err.to_string()))?; writer .write(&self.chunk, None) - .map_err(|e| serde::ser::Error::custom(e.to_string()))?; + .map_err(|err| serde::ser::Error::custom(err.to_string()))?; writer .finish() - .map_err(|e| serde::ser::Error::custom(e.to_string()))?; + .map_err(|err| serde::ser::Error::custom(err.to_string()))?; let mut inner = serializer.serialize_tuple(3)?; inner.serialize_element(&self.table_id)?; diff --git a/crates/re_log_types/src/component_types/arrow_convert_shims.rs b/crates/re_log_types/src/component_types/arrow_convert_shims.rs index 8d196842169d..b78ff5f4c065 100644 --- a/crates/re_log_types/src/component_types/arrow_convert_shims.rs +++ b/crates/re_log_types/src/component_types/arrow_convert_shims.rs @@ -79,6 +79,7 @@ impl<'a> Iterator for BufferBinaryArrayIter<'a> { /// Internal `ArrowArray` helper to iterate over a `BinaryArray` while exposing Buffer slices pub struct BufferBinaryArray; +#[cfg(not(target_os = "windows"))] extern "C" { fn do_not_call_into_iter(); // we never define this function, so the linker will fail } diff --git a/crates/re_log_types/src/component_types/imu.rs 
b/crates/re_log_types/src/component_types/imu.rs new file mode 100644 index 000000000000..bfcdf7566eb8 --- /dev/null +++ b/crates/re_log_types/src/component_types/imu.rs @@ -0,0 +1,25 @@ +use crate::{Component, EntityPath}; +use arrow2_convert::{ArrowDeserialize, ArrowField, ArrowSerialize}; + +use super::{Point3D, Quaternion}; + +#[derive(Clone, Debug, PartialEq, ArrowField, ArrowSerialize, ArrowDeserialize)] +pub struct ImuData { + pub accel: Point3D, + pub gyro: Point3D, + pub mag: Option, + pub orientation: Quaternion, +} + +impl ImuData { + pub fn entity_path() -> EntityPath { + "imu_data".into() + } +} + +impl Component for ImuData { + #[inline] + fn name() -> crate::ComponentName { + "rerun.imu".into() + } +} diff --git a/crates/re_log_types/src/component_types/mod.rs b/crates/re_log_types/src/component_types/mod.rs index 61066b8c8f58..ce6334c5b305 100644 --- a/crates/re_log_types/src/component_types/mod.rs +++ b/crates/re_log_types/src/component_types/mod.rs @@ -23,12 +23,14 @@ mod class_id; mod color; pub mod context; pub mod coordinates; +mod imu; mod instance_key; mod keypoint_id; mod label; mod linestrip; mod mat; mod mesh3d; +mod node_graph; mod point; mod quaternion; mod radius; @@ -46,12 +48,14 @@ pub use class_id::ClassId; pub use color::ColorRGBA; pub use context::{AnnotationContext, AnnotationInfo, ClassDescription}; pub use coordinates::ViewCoordinates; +pub use imu::ImuData; pub use instance_key::InstanceKey; pub use keypoint_id::KeypointId; pub use label::Label; pub use linestrip::{LineStrip2D, LineStrip3D}; pub use mat::Mat3x3; pub use mesh3d::{EncodedMesh3D, Mesh3D, MeshFormat, MeshId, RawMesh3D}; +pub use node_graph::NodeGraph; pub use point::{Point2D, Point3D}; pub use quaternion::Quaternion; pub use radius::Radius; @@ -70,7 +74,7 @@ pub use vec::{Vec2D, Vec3D, Vec4D}; lazy_static! 
{ //TODO(john): use a run-time type registry - static ref FIELDS: [Field; 25] = [ + static ref FIELDS: [Field; 27] = [ ::field(), ::field(), ::field(), @@ -96,6 +100,8 @@ lazy_static! { ::field(), ::field(), ::field(), + ::field(), + ::field(), ]; } @@ -210,6 +216,7 @@ where pub struct FastFixedSizeListArray(std::marker::PhantomData); +#[cfg(not(target_os = "windows"))] extern "C" { fn do_not_call_into_iter(); // we never define this function, so the linker will fail } diff --git a/crates/re_log_types/src/component_types/node_graph.rs b/crates/re_log_types/src/component_types/node_graph.rs new file mode 100644 index 000000000000..edf8a83270ad --- /dev/null +++ b/crates/re_log_types/src/component_types/node_graph.rs @@ -0,0 +1,40 @@ +use arrow2_convert::{ArrowDeserialize, ArrowField, ArrowSerialize}; + +use crate::Component; + +// --- + +/// A double-precision NodeGraph. +/// +/// ## Examples +/// +/// ``` +/// # use re_log_types::component_types::NodeGraph; +/// # use arrow2_convert::field::ArrowField; +/// # use arrow2::datatypes::{DataType, Field}; +/// assert_eq!(NodeGraph::data_type(), DataType::Float64); +/// ``` +#[derive(Debug, Clone, Copy, ArrowField, ArrowSerialize, ArrowDeserialize)] +#[arrow_field(transparent)] +pub struct NodeGraph(pub f64); + +impl Component for NodeGraph { + #[inline] + fn name() -> crate::ComponentName { + "rerun.pipeline_graph".into() + } +} + +impl From for NodeGraph { + #[inline] + fn from(value: f64) -> Self { + Self(value) + } +} + +impl From for f64 { + #[inline] + fn from(value: NodeGraph) -> Self { + value.0 + } +} diff --git a/crates/re_log_types/src/component_types/tensor.rs b/crates/re_log_types/src/component_types/tensor.rs index 9c4228b845e6..cf02a3bc2b8e 100644 --- a/crates/re_log_types/src/component_types/tensor.rs +++ b/crates/re_log_types/src/component_types/tensor.rs @@ -857,7 +857,7 @@ impl Tensor { /// A thin wrapper around a [`Tensor`] that is guaranteed to not be compressed (never a jpeg). 
/// /// All clones are shallow, like for [`Tensor`]. -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct DecodedTensor(Tensor); impl DecodedTensor { diff --git a/crates/re_log_types/src/lib.rs b/crates/re_log_types/src/lib.rs index 7775be821e74..956577858493 100644 --- a/crates/re_log_types/src/lib.rs +++ b/crates/re_log_types/src/lib.rs @@ -42,7 +42,7 @@ pub use self::component_types::coordinates; pub use self::component_types::AnnotationContext; pub use self::component_types::Arrow3D; pub use self::component_types::DecodedTensor; -pub use self::component_types::{EncodedMesh3D, Mesh3D, MeshFormat, MeshId, RawMesh3D}; +pub use self::component_types::{EncodedMesh3D, ImuData, Mesh3D, MeshFormat, MeshId, RawMesh3D}; pub use self::component_types::{Tensor, ViewCoordinates}; pub use self::data::*; pub use self::data_cell::{DataCell, DataCellError, DataCellInner, DataCellResult}; diff --git a/crates/re_renderer/Cargo.toml b/crates/re_renderer/Cargo.toml index 03bc85390551..c5a2fb1df2b7 100644 --- a/crates/re_renderer/Cargo.toml +++ b/crates/re_renderer/Cargo.toml @@ -24,7 +24,7 @@ targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] [features] -default = ["arrow", "import-obj", "import-gltf"] +default = ["import-obj", "import-gltf"] ## Support for Arrow datatypes for end-to-end zero-copy. arrow = ["dep:arrow2"] @@ -38,6 +38,8 @@ import-gltf = ["dep:gltf"] ## Enable (de)serialization using serde. serde = ["dep:serde"] +## Render using webgl instead of webgpu on wasm builds. 
+webgl = ["wgpu/webgl"] [dependencies] re_error.workspace = true @@ -55,6 +57,7 @@ glam = { workspace = true, features = ["bytemuck"] } half = { workspace = true, features = ["bytemuck"] } itertools = { workspace = true } macaw.workspace = true +never = '0.1' ordered-float = "3.2" parking_lot.workspace = true slotmap = "1.0.6" @@ -62,6 +65,7 @@ smallvec.workspace = true static_assertions = "1.1" thiserror.workspace = true type-map = "0.5" +wgpu.workspace = true # optional arrow2 = { workspace = true, optional = true } @@ -74,17 +78,7 @@ tobj = { version = "3.2", optional = true } crossbeam = "0.8" notify = "5.0" puffin.workspace = true -wgpu = { workspace = true, default-features = false, features = ["wgsl"] } wgpu-core.workspace = true -wgpu-hal.workspace = true - -# wasm -[target.'cfg(target_arch = "wasm32")'.dependencies] -wgpu = { workspace = true, default-features = false, features = [ - "webgl", - "wgsl", -] } - # For examples: [dev-dependencies] @@ -109,7 +103,7 @@ console_error_panic_hook = "0.1.6" # required to make rand work on wasm, see https://github.com/rust-random/rand#wasm-support getrandom = { version = "0.2", features = ["js"] } wasm-bindgen-futures = "0.4.33" -web-sys = { version = "0.3.60", features = [ +web-sys = { version = "0.3.61", features = [ "Location", "Blob", "RequestInit", diff --git a/crates/re_renderer/examples/2d.rs b/crates/re_renderer/examples/2d.rs index 32940991c1f2..eb17d6955cfd 100644 --- a/crates/re_renderer/examples/2d.rs +++ b/crates/re_renderer/examples/2d.rs @@ -36,16 +36,19 @@ impl framework::Example for Render2D { ); } - let rerun_logo_texture = re_ctx.texture_manager_2d.create( - &mut re_ctx.gpu_resources.textures, - &Texture2DCreationDesc { - label: "rerun logo".into(), - data: image_data.into(), - format: wgpu::TextureFormat::Rgba8UnormSrgb, - width: rerun_logo.width(), - height: rerun_logo.height(), - }, - ); + let rerun_logo_texture = re_ctx + .texture_manager_2d + .create( + &mut re_ctx.gpu_resources.textures, + 
&Texture2DCreationDesc { + label: "rerun logo".into(), + data: image_data.into(), + format: wgpu::TextureFormat::Rgba8UnormSrgb, + width: rerun_logo.width(), + height: rerun_logo.height(), + }, + ) + .expect("Failed to create texture for rerun logo"); Render2D { rerun_logo_texture, diff --git a/crates/re_renderer/examples/depth_cloud.rs b/crates/re_renderer/examples/depth_cloud.rs index c66c17eb79b3..2f5cdb8f6905 100644 --- a/crates/re_renderer/examples/depth_cloud.rs +++ b/crates/re_renderer/examples/depth_cloud.rs @@ -20,8 +20,8 @@ use itertools::Itertools; use macaw::IsoTransform; use re_renderer::{ renderer::{ - ColormappedTexture, DepthCloud, DepthCloudDepthData, DepthCloudDrawData, DepthClouds, - DrawData, GenericSkyboxDrawData, RectangleDrawData, RectangleOptions, TexturedRect, + ColormappedTexture, DepthCloud, DepthCloudDrawData, DepthClouds, DrawData, + GenericSkyboxDrawData, RectangleDrawData, RectangleOptions, TexturedRect, }, resource_managers::{GpuTexture2D, Texture2DCreationDesc}, view_builder::{self, Projection, ViewBuilder}, @@ -44,7 +44,6 @@ enum CameraControl { struct RenderDepthClouds { depth: DepthTexture, albedo: AlbedoTexture, - albedo_handle: GpuTexture2D, scale: f32, point_radius_from_world_depth: f32, @@ -175,14 +174,16 @@ impl RenderDepthClouds { clouds: vec![DepthCloud { world_from_obj, depth_camera_intrinsics: *intrinsics, - world_depth_from_data_depth: 1.0, + world_depth_from_texture_depth: 1.0, point_radius_from_world_depth: *point_radius_from_world_depth, max_depth_in_world: 5.0, depth_dimensions: depth.dimensions, - depth_data: depth.data.clone(), + depth_texture: depth.texture.clone(), colormap: re_renderer::Colormap::Turbo, outline_mask_id: Default::default(), picking_object_id: Default::default(), + albedo_dimensions: glam::UVec2::ZERO, + albedo_data: None, }], radius_boost_in_ui_points_for_outlines: 2.5, }, @@ -233,19 +234,8 @@ impl framework::Example for RenderDepthClouds { fn new(re_ctx: &mut re_renderer::RenderContext) -> Self 
{ re_log::info!("Stop camera movement by pressing 'Space'"); - let depth = DepthTexture::spiral((640, 480).into()); - let albedo = AlbedoTexture::spiral(depth.dimensions); - - let albedo_handle = re_ctx.texture_manager_2d.create( - &mut re_ctx.gpu_resources.textures, - &Texture2DCreationDesc { - label: "albedo".into(), - data: bytemuck::cast_slice(&albedo.rgba8).into(), - format: wgpu::TextureFormat::Rgba8UnormSrgb, - width: albedo.dimensions.x, - height: albedo.dimensions.y, - }, - ); + let depth = DepthTexture::spiral(re_ctx, glam::uvec2(640, 480)); + let albedo = AlbedoTexture::spiral(re_ctx, depth.dimensions); let scale = 50.0; let point_radius_from_world_depth = 0.1; @@ -263,7 +253,6 @@ impl framework::Example for RenderDepthClouds { RenderDepthClouds { depth, albedo, - albedo_handle, scale, point_radius_from_world_depth, @@ -283,7 +272,6 @@ impl framework::Example for RenderDepthClouds { ) -> Vec { let Self { albedo, - albedo_handle, camera_control, camera_position, .. @@ -326,7 +314,7 @@ impl framework::Example for RenderDepthClouds { .transform_point3(glam::Vec3::new(1.0, 1.0, 0.0)), extent_u: world_from_model.transform_vector3(-glam::Vec3::X), extent_v: world_from_model.transform_vector3(-glam::Vec3::Y), - colormapped_texture: ColormappedTexture::from_unorm_srgba(albedo_handle.clone()), + colormapped_texture: ColormappedTexture::from_unorm_srgba(albedo.texture.clone()), options: RectangleOptions { texture_filter_magnification: re_renderer::renderer::TextureFilterMag::Nearest, texture_filter_minification: re_renderer::renderer::TextureFilterMin::Linear, @@ -403,40 +391,60 @@ fn spiral(dimensions: glam::UVec2) -> impl Iterator { }) } +pub fn hash(value: &impl std::hash::Hash) -> u64 { + ahash::RandomState::with_seeds(1, 2, 3, 4).hash_one(value) +} + struct DepthTexture { dimensions: glam::UVec2, - data: DepthCloudDepthData, + data: Vec, + texture: GpuTexture2D, } impl DepthTexture { - pub fn spiral(dimensions: glam::UVec2) -> Self { + pub fn spiral(re_ctx: 
&mut re_renderer::RenderContext, dimensions: glam::UVec2) -> Self { let size = (dimensions.x * dimensions.y) as usize; let mut data = std::iter::repeat(0f32).take(size).collect_vec(); spiral(dimensions).for_each(|(texcoords, d)| { data[(texcoords.x + texcoords.y * dimensions.x) as usize] = d; }); - let data = DepthCloudDepthData::F32(data.into()); - Self { dimensions, data } + let label = format!("depth texture spiral {dimensions}"); + let texture = re_ctx + .texture_manager_2d + .get_or_create( + hash(&label), + &mut re_ctx.gpu_resources.textures, + Texture2DCreationDesc { + label: label.into(), + data: bytemuck::cast_slice(&data).into(), + format: wgpu::TextureFormat::R32Float, + width: dimensions.x, + height: dimensions.y, + }, + ) + .expect("Failed to create depth texture."); + + Self { + dimensions, + data, + texture, + } } pub fn get_linear(&self, x: u32, y: u32) -> f32 { - match &self.data { - DepthCloudDepthData::U16(data) => { - data[(x + y * self.dimensions.x) as usize] as f32 / u16::MAX as f32 - } - DepthCloudDepthData::F32(data) => data[(x + y * self.dimensions.x) as usize], - } + self.data[(x + y * self.dimensions.x) as usize] } } struct AlbedoTexture { dimensions: glam::UVec2, rgba8: Vec, + texture: GpuTexture2D, } impl AlbedoTexture { - pub fn spiral(dimensions: glam::UVec2) -> Self { + pub fn spiral(re_ctx: &mut re_renderer::RenderContext, dimensions: glam::UVec2) -> Self { let size = (dimensions.x * dimensions.y) as usize; let mut rgba8 = std::iter::repeat(0).take(size * 4).collect_vec(); spiral(dimensions).for_each(|(texcoords, d)| { @@ -444,7 +452,27 @@ impl AlbedoTexture { rgba8[idx..idx + 4].copy_from_slice(re_renderer::colormap_turbo_srgb(d).as_slice()); }); - Self { dimensions, rgba8 } + let label = format!("albedo texture spiral {dimensions}"); + let texture = re_ctx + .texture_manager_2d + .get_or_create( + hash(&label), + &mut re_ctx.gpu_resources.textures, + Texture2DCreationDesc { + label: label.into(), + data: 
bytemuck::cast_slice(&rgba8).into(), + format: wgpu::TextureFormat::Rgba8UnormSrgb, + width: dimensions.x, + height: dimensions.y, + }, + ) + .expect("Failed to create albedo texture."); + + Self { + dimensions, + rgba8, + texture, + } } #[allow(dead_code)] diff --git a/crates/re_renderer/examples/framework.rs b/crates/re_renderer/examples/framework.rs index fef6e7544b5b..2575f9d82463 100644 --- a/crates/re_renderer/examples/framework.rs +++ b/crates/re_renderer/examples/framework.rs @@ -125,7 +125,7 @@ impl Application { .await .context("failed to find an appropriate adapter")?; - let hardware_tier = HardwareTier::default(); + let hardware_tier = HardwareTier::from_adapter(&adapter); hardware_tier.check_downlevel_capabilities(&adapter.get_downlevel_capabilities())?; let (device, queue) = adapter .request_device( @@ -159,6 +159,7 @@ impl Application { surface.configure(&device, &surface_config); let mut re_ctx = RenderContext::new( + &adapter, device, queue, RenderContextConfig { diff --git a/crates/re_renderer/shader/colormap.wgsl b/crates/re_renderer/shader/colormap.wgsl index 59be61afdfe4..7d4676ac8852 100644 --- a/crates/re_renderer/shader/colormap.wgsl +++ b/crates/re_renderer/shader/colormap.wgsl @@ -8,11 +8,12 @@ const COLORMAP_MAGMA: u32 = 3u; const COLORMAP_PLASMA: u32 = 4u; const COLORMAP_TURBO: u32 = 5u; const COLORMAP_VIRIDIS: u32 = 6u; - +const ALBEDO_TEXTURE: u32 = 7u; /// Returns a gamma-space sRGB in 0-1 range. /// /// The input will be saturated to [0, 1] range. 
-fn colormap_srgb(which: u32, t: f32) -> Vec3 { +fn colormap_srgb(which: u32, t_unsaturated: f32) -> Vec3 { + let t = saturate(t_unsaturated); if which == COLORMAP_GRAYSCALE { return linear_from_srgb(Vec3(t)); } else if which == COLORMAP_INFERNO { @@ -61,7 +62,6 @@ fn colormap_turbo_srgb(t: f32) -> Vec3 { let g2 = Vec2(4.27729857, 2.82956604); let b2 = Vec2(-89.90310912, 27.34824973); - let t = saturate(t); let v4 = vec4(1.0, t, t * t, t * t * t); let v2 = v4.zw * v4.z; @@ -97,7 +97,6 @@ fn colormap_viridis_srgb(t: f32) -> Vec3 { let c4 = Vec3(6.228269936347081, 14.17993336680509, 56.69055260068105); let c5 = Vec3(4.776384997670288, -13.74514537774601, -65.35303263337234); let c6 = Vec3(-5.435455855934631, 4.645852612178535, 26.3124352495832); - let t = saturate(t); return c0 + t * (c1 + t * (c2 + t * (c3 + t * (c4 + t * (c5 + t * c6))))); } @@ -112,7 +111,6 @@ fn colormap_plasma_srgb(t: f32) -> Vec3 { let c4 = Vec3(-11.10743619062271, -82.66631109428045, 60.13984767418263); let c5 = Vec3(10.02306557647065, 71.41361770095349, -54.07218655560067); let c6 = Vec3(-3.658713842777788, -22.93153465461149, 18.19190778539828); - let t = saturate(t); return c0 + t * (c1 + t * (c2 + t * (c3 + t * (c4 + t * (c5 + t * c6))))); } @@ -127,7 +125,6 @@ fn colormap_magma_srgb(t: f32) -> Vec3 { let c4 = Vec3(52.17613981234068, -27.94360607168351, 12.94416944238394); let c5 = Vec3(-50.76852536473588, 29.04658282127291, 4.23415299384598); let c6 = Vec3(18.65570506591883, -11.48977351997711, -5.601961508734096); - let t = saturate(t); return c0 + t * (c1 + t * (c2 + t * (c3 + t * (c4 + t * (c5 + t * c6))))); } @@ -142,6 +139,5 @@ fn colormap_inferno_srgb(t: f32) -> Vec3 { let c4 = Vec3(77.162935699427, -33.40235894210092, -81.80730925738993); let c5 = Vec3(-71.31942824499214, 32.62606426397723, 73.20951985803202); let c6 = Vec3(25.13112622477341, -12.24266895238567, -23.07032500287172); - let t = saturate(t); return c0 + t * (c1 + t * (c2 + t * (c3 + t * (c4 + t * (c5 + t * c6))))); } 
diff --git a/crates/re_renderer/shader/depth_cloud.wgsl b/crates/re_renderer/shader/depth_cloud.wgsl index 1e7f7afdf0d7..db39dd6d20d2 100644 --- a/crates/re_renderer/shader/depth_cloud.wgsl +++ b/crates/re_renderer/shader/depth_cloud.wgsl @@ -43,16 +43,26 @@ struct DepthCloudInfo { /// Configures color mapping mode, see `colormap.wgsl`. colormap: u32, + /// Is the albedo texture rgb or mono + albedo_color_space: u32, + /// Changes between the opaque and outline draw-phases. radius_boost_in_ui_points: f32, }; +const ALBEDO_COLOR_RGB: u32 = 0u; +const ALBEDO_COLOR_MONO: u32 = 1u; + @group(1) @binding(0) var depth_cloud_info: DepthCloudInfo; @group(1) @binding(1) var depth_texture: texture_2d; +/// Only sampled if `DepthCloudInfo::colormap == ALBEDO_TEXTURE`. +@group(1) @binding(2) +var albedo_texture: texture_2d; + struct VertexOut { @builtin(position) pos_in_clip: Vec4, @@ -82,19 +92,32 @@ struct PointData { } // Backprojects the depth texture using the intrinsics passed in the uniform buffer. -fn compute_point_data(quad_idx: i32) -> PointData { +fn compute_point_data(quad_idx: u32) -> PointData { let wh = textureDimensions(depth_texture); - let texcoords = IVec2(quad_idx % wh.x, quad_idx / wh.x); + let texcoords = UVec2(quad_idx % wh.x, quad_idx / wh.x); // TODO(cmc): expose knobs to linearize/normalize/flip/cam-to-plane depth. 
let world_space_depth = depth_cloud_info.world_depth_from_texture_value * textureLoad(depth_texture, texcoords, 0).x; var data: PointData; - if 0.0 < world_space_depth && world_space_depth < f32max { // TODO(cmc): albedo textures - let color = Vec4(colormap_linear(depth_cloud_info.colormap, world_space_depth / depth_cloud_info.max_depth_in_world), 1.0); - + // let color = Vec4(colormap_linear(depth_cloud_info.colormap, world_space_depth / depth_cloud_info.max_depth_in_world), 1.0); + + var color: Vec4; + if depth_cloud_info.colormap == ALBEDO_TEXTURE { + color = textureSampleLevel( + albedo_texture, + trilinear_sampler, + Vec2(texcoords) / Vec2(textureDimensions(albedo_texture)), + 0.0 + ); + if depth_cloud_info.albedo_color_space == ALBEDO_COLOR_MONO { + color = Vec4(linear_from_srgb(Vec3(color.r)), 1.0); + } + } else { + color = Vec4(colormap_srgb(depth_cloud_info.colormap, world_space_depth), 1.0); + } // TODO(cmc): This assumes a pinhole camera; need to support other kinds at some point. 
let intrinsics = depth_cloud_info.depth_camera_intrinsics; let focal_length = Vec2(intrinsics[0][0], intrinsics[1][1]); diff --git a/crates/re_renderer/shader/lines.wgsl b/crates/re_renderer/shader/lines.wgsl index c8812e8112df..6a143a8233c8 100644 --- a/crates/re_renderer/shader/lines.wgsl +++ b/crates/re_renderer/shader/lines.wgsl @@ -32,11 +32,8 @@ struct BatchUniformBuffer { @group(2) @binding(0) var batch: BatchUniformBuffer; - -// textureLoad needs i32 right now, so we use that with all sizes & indices to avoid casts -// https://github.com/gfx-rs/naga/issues/1997 -const POSITION_TEXTURE_SIZE: i32 = 512; -const LINE_STRIP_TEXTURE_SIZE: i32 = 256; +const POSITION_TEXTURE_SIZE: u32 = 512u; +const LINE_STRIP_TEXTURE_SIZE: u32 = 256u; // Flags // See lines.rs#LineStripFlags @@ -87,9 +84,7 @@ struct LineStripData { // Read and unpack line strip data at a given location fn read_strip_data(idx: u32) -> LineStripData { - // can be u32 once https://github.com/gfx-rs/naga/issues/1997 is solved - let idx = i32(idx); - let coord = IVec2(idx % LINE_STRIP_TEXTURE_SIZE, idx / LINE_STRIP_TEXTURE_SIZE); + let coord = UVec2(idx % LINE_STRIP_TEXTURE_SIZE, idx / LINE_STRIP_TEXTURE_SIZE); var raw_data = textureLoad(position_data_texture, coord, 0).xy; var data: LineStripData; @@ -110,9 +105,7 @@ struct PositionData { // Read and unpack position data at a given location fn read_position_data(idx: u32) -> PositionData { - // can be u32 once https://github.com/gfx-rs/naga/issues/1997 is solved - let idx = i32(idx); - var raw_data = textureLoad(line_strip_texture, IVec2(idx % POSITION_TEXTURE_SIZE, idx / POSITION_TEXTURE_SIZE), 0); + var raw_data = textureLoad(line_strip_texture, UVec2(idx % POSITION_TEXTURE_SIZE, idx / POSITION_TEXTURE_SIZE), 0); var data: PositionData; let pos_4d = batch.world_from_obj * Vec4(raw_data.xyz, 1.0); @@ -198,7 +191,7 @@ fn vs_main(@builtin(vertex_index) vertex_idx: u32) -> VertexOut { quad_dir = pos_data_quad_after.pos - pos_data_quad_end.pos; // Go one 
pos data forward. } else if is_cap_triangle { // Discard vertex. - center_position = Vec3(0.0/0.0, 0.0/0.0, 0.0/0.0); + center_position = Vec3(f32max); } else { quad_dir = pos_data_quad_end.pos - pos_data_quad_begin.pos; } diff --git a/crates/re_renderer/shader/point_cloud.wgsl b/crates/re_renderer/shader/point_cloud.wgsl index dc6efe6df9b5..a55404230692 100644 --- a/crates/re_renderer/shader/point_cloud.wgsl +++ b/crates/re_renderer/shader/point_cloud.wgsl @@ -36,10 +36,7 @@ var batch: BatchUniformBuffer; // Flags // See point_cloud.rs#PointCloudBatchFlags const ENABLE_SHADING: u32 = 1u; - -// textureLoad needs i32 right now, so we use that with all sizes & indices to avoid casts -// https://github.com/gfx-rs/naga/issues/1997 -var TEXTURE_SIZE: i32 = 2048; +const TEXTURE_SIZE: u32 = 2048u; struct VertexOut { @builtin(position) @@ -75,8 +72,8 @@ struct PointData { } // Read and unpack data at a given location -fn read_data(idx: i32) -> PointData { - let coord = IVec2(i32(idx % TEXTURE_SIZE), idx / TEXTURE_SIZE); +fn read_data(idx: u32) -> PointData { + let coord = UVec2(idx % TEXTURE_SIZE, idx / TEXTURE_SIZE); let position_data = textureLoad(position_data_texture, coord, 0); let color = textureLoad(color_texture, coord, 0); diff --git a/crates/re_renderer/shader/rectangle_fs.wgsl b/crates/re_renderer/shader/rectangle_fs.wgsl index 0d1a35cad961..62f65952fe21 100644 --- a/crates/re_renderer/shader/rectangle_fs.wgsl +++ b/crates/re_renderer/shader/rectangle_fs.wgsl @@ -86,10 +86,12 @@ fn fs_main(in: VertexOut) -> @location(0) Vec4 { let colormap_size = textureDimensions(colormap_texture).xy; let color_index = normalized_value.r * f32(colormap_size.x * colormap_size.y); // TODO(emilk): interpolate between neighboring colors for non-integral color indices - let color_index_i32 = i32(color_index); - let x = color_index_i32 % colormap_size.x; - let y = color_index_i32 / colormap_size.x; - texture_color = textureLoad(colormap_texture, IVec2(x, y), 0); + // It's important 
to round here since otherwise numerical instability can push us to the adjacent class-id + // See: https://github.com/rerun-io/rerun/issues/1968 + let color_index_u32 = u32(round(color_index)); + let x = color_index_u32 % colormap_size.x; + let y = color_index_u32 / colormap_size.x; + texture_color = textureLoad(colormap_texture, UVec2(x, y), 0); } else { return ERROR_RGBA; // unknown color mapper } diff --git a/crates/re_renderer/shader/screen_triangle_vertex.wgsl b/crates/re_renderer/shader/screen_triangle_vertex.wgsl index 224da3317d4b..e42fac7827a6 100644 --- a/crates/re_renderer/shader/screen_triangle_vertex.wgsl +++ b/crates/re_renderer/shader/screen_triangle_vertex.wgsl @@ -4,8 +4,10 @@ struct VertexOutput { // Mark output position as invariant so it's safe to use it with depth test Equal. // Without @invariant, different usages in different render pipelines might optimize differently, // causing slightly different results. - @invariant @builtin(position) position: Vec4, - @location(0) texcoord: Vec2, + @invariant @builtin(position) + position: Vec4, + @location(0) + texcoord: Vec2, }; // Workaround for https://github.com/gfx-rs/naga/issues/2252 diff --git a/crates/re_renderer/shader/types.wgsl b/crates/re_renderer/shader/types.wgsl index 3323c7a6cd1f..6355bcc668b5 100644 --- a/crates/re_renderer/shader/types.wgsl +++ b/crates/re_renderer/shader/types.wgsl @@ -1,16 +1,16 @@ // Names chosen to match [`glam`](https://docs.rs/glam/latest/glam/) -type Vec2 = vec2; -type Vec3 = vec3; -type Vec4 = vec4; -type UVec2 = vec2; -type UVec3 = vec3; -type UVec4 = vec4; -type IVec2 = vec2; -type IVec3 = vec3; -type IVec4 = vec4; -type Mat3 = mat3x3; -type Mat4x3 = mat4x3; -type Mat4 = mat4x4; +alias Vec2 = vec2; +alias Vec3 = vec3; +alias Vec4 = vec4; +alias UVec2 = vec2; +alias UVec3 = vec3; +alias UVec4 = vec4; +alias IVec2 = vec2; +alias IVec3 = vec3; +alias IVec4 = vec4; +alias Mat3 = mat3x3; +alias Mat4x3 = mat4x3; +alias Mat4 = mat4x4; // Extreme values as 
documented by the spec: // https://www.w3.org/TR/WGSL/#floating-point-types @@ -22,7 +22,7 @@ const f32min_normal = 0x1p-126f; // Smallest positive normal float value. //const f16max = 0x1.ffcp+15h; // Largest positive float value. //const f16min_normal = 0x1p-14h; // Smallest positive normal float value. // https://www.w3.org/TR/WGSL/#integer-types -const i32min = -0x80000000i; +const i32min = -2147483648; // Naga has some issues with correct negative hexadecimal numbers https://github.com/gfx-rs/naga/issues/2314 const i32max = 0x7fffffffi; const u32min = 0u; const u32max = 0xffffffffu; diff --git a/crates/re_renderer/shader/utils/sphere_quad.wgsl b/crates/re_renderer/shader/utils/sphere_quad.wgsl index ccdd4b771a0b..00937d8e7701 100644 --- a/crates/re_renderer/shader/utils/sphere_quad.wgsl +++ b/crates/re_renderer/shader/utils/sphere_quad.wgsl @@ -56,8 +56,8 @@ fn sphere_quad_span_orthographic(point_pos: Vec3, point_radius: f32, top_bottom: } /// Returns the index of the current quad. -fn sphere_quad_index(vertex_idx: u32) -> i32 { - return i32(vertex_idx) / 6; +fn sphere_quad_index(vertex_idx: u32) -> u32 { + return vertex_idx / 6u; } struct SphereQuadData { diff --git a/crates/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs b/crates/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs index db1a1d085a90..6bc86a60d112 100644 --- a/crates/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs +++ b/crates/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs @@ -1,6 +1,9 @@ -use std::{num::NonZeroU32, sync::mpsc}; +use std::sync::mpsc; -use crate::wgpu_resources::{BufferDesc, GpuBuffer, GpuBufferPool, Texture2DBufferInfo}; +use crate::{ + texture_info::Texture2DBufferInfo, + wgpu_resources::{BufferDesc, GpuBuffer, GpuBufferPool}, +}; /// A sub-allocated staging buffer that can be written to. 
/// @@ -119,7 +122,7 @@ where buffer: &self.chunk_buffer, layout: wgpu::ImageDataLayout { offset: self.byte_offset_in_chunk_buffer, - bytes_per_row: NonZeroU32::new(buffer_info.bytes_per_row_padded), + bytes_per_row: Some(buffer_info.bytes_per_row_padded), rows_per_image: None, }, }, @@ -290,7 +293,7 @@ impl CpuWriteGpuReadBelt { ); // Largest uncompressed texture format (btw. many compressed texture format have the same block size!) debug_assert!( - wgpu::TextureFormat::Rgba32Uint.describe().block_size as u64 + wgpu::TextureFormat::Rgba32Uint.block_size(None).unwrap() as u64 <= CpuWriteGpuReadBelt::MIN_OFFSET_ALIGNMENT ); diff --git a/crates/re_renderer/src/allocator/gpu_readback_belt.rs b/crates/re_renderer/src/allocator/gpu_readback_belt.rs index 8e5f413743e9..d20e231cb269 100644 --- a/crates/re_renderer/src/allocator/gpu_readback_belt.rs +++ b/crates/re_renderer/src/allocator/gpu_readback_belt.rs @@ -1,6 +1,7 @@ -use std::{num::NonZeroU32, ops::Range, sync::mpsc}; +use std::{ops::Range, sync::mpsc}; -use crate::wgpu_resources::{BufferDesc, GpuBuffer, GpuBufferPool, Texture2DBufferInfo}; +use crate::texture_info::Texture2DBufferInfo; +use crate::wgpu_resources::{BufferDesc, GpuBuffer, GpuBufferPool}; /// Identifier used to identify a buffer upon retrieval of the data. /// @@ -16,6 +17,12 @@ struct PendingReadbackRange { user_data: GpuReadbackUserDataStorage, } +#[derive(thiserror::Error, Debug)] +pub enum GpuReadbackError { + #[error("Texture format {0:?} is not supported for readback.")] + UnsupportedTextureFormatForReadback(wgpu::TextureFormat), +} + /// A reserved slice for GPU readback. 
/// /// Readback needs to happen from a buffer/texture with copy-source usage, @@ -36,8 +43,8 @@ impl GpuReadbackBuffer { encoder: &mut wgpu::CommandEncoder, source: wgpu::ImageCopyTexture<'_>, copy_extents: glam::UVec2, - ) { - self.read_multiple_texture2d(encoder, &[(source, copy_extents)]); + ) -> Result<(), GpuReadbackError> { + self.read_multiple_texture2d(encoder, &[(source, copy_extents)]) } /// Reads multiple textures into the same buffer. @@ -54,11 +61,17 @@ impl GpuReadbackBuffer { mut self, encoder: &mut wgpu::CommandEncoder, sources_and_extents: &[(wgpu::ImageCopyTexture<'_>, glam::UVec2)], - ) { + ) -> Result<(), GpuReadbackError> { for (source, copy_extents) in sources_and_extents { let start_offset = wgpu::util::align_to( self.range_in_chunk.start, - source.texture.format().describe().block_size as u64, + source + .texture + .format() + .block_size(Some(source.aspect)) + .ok_or(GpuReadbackError::UnsupportedTextureFormatForReadback( + source.texture.format(), + ))? as u64, ); let buffer_info = Texture2DBufferInfo::new(source.texture.format(), *copy_extents); @@ -75,7 +88,7 @@ impl GpuReadbackBuffer { buffer: &self.chunk_buffer, layout: wgpu::ImageDataLayout { offset: start_offset, - bytes_per_row: NonZeroU32::new(buffer_info.bytes_per_row_padded), + bytes_per_row: Some(buffer_info.bytes_per_row_padded), rows_per_image: None, }, }, @@ -89,6 +102,7 @@ impl GpuReadbackBuffer { self.range_in_chunk = (start_offset + buffer_info.buffer_size_padded)..self.range_in_chunk.end; } + Ok(()) } // TODO(andreas): Unused & untested so far! 
diff --git a/crates/re_renderer/src/allocator/mod.rs b/crates/re_renderer/src/allocator/mod.rs index 11a092b1b374..382376416d54 100644 --- a/crates/re_renderer/src/allocator/mod.rs +++ b/crates/re_renderer/src/allocator/mod.rs @@ -9,7 +9,8 @@ mod uniform_buffer_fill; pub use cpu_write_gpu_read_belt::{CpuWriteGpuReadBelt, CpuWriteGpuReadBuffer}; pub use gpu_readback_belt::{ - GpuReadbackBelt, GpuReadbackBuffer, GpuReadbackIdentifier, GpuReadbackUserDataStorage, + GpuReadbackBelt, GpuReadbackBuffer, GpuReadbackError, GpuReadbackIdentifier, + GpuReadbackUserDataStorage, }; pub use uniform_buffer_fill::{ create_and_fill_uniform_buffer, create_and_fill_uniform_buffer_batch, diff --git a/crates/re_renderer/src/colormap.rs b/crates/re_renderer/src/colormap.rs index 15cd98d5dc14..e4a4927ed3fe 100644 --- a/crates/re_renderer/src/colormap.rs +++ b/crates/re_renderer/src/colormap.rs @@ -18,6 +18,7 @@ pub enum Colormap { Plasma = 4, Turbo = 5, Viridis = 6, + AlbedoTexture = 7, } impl Colormap { @@ -40,6 +41,7 @@ impl std::fmt::Display for Colormap { Colormap::Plasma => write!(f, "Plasma"), Colormap::Turbo => write!(f, "Turbo"), Colormap::Viridis => write!(f, "Viridis"), + Colormap::AlbedoTexture => write!(f, "AlbedoTexture"), } } } @@ -52,6 +54,10 @@ pub fn colormap_srgb(which: Colormap, t: f32) -> [u8; 4] { Colormap::Plasma => colormap_plasma_srgb(t), Colormap::Magma => colormap_magma_srgb(t), Colormap::Inferno => colormap_inferno_srgb(t), + Colormap::AlbedoTexture => { + re_log::error_once!("Trying to do texture sampling on the CPU"); + [0; 4] + } } } diff --git a/crates/re_renderer/src/config.rs b/crates/re_renderer/src/config.rs index 91e0315d401d..7b85283b505f 100644 --- a/crates/re_renderer/src/config.rs +++ b/crates/re_renderer/src/config.rs @@ -2,6 +2,11 @@ /// /// To reduce complexity, we don't do fine-grained feature checks, /// but instead support set of features, each a superset of the next. +/// +/// Tiers are sorted from lowest to highest. 
Certain tiers may not be possible on a given machine/setup, +/// but choosing lower tiers is always possible. +/// Tiers may loosely relate to quality settings, but their primary function is an easier way to +/// do bundle feature *support* checks. #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum HardwareTier { /// Limited feature support as provided by WebGL and native GLES2/OpenGL3(ish). @@ -34,18 +39,22 @@ impl HardwareTier { } } -impl Default for HardwareTier { - fn default() -> Self { - // Use "Basic" tier for actual web but also if someone forces the GL backend! - if supported_backends() == wgpu::Backends::GL { - HardwareTier::Gles - } else { - HardwareTier::FullWebGpuSupport +impl HardwareTier { + /// Picks the highest possible tier for a given adapter. + /// + /// Note that it is always possible to pick a lower tier! + pub fn from_adapter(adapter: &wgpu::Adapter) -> Self { + match adapter.get_info().backend { + wgpu::Backend::Vulkan + | wgpu::Backend::Metal + | wgpu::Backend::Dx12 + | wgpu::Backend::BrowserWebGpu => HardwareTier::FullWebGpuSupport, + + // Dx11 support in wgpu is sporadic, treat it like GLES to be on the safe side. + wgpu::Backend::Dx11 | wgpu::Backend::Gl | wgpu::Backend::Empty => HardwareTier::Gles, } } -} -impl HardwareTier { /// Wgpu limits required by the given hardware tier. pub fn limits(self) -> wgpu::Limits { wgpu::Limits { @@ -127,22 +136,19 @@ pub struct RenderContextConfig { /// /// Other backend might work as well, but lack of support isn't regarded as a bug. pub fn supported_backends() -> wgpu::Backends { - // Native. - // Only use Vulkan & Metal unless explicitly told so since this reduces surfaces and thus surprises. - // - // Bunch of cases where it's still useful to switch though: - // * Some Windows VMs only provide DX12 drivers, observed with Parallels on Apple Silicon - // * May run into Linux issues that warrant trying out the GL backend. - // - // For changing the backend we use standard wgpu env var, i.e. 
WGPU_BACKEND. - #[cfg(not(target_arch = "wasm32"))] - { + if cfg!(target_arch = "wasm32") { + // Web - WebGL is used automatically when wgpu is compiled with `webgl` feature. + wgpu::Backends::GL | wgpu::Backends::BROWSER_WEBGPU + } else { + // Native. + // Only use Vulkan & Metal unless explicitly told so since this reduces surfaces and thus surprises. + // + // Bunch of cases where it's still useful to switch though: + // * Some Windows VMs only provide DX12 drivers, observed with Parallels on Apple Silicon + // * May run into Linux issues that warrant trying out the GL backend. + // + // For changing the backend we use standard wgpu env var, i.e. WGPU_BACKEND. wgpu::util::backend_bits_from_env() .unwrap_or(wgpu::Backends::VULKAN | wgpu::Backends::METAL) } - // Web - we support only WebGL right now, WebGPU should work but hasn't been tested. - #[cfg(target_arch = "wasm32")] - { - wgpu::Backends::GL - } } diff --git a/crates/re_renderer/src/context.rs b/crates/re_renderer/src/context.rs index 701d1800d561..a2c8d032b819 100644 --- a/crates/re_renderer/src/context.rs +++ b/crates/re_renderer/src/context.rs @@ -106,6 +106,7 @@ impl RenderContext { const MAX_NUM_INFLIGHT_QUEUE_SUBMISSIONS: usize = 4; pub fn new( + adapter: &wgpu::Adapter, device: Arc, queue: Arc, config: RenderContextConfig, @@ -138,7 +139,16 @@ impl RenderContext { config.hardware_tier.features(), device.features(), ); - // Can't check downlevel feature flags since they sit on the adapter, not on the device. + assert!(adapter.get_downlevel_capabilities().flags.contains(config.hardware_tier.required_downlevel_capabilities().flags), + "The given device doesn't support the required downlevel capabilities for the given hardware tier {:?}. 
+ Required: + {:?} + Actual: + {:?}", + config.hardware_tier, + config.hardware_tier.required_downlevel_capabilities(), + adapter.get_downlevel_capabilities(), + ); // In debug builds, make sure to catch all errors, never crash, and try to // always let the user find a way to return a poisoned pipeline back into a @@ -178,6 +188,21 @@ impl RenderContext { frame_index: 0, }; + // Register shader workarounds for the current device. + if adapter.get_info().backend == wgpu::Backend::BrowserWebGpu { + // Chrome/Tint does not support `@invariant` when targeting Metal. + // https://bugs.chromium.org/p/chromium/issues/detail?id=1439273 + // (bug is fixed as of writing, but hasn't hit any public released version yet) + // Ignoring it is fine in the cases we use it, it's mostly there to avoid a (correct!) warning in wgpu. + gpu_resources + .shader_modules + .shader_text_workaround_replacements + .push(( + "@invariant @builtin(position)".to_owned(), + "@builtin(position)".to_owned(), + )); + } + RenderContext { device, queue, diff --git a/crates/re_renderer/src/draw_phases/mod.rs b/crates/re_renderer/src/draw_phases/mod.rs index fe7ce542f245..f77d70e9ec5c 100644 --- a/crates/re_renderer/src/draw_phases/mod.rs +++ b/crates/re_renderer/src/draw_phases/mod.rs @@ -6,7 +6,8 @@ pub use outlines::{OutlineConfig, OutlineMaskPreference, OutlineMaskProcessor}; mod picking_layer; pub use picking_layer::{ - PickingLayerId, PickingLayerInstanceId, PickingLayerObjectId, PickingLayerProcessor, + PickingLayerError, PickingLayerId, PickingLayerInstanceId, PickingLayerObjectId, + PickingLayerProcessor, }; mod screenshot; diff --git a/crates/re_renderer/src/draw_phases/picking_layer.rs b/crates/re_renderer/src/draw_phases/picking_layer.rs index dc5cf38f033f..cd4d6601ebb0 100644 --- a/crates/re_renderer/src/draw_phases/picking_layer.rs +++ b/crates/re_renderer/src/draw_phases/picking_layer.rs @@ -13,11 +13,12 @@ use crate::{ allocator::create_and_fill_uniform_buffer, 
global_bindings::FrameUniformBuffer, include_shader_module, + texture_info::Texture2DBufferInfo, view_builder::ViewBuilder, wgpu_resources::{ BindGroupDesc, BindGroupEntry, BindGroupLayoutDesc, GpuBindGroup, GpuRenderPipelineHandle, GpuTexture, GpuTextureHandle, PipelineLayoutDesc, PoolError, RenderPipelineDesc, - Texture2DBufferInfo, TextureDesc, WgpuResourcePools, + TextureDesc, WgpuResourcePools, }, DebugLabel, GpuReadbackBuffer, GpuReadbackIdentifier, IntRect, RenderContext, }; @@ -132,6 +133,15 @@ pub fn pixel_coord_to_ndc(coord: glam::Vec2, target_resolution: glam::Vec2) -> g ) } +#[derive(thiserror::Error, Debug)] +pub enum PickingLayerError { + #[error(transparent)] + ReadbackError(#[from] crate::allocator::GpuReadbackError), + + #[error(transparent)] + ResourcePoolError(#[from] PoolError), +} + /// Manages the rendering of the picking layer pass, its render targets & readback buffer. /// /// The view builder creates this for every frame that requests a picking result. @@ -278,8 +288,10 @@ impl PickingLayerProcessor { // Offset of the depth buffer in the readback buffer needs to be aligned to size of a depth pixel. // This is "trivially true" if the size of the depth format is a multiple of the size of the id format. 
debug_assert!( - Self::PICKING_LAYER_FORMAT.describe().block_size - % Self::PICKING_LAYER_DEPTH_FORMAT.describe().block_size + Self::PICKING_LAYER_FORMAT.block_size(None).unwrap() + % Self::PICKING_LAYER_DEPTH_FORMAT + .block_size(Some(wgpu::TextureAspect::DepthOnly)) + .unwrap() == 0 ); let buffer_size = row_info_id.buffer_size_padded + row_info_depth.buffer_size_padded; @@ -342,7 +354,7 @@ impl PickingLayerProcessor { self, encoder: &mut wgpu::CommandEncoder, pools: &WgpuResourcePools, - ) -> Result<(), PoolError> { + ) -> Result<(), PickingLayerError> { let extent = glam::uvec2( self.picking_target.texture.width(), self.picking_target.texture.height(), @@ -373,12 +385,16 @@ impl PickingLayerProcessor { texture: &readable_depth_texture.texture, mip_level: 0, origin: wgpu::Origin3d::ZERO, - aspect: wgpu::TextureAspect::All, + aspect: if self.depth_readback_workaround.is_some() { + wgpu::TextureAspect::All + } else { + wgpu::TextureAspect::DepthOnly + }, }, extent, ), ], - ); + )?; Ok(()) } @@ -401,11 +417,13 @@ impl PickingLayerProcessor { .readback_data::>(identifier, |data, metadata| { // Assert that our texture data reinterpretation works out from a pixel size point of view. 
debug_assert_eq!( - Self::PICKING_LAYER_DEPTH_FORMAT.describe().block_size as usize, - std::mem::size_of::() + Self::PICKING_LAYER_DEPTH_FORMAT + .block_size(Some(wgpu::TextureAspect::DepthOnly)) + .unwrap(), + std::mem::size_of::() as u32 ); debug_assert_eq!( - Self::PICKING_LAYER_FORMAT.describe().block_size as usize, + Self::PICKING_LAYER_FORMAT.block_size(None).unwrap() as usize, std::mem::size_of::() ); @@ -432,8 +450,8 @@ impl PickingLayerProcessor { // See https://github.com/gfx-rs/wgpu/issues/3644 debug_assert_eq!( DepthReadbackWorkaround::READBACK_FORMAT - .describe() - .block_size as usize, + .block_size(None) + .unwrap() as usize, std::mem::size_of::() * 4 ); picking_depth_data = picking_depth_data.into_iter().step_by(4).collect(); diff --git a/crates/re_renderer/src/draw_phases/screenshot.rs b/crates/re_renderer/src/draw_phases/screenshot.rs index 68c05b3b545c..79cac54c1a6a 100644 --- a/crates/re_renderer/src/draw_phases/screenshot.rs +++ b/crates/re_renderer/src/draw_phases/screenshot.rs @@ -11,7 +11,9 @@ //! Or alternatively try to render the images in several tiles 🤔. In any case this would greatly improve quality! use crate::{ - wgpu_resources::{GpuTexture, Texture2DBufferInfo, TextureDesc}, + allocator::GpuReadbackError, + texture_info::Texture2DBufferInfo, + wgpu_resources::{GpuTexture, TextureDesc}, DebugLabel, GpuReadbackBuffer, GpuReadbackIdentifier, RenderContext, }; @@ -95,7 +97,10 @@ impl ScreenshotProcessor { pass } - pub fn end_render_pass(self, encoder: &mut wgpu::CommandEncoder) { + pub fn end_render_pass( + self, + encoder: &mut wgpu::CommandEncoder, + ) -> Result<(), GpuReadbackError> { self.screenshot_readback_buffer.read_texture2d( encoder, wgpu::ImageCopyTexture { @@ -108,7 +113,7 @@ impl ScreenshotProcessor { self.screenshot_texture.texture.width(), self.screenshot_texture.texture.height(), ), - ); + ) } /// Returns the oldest received screenshot results for a given identifier and user data type. 
diff --git a/crates/re_renderer/src/global_bindings.rs b/crates/re_renderer/src/global_bindings.rs index c00ad0315c67..33fdfa4b2191 100644 --- a/crates/re_renderer/src/global_bindings.rs +++ b/crates/re_renderer/src/global_bindings.rs @@ -82,14 +82,14 @@ impl GlobalBindings { // Sampler without any filtering. wgpu::BindGroupLayoutEntry { binding: 1, - visibility: wgpu::ShaderStages::FRAGMENT, + visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::NonFiltering), count: None, }, // Trilinear sampler. wgpu::BindGroupLayoutEntry { binding: 2, - visibility: wgpu::ShaderStages::FRAGMENT, + visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering), count: None, }, diff --git a/crates/re_renderer/src/importer/gltf.rs b/crates/re_renderer/src/importer/gltf.rs index 9d628931c6f3..ba259c9f30bb 100644 --- a/crates/re_renderer/src/importer/gltf.rs +++ b/crates/re_renderer/src/importer/gltf.rs @@ -68,8 +68,16 @@ pub fn load_gltf_from_buffer( }; images_as_textures.push( - ctx.texture_manager_2d - .create(&mut ctx.gpu_resources.textures, &texture), + match ctx + .texture_manager_2d + .create(&mut ctx.gpu_resources.textures, &texture) + { + Ok(texture) => texture, + Err(err) => { + re_log::error!("Failed to create texture: {err}"); + ctx.texture_manager_2d.white_texture_unorm_handle().clone() + } + }, ); } diff --git a/crates/re_renderer/src/lib.rs b/crates/re_renderer/src/lib.rs index 770c0589f7fe..1932e1aeb9e2 100644 --- a/crates/re_renderer/src/lib.rs +++ b/crates/re_renderer/src/lib.rs @@ -12,6 +12,7 @@ pub mod importer; pub mod mesh; pub mod renderer; pub mod resource_managers; +pub mod texture_info; pub mod view_builder; mod allocator; diff --git a/crates/re_renderer/src/renderer/debug_overlay.rs b/crates/re_renderer/src/renderer/debug_overlay.rs index 6e615cd4a710..5b78ac3e3139 100644 --- 
a/crates/re_renderer/src/renderer/debug_overlay.rs +++ b/crates/re_renderer/src/renderer/debug_overlay.rs @@ -49,6 +49,12 @@ pub struct DebugOverlayRenderer { bind_group_layout: GpuBindGroupLayoutHandle, } +#[derive(thiserror::Error, Debug)] +pub enum DebugOverlayError { + #[error("Can't display texture with format: {0:?}")] + UnsupportedTextureFormat(wgpu::TextureFormat), +} + /// Debug overlay for quick & dirty display of texture contents. /// /// Executed as part of the composition draw phase in order to allow "direct" output to the screen. @@ -70,7 +76,7 @@ impl DebugOverlayDrawData { debug_texture: &GpuTexture, screen_resolution: glam::UVec2, overlay_rect: IntRect, - ) -> Self { + ) -> Result { let mut renderers = ctx.renderers.write(); let debug_overlay = renderers.get_or_create::<_, DebugOverlayRenderer>( &ctx.shared_renderer_data, @@ -79,13 +85,22 @@ impl DebugOverlayDrawData { &mut ctx.resolver, ); - let mode = match debug_texture.texture.format().describe().sample_type { - wgpu::TextureSampleType::Depth | wgpu::TextureSampleType::Float { .. } => { + let mode = match debug_texture + .texture + .format() + .sample_type(Some(wgpu::TextureAspect::All)) + { + Some(wgpu::TextureSampleType::Depth | wgpu::TextureSampleType::Float { .. 
}) => { gpu_data::DebugOverlayMode::ShowFloatTexture } - wgpu::TextureSampleType::Sint | wgpu::TextureSampleType::Uint => { + Some(wgpu::TextureSampleType::Sint | wgpu::TextureSampleType::Uint) => { gpu_data::DebugOverlayMode::ShowUintTexture } + None => { + return Err(DebugOverlayError::UnsupportedTextureFormat( + debug_texture.texture.format(), + )) + } }; let uniform_buffer_binding = create_and_fill_uniform_buffer( @@ -112,7 +127,7 @@ impl DebugOverlayDrawData { ), }; - DebugOverlayDrawData { + Ok(DebugOverlayDrawData { bind_group: ctx.gpu_resources.bind_groups.alloc( &ctx.device, &ctx.gpu_resources, @@ -126,7 +141,7 @@ impl DebugOverlayDrawData { layout: debug_overlay.bind_group_layout, }, ), - } + }) } } diff --git a/crates/re_renderer/src/renderer/depth_cloud.rs b/crates/re_renderer/src/renderer/depth_cloud.rs index 285c0a2f9fd0..a74e4e9e0a96 100644 --- a/crates/re_renderer/src/renderer/depth_cloud.rs +++ b/crates/re_renderer/src/renderer/depth_cloud.rs @@ -11,18 +11,18 @@ //! The vertex shader backprojects the depth texture using the user-specified intrinsics, and then //! behaves pretty much exactly like our point cloud renderer (see [`point_cloud.rs`]). 
+use itertools::Itertools; use smallvec::smallvec; use crate::{ allocator::create_and_fill_uniform_buffer_batch, draw_phases::{DrawPhase, OutlineMaskProcessor}, include_shader_module, - resource_managers::ResourceManagerError, + resource_managers::{GpuTexture2D, ResourceManagerError}, view_builder::ViewBuilder, wgpu_resources::{ BindGroupDesc, BindGroupEntry, BindGroupLayoutDesc, GpuBindGroup, GpuBindGroupLayoutHandle, - GpuRenderPipelineHandle, GpuTexture, PipelineLayoutDesc, RenderPipelineDesc, - Texture2DBufferInfo, TextureDesc, + GpuRenderPipelineHandle, GpuTexture, PipelineLayoutDesc, RenderPipelineDesc, TextureDesc, }, Colormap, OutlineMaskPreference, PickingLayerObjectId, PickingLayerProcessor, }; @@ -34,8 +34,19 @@ use super::{ // --- +#[derive(Debug, Clone, Copy)] +enum AlbedoColorSpace { + RGB, + MONO, +} + mod gpu_data { - use crate::{wgpu_buffer_types, PickingLayerObjectId}; + use crate::{ + wgpu_buffer_types::{self, U32RowPadded}, + PickingLayerObjectId, + }; + + use super::{AlbedoColorSpace, DepthCloudAlbedoData}; /// Keep in sync with mirror in `depth_cloud.wgsl.` #[repr(C, align(256))] @@ -50,7 +61,7 @@ mod gpu_data { pub picking_layer_object_id: PickingLayerObjectId, /// Multiplier to get world-space depth from whatever is in the texture. - pub world_depth_from_texture_value: f32, + pub world_depth_from_texture_depth: f32, /// Point radius is calculated as world-space depth times this value. pub point_radius_from_world_depth: f32, @@ -61,10 +72,13 @@ mod gpu_data { /// Which colormap should be used. pub colormap: u32, + /// Is the albedo texture rgb or mono + pub albedo_color_space: wgpu_buffer_types::U32RowPadded, + /// Changes over different draw-phases. 
pub radius_boost_in_ui_points: wgpu_buffer_types::F32RowPadded, - pub end_padding: [wgpu_buffer_types::PaddingRow; 16 - 4 - 3 - 1 - 1 - 1], + pub end_padding: [wgpu_buffer_types::PaddingRow; 16 - 4 - 3 - 1 - 1 - 1 - 1], } impl DepthCloudInfoUBO { @@ -75,31 +89,35 @@ mod gpu_data { let super::DepthCloud { world_from_obj, depth_camera_intrinsics, - world_depth_from_data_depth, + world_depth_from_texture_depth, point_radius_from_world_depth, max_depth_in_world, depth_dimensions: _, - depth_data, + depth_texture: _, colormap, outline_mask_id, picking_object_id, + albedo_dimensions, + albedo_data, } = depth_cloud; - let user_depth_from_texture_value = match depth_data { - super::DepthCloudDepthData::U16(_) => 65535.0, // un-normalize - super::DepthCloudDepthData::F32(_) => 1.0, - }; - let world_depth_from_texture_value = - world_depth_from_data_depth * user_depth_from_texture_value; - Self { world_from_obj: (*world_from_obj).into(), depth_camera_intrinsics: (*depth_camera_intrinsics).into(), outline_mask_id: outline_mask_id.0.unwrap_or_default().into(), - world_depth_from_texture_value, + world_depth_from_texture_depth: *world_depth_from_texture_depth, point_radius_from_world_depth: *point_radius_from_world_depth, max_depth_in_world: *max_depth_in_world, colormap: *colormap as u32, + albedo_color_space: (depth_cloud + .albedo_data + .as_ref() + .map(|albedo_data| match albedo_data { + DepthCloudAlbedoData::Mono8(_) => AlbedoColorSpace::MONO, + _ => AlbedoColorSpace::RGB, + }) + .unwrap_or(AlbedoColorSpace::RGB) as u32) + .into(), radius_boost_in_ui_points: radius_boost_in_ui_points.into(), picking_layer_object_id: *picking_object_id, end_padding: Default::default(), @@ -108,30 +126,17 @@ mod gpu_data { } } -/// The raw data from a depth texture. -/// -/// This is either `u16` or `f32` values; in both cases the data will be uploaded to the shader -/// as-is. -/// For `u16`s, this results in a `Depth16Unorm` texture, otherwise an `R32Float`. 
-/// The reason we normalize `u16` is so that the shader can use a `float` texture in both cases. -/// However, it means we need to multiply the sampled value by `65535.0` in the shader to get -/// the actual depth. -/// -/// The shader assumes that this is normalized, linear, non-flipped depth using the camera -/// position as reference point (not the camera plane!). +/// The raw data for the (optional) albedo texture. // -// TODO(cmc): support more depth data types. -// TODO(cmc): expose knobs to linearize/normalize/flip/cam-to-plane depth. +// TODO(cmc): support more albedo data types. +// TODO(cmc): arrow buffers for u8... #[derive(Debug, Clone)] -pub enum DepthCloudDepthData { - U16(crate::Buffer), - F32(crate::Buffer), -} - -impl Default for DepthCloudDepthData { - fn default() -> Self { - Self::F32(Default::default()) - } +pub enum DepthCloudAlbedoData { + Rgb8(Vec), + Rgb8Srgb(Vec), + Rgba8(Vec), + Rgba8Srgb(Vec), + Mono8(Vec), } pub struct DepthCloud { @@ -143,8 +148,8 @@ pub struct DepthCloud { /// Only supports pinhole cameras at the moment. pub depth_camera_intrinsics: glam::Mat3, - /// Multiplier to get world-space depth from whatever is in [`Self::depth_data`]. - pub world_depth_from_data_depth: f32, + /// Multiplier to get world-space depth from whatever is in [`Self::depth_texture`]. + pub world_depth_from_texture_depth: f32, /// Point radius is calculated as world-space depth times this value. pub point_radius_from_world_depth: f32, @@ -155,10 +160,10 @@ pub struct DepthCloud { /// The dimensions of the depth texture in pixels. pub depth_dimensions: glam::UVec2, - /// The actual data from the depth texture. + /// The actual data for the depth texture. /// - /// See [`DepthCloudDepthData`] for more information. - pub depth_data: DepthCloudDepthData, + /// Only textures with sample type `Float` are supported. + pub depth_texture: GpuTexture2D, /// Configures color mapping mode. 
pub colormap: Colormap, @@ -168,6 +173,16 @@ pub struct DepthCloud { /// Picking object id that applies for the entire depth cloud. pub picking_object_id: PickingLayerObjectId, + + /// The dimensions of the (optional) albedo texture in pixels. + /// + /// Irrelevant if [`Self::albedo_data`] isn't set. + pub albedo_dimensions: glam::UVec2, + + /// The actual data for the (optional) albedo texture. + /// + /// If set, takes precedence over [`Self::colormap`]. + pub albedo_data: Option, } impl DepthCloud { @@ -223,11 +238,20 @@ impl DrawData for DepthCloudDrawData { type Renderer = DepthCloudRenderer; } +#[derive(thiserror::Error, Debug)] +pub enum DepthCloudDrawDataError { + #[error("Depth texture format was {0:?}, only formats with sample type float are supported")] + InvalidDepthTextureFormat(wgpu::TextureFormat), + + #[error(transparent)] + ResourceManagerError(#[from] ResourceManagerError), +} + impl DepthCloudDrawData { pub fn new( ctx: &mut RenderContext, depth_clouds: &DepthClouds, - ) -> Result { + ) -> Result { crate::profile_function!(); let DepthClouds { @@ -276,44 +300,72 @@ impl DepthCloudDrawData { depth_cloud_ubo_binding_outlines, depth_cloud_ubo_binding_opaque ) { - let depth_texture = match &depth_cloud.depth_data { - DepthCloudDepthData::U16(data) => { - if cfg!(target_arch = "wasm32") { - // Web: manual normalization because Depth16Unorm textures aren't supported on - // the web (and won't ever be on the WebGL backend, see - // https://github.com/gfx-rs/wgpu/issues/3537). - // - // TODO(cmc): use an RG8 texture and unpack it manually in the shader instead. 
- use itertools::Itertools as _; - let dataf32 = data - .as_slice() - .iter() - .map(|d| *d as f32 / u16::MAX as f32) + if !matches!( + depth_cloud.depth_texture.format().sample_type(None), + Some(wgpu::TextureSampleType::Float { filterable: _ }) + ) { + return Err(DepthCloudDrawDataError::InvalidDepthTextureFormat( + depth_cloud.depth_texture.format(), + )); + } + let albedo_texture = depth_cloud + .albedo_data + .as_ref() + .map(|data| match data { + DepthCloudAlbedoData::Rgba8(data) => create_and_upload_texture( + ctx, + depth_cloud.albedo_dimensions, + wgpu::TextureFormat::Rgba8Unorm, + data.as_slice(), + ), + DepthCloudAlbedoData::Rgba8Srgb(data) => create_and_upload_texture( + ctx, + depth_cloud.albedo_dimensions, + wgpu::TextureFormat::Rgba8UnormSrgb, + data.as_slice(), + ), + // TODO + DepthCloudAlbedoData::Rgb8(data) => { + let data = data + .chunks(3) + .into_iter() + .flat_map(|c| [c[0], c[1], c[2], 255]) .collect_vec(); create_and_upload_texture( ctx, - depth_cloud, - dataf32.as_slice(), - wgpu::TextureFormat::R32Float, + depth_cloud.albedo_dimensions, + wgpu::TextureFormat::Rgba8Unorm, + data.as_slice(), ) - } else { - // Native: We use Depth16Unorm over R16Unorm because the latter is behind a feature flag and doesn't work on WebGPU. 
+ } + DepthCloudAlbedoData::Rgb8Srgb(data) => { + let data = data + .chunks(3) + .into_iter() + .flat_map(|c| [c[0], c[1], c[2], 255]) + .collect_vec(); create_and_upload_texture( ctx, - depth_cloud, + depth_cloud.albedo_dimensions, + wgpu::TextureFormat::Rgba8UnormSrgb, data.as_slice(), - wgpu::TextureFormat::Depth16Unorm, ) } - } - DepthCloudDepthData::F32(data) => create_and_upload_texture( - ctx, - depth_cloud, - data.as_slice(), - wgpu::TextureFormat::R32Float, - ), - }; - + DepthCloudAlbedoData::Mono8(data) => create_and_upload_texture( + ctx, + depth_cloud.albedo_dimensions, + wgpu::TextureFormat::R8Unorm, + data.as_slice(), + ), + }) + .unwrap_or_else(|| { + create_and_upload_texture( + ctx, + (1, 1).into(), + wgpu::TextureFormat::Rgba8Unorm, + [0u8; 4].as_slice(), + ) + }); let mk_bind_group = |label, ubo: BindGroupEntry| { ctx.gpu_resources.bind_groups.alloc( &ctx.device, @@ -322,7 +374,8 @@ impl DepthCloudDrawData { label, entries: smallvec![ ubo, - BindGroupEntry::DefaultTextureView(depth_texture.handle), + BindGroupEntry::DefaultTextureView(depth_cloud.depth_texture.handle), + BindGroupEntry::DefaultTextureView(albedo_texture.handle), ], layout: bg_layout, }, @@ -346,77 +399,53 @@ impl DepthCloudDrawData { fn create_and_upload_texture( ctx: &mut RenderContext, - depth_cloud: &DepthCloud, + dimensions: glam::UVec2, + format: wgpu::TextureFormat, data: &[T], - depth_format: wgpu::TextureFormat, ) -> GpuTexture { crate::profile_function!(); - let depth_texture_size = wgpu::Extent3d { - width: depth_cloud.depth_dimensions.x, - height: depth_cloud.depth_dimensions.y, + let texture_size = wgpu::Extent3d { + width: dimensions.x, + height: dimensions.y, depth_or_array_layers: 1, }; - let depth_texture_desc = TextureDesc { - label: "depth_texture".into(), - size: depth_texture_size, + let texture_desc = TextureDesc { + label: "texture".into(), + size: texture_size, mip_level_count: 1, sample_count: 1, dimension: wgpu::TextureDimension::D2, - format: 
depth_format, + format, usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST, }; - let depth_texture = ctx - .gpu_resources - .textures - .alloc(&ctx.device, &depth_texture_desc); - - // Not supporting compressed formats here. - debug_assert!(depth_texture_desc.format.describe().block_dimensions == (1, 1)); - - let buffer_info = - Texture2DBufferInfo::new(depth_texture_desc.format, depth_cloud.depth_dimensions); - - // TODO(andreas): CpuGpuWriteBelt should make it easier to do this. - let bytes_padding_per_row = - (buffer_info.bytes_per_row_padded - buffer_info.bytes_per_row_unpadded) as usize; - // Sanity check the padding size. If this happens something is seriously wrong, as it would imply - // that we can't express the required alignment with the block size. - debug_assert!( - bytes_padding_per_row % std::mem::size_of::() == 0, - "Padding is not a multiple of pixel size. Can't correctly pad the texture data" - ); + let texture = ctx.gpu_resources.textures.alloc(&ctx.device, &texture_desc); + + let format_info = texture_desc.format; + let width_blocks = dimensions.x / format_info.block_dimensions().0 as u32; + let bytes_per_row_unaligned = width_blocks * format_info.block_size(None).unwrap() as u32; + // TODO + let bytes_per_row = u32::max(bytes_per_row_unaligned, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT); - let mut depth_texture_staging = ctx.cpu_write_gpu_read_belt.lock().allocate::( + let mut texture_staging = ctx.cpu_write_gpu_read_belt.lock().allocate::( &ctx.device, &ctx.gpu_resources.buffers, - buffer_info.buffer_size_padded as usize / std::mem::size_of::(), + data.len(), ); + texture_staging.extend_from_slice(data); - // Fill with a single copy if possible, otherwise do multiple, filling in padding. 
- if bytes_padding_per_row == 0 { - depth_texture_staging.extend_from_slice(data); - } else { - let num_pixel_padding_per_row = bytes_padding_per_row / std::mem::size_of::(); - for row in data.chunks(depth_texture_desc.size.width as usize) { - depth_texture_staging.extend_from_slice(row); - depth_texture_staging - .extend(std::iter::repeat(T::zeroed()).take(num_pixel_padding_per_row)); - } - } - - depth_texture_staging.copy_to_texture2d( + texture_staging.copy_to_texture2d( ctx.active_frame.before_view_builder_encoder.lock().get(), wgpu::ImageCopyTexture { - texture: &depth_texture.inner.texture, + texture: &texture.inner.texture, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All, }, - depth_cloud.depth_dimensions, + glam::UVec2::new(texture_size.width, texture_size.height), ); - depth_texture + texture } pub struct DepthCloudRenderer { @@ -473,6 +502,16 @@ impl Renderer for DepthCloudRenderer { }, count: None, }, + wgpu::BindGroupLayoutEntry { + binding: 2, + visibility: wgpu::ShaderStages::VERTEX, + ty: wgpu::BindingType::Texture { + sample_type: wgpu::TextureSampleType::Float { filterable: true }, + view_dimension: wgpu::TextureViewDimension::D2, + multisampled: false, + }, + count: None, + }, ], }, ); diff --git a/crates/re_renderer/src/renderer/lines.rs b/crates/re_renderer/src/renderer/lines.rs index cf1222acfe1b..a46571928c63 100644 --- a/crates/re_renderer/src/renderer/lines.rs +++ b/crates/re_renderer/src/renderer/lines.rs @@ -104,10 +104,7 @@ //! * note that this would let us remove the degenerated quads between lines, making the approach cleaner and removing the "restart bit" //! 
-use std::{ - num::{NonZeroU32, NonZeroU64}, - ops::Range, -}; +use std::{num::NonZeroU64, ops::Range}; use bitflags::bitflags; use bytemuck::Zeroable; @@ -479,7 +476,7 @@ impl LineDrawData { bytemuck::cast_slice(&position_data_staging), wgpu::ImageDataLayout { offset: 0, - bytes_per_row: NonZeroU32::new( + bytes_per_row: Some( POSITION_TEXTURE_SIZE * std::mem::size_of::() as u32, ), rows_per_image: None, @@ -529,7 +526,7 @@ impl LineDrawData { bytemuck::cast_slice(&line_strip_info_staging), wgpu::ImageDataLayout { offset: 0, - bytes_per_row: NonZeroU32::new( + bytes_per_row: Some( LINE_STRIP_TEXTURE_SIZE * std::mem::size_of::() as u32, ), diff --git a/crates/re_renderer/src/renderer/mod.rs b/crates/re_renderer/src/renderer/mod.rs index 3b4284bc2a6d..b8b2bc508968 100644 --- a/crates/re_renderer/src/renderer/mod.rs +++ b/crates/re_renderer/src/renderer/mod.rs @@ -15,7 +15,7 @@ pub use point_cloud::{ mod depth_cloud; pub use self::depth_cloud::{ - DepthCloud, DepthCloudDepthData, DepthCloudDrawData, DepthCloudRenderer, DepthClouds, + DepthCloud, DepthCloudAlbedoData, DepthCloudDrawData, DepthCloudRenderer, DepthClouds, }; mod test_triangle; @@ -35,7 +35,7 @@ mod compositor; pub(crate) use compositor::CompositorDrawData; mod debug_overlay; -pub use debug_overlay::{DebugOverlayDrawData, DebugOverlayRenderer}; +pub use debug_overlay::{DebugOverlayDrawData, DebugOverlayError, DebugOverlayRenderer}; use crate::{ context::{RenderContext, SharedRendererData}, diff --git a/crates/re_renderer/src/renderer/point_cloud.rs b/crates/re_renderer/src/renderer/point_cloud.rs index 639db5a17ceb..03611b80a256 100644 --- a/crates/re_renderer/src/renderer/point_cloud.rs +++ b/crates/re_renderer/src/renderer/point_cloud.rs @@ -130,7 +130,7 @@ pub struct PointCloudBatchInfo { /// Defines an outline mask for an individual vertex ranges. /// - /// Vertex ranges are *not* relative within the current batch, but relates to the draw data vertex buffer. 
+ /// Vertex ranges are relative within the current batch. /// /// Having many of these individual outline masks can be slow as they require each their own uniform buffer & draw call. /// This feature is meant for a limited number of "extra selections" @@ -455,6 +455,8 @@ impl PointCloudDrawData { )); for (range, _) in &batch_info.additional_outline_mask_ids_vertex_ranges { + let range = (range.start + start_point_for_next_batch) + ..(range.end + start_point_for_next_batch); batches_internal.push(point_renderer.create_point_cloud_batch( ctx, format!("{:?} strip-only {:?}", batch_info.label, range).into(), diff --git a/crates/re_renderer/src/renderer/rectangles.rs b/crates/re_renderer/src/renderer/rectangles.rs index 799655ecf78c..8b380a30a2bd 100644 --- a/crates/re_renderer/src/renderer/rectangles.rs +++ b/crates/re_renderer/src/renderer/rectangles.rs @@ -19,6 +19,7 @@ use crate::{ draw_phases::{DrawPhase, OutlineMaskProcessor}, include_shader_module, resource_managers::{GpuTexture2D, ResourceManagerError}, + texture_info, view_builder::ViewBuilder, wgpu_resources::{ BindGroupDesc, BindGroupEntry, BindGroupLayoutDesc, GpuBindGroup, GpuBindGroupLayoutHandle, @@ -33,7 +34,7 @@ use super::{ }; /// Texture filter setting for magnification (a texel covers several pixels). -#[derive(Debug)] +#[derive(Debug, Clone, Copy)] pub enum TextureFilterMag { Linear, Nearest, @@ -41,7 +42,7 @@ pub enum TextureFilterMag { } /// Texture filter setting for minification (several texels fall to one pixel). -#[derive(Debug)] +#[derive(Debug, Clone, Copy)] pub enum TextureFilterMin { Linear, Nearest, @@ -98,6 +99,7 @@ impl ColormappedTexture { } } +#[derive(Clone)] pub struct TexturedRect { /// Top left corner position in world space. 
pub top_left_corner_position: glam::Vec3, @@ -114,6 +116,7 @@ pub struct TexturedRect { pub options: RectangleOptions, } +#[derive(Clone)] pub struct RectangleOptions { pub texture_filter_magnification: TextureFilterMag, pub texture_filter_minification: TextureFilterMin, @@ -166,7 +169,7 @@ pub enum RectangleError { } mod gpu_data { - use crate::wgpu_buffer_types; + use crate::{texture_info, wgpu_buffer_types}; use super::{ColorMapper, RectangleError, TexturedRect}; @@ -214,9 +217,11 @@ mod gpu_data { } impl UniformBuffer { - pub fn from_textured_rect(rectangle: &super::TexturedRect) -> Result { + pub fn from_textured_rect( + rectangle: &super::TexturedRect, + device_features: wgpu::Features, + ) -> Result { let texture_format = rectangle.colormapped_texture.texture.format(); - let texture_info = texture_format.describe(); let TexturedRect { top_left_corner_position, @@ -241,25 +246,25 @@ mod gpu_data { outline_mask, } = options; - let sample_type = match texture_info.sample_type { - wgpu::TextureSampleType::Float { .. } => { - if super::is_float_filterable(&texture_format) { + let sample_type = match texture_format.sample_type(None) { + Some(wgpu::TextureSampleType::Float { .. 
}) => { + if texture_info::is_float_filterable(texture_format, device_features) { SAMPLE_TYPE_FLOAT_FILTER } else { SAMPLE_TYPE_FLOAT_NOFILTER } } - wgpu::TextureSampleType::Depth => { + Some(wgpu::TextureSampleType::Sint) => SAMPLE_TYPE_SINT_NOFILTER, + Some(wgpu::TextureSampleType::Uint) => SAMPLE_TYPE_UINT_NOFILTER, + _ => { return Err(RectangleError::DepthTexturesNotSupported); } - wgpu::TextureSampleType::Sint => SAMPLE_TYPE_SINT_NOFILTER, - wgpu::TextureSampleType::Uint => SAMPLE_TYPE_UINT_NOFILTER, }; let mut colormap_function = 0; let color_mapper_int; - match texture_info.components { + match texture_info::num_texture_components(texture_format) { 1 => match color_mapper { Some(ColorMapper::Function(colormap)) => { color_mapper_int = COLOR_MAPPER_FUNCTION; @@ -352,7 +357,7 @@ impl RectangleDrawData { // TODO(emilk): continue on error (skipping just that rectangle)? let uniform_buffers: Vec<_> = rectangles .iter() - .map(gpu_data::UniformBuffer::from_textured_rect) + .map(|rect| gpu_data::UniformBuffer::from_textured_rect(rect, ctx.device.features())) .try_collect()?; let uniform_buffer_bindings = create_and_fill_uniform_buffer_batch( @@ -387,10 +392,9 @@ impl RectangleDrawData { let texture = &rectangle.colormapped_texture.texture; let texture_format = texture.creation_desc.format; - let texture_description = texture_format.describe(); - if texture_description.required_features != Default::default() { + if texture_format.required_features() != Default::default() { return Err(RectangleError::SpecialFeatures( - texture_description.required_features, + texture_format.required_features(), )); } @@ -400,23 +404,23 @@ impl RectangleDrawData { let mut texture_sint = ctx.texture_manager_2d.zeroed_texture_sint().handle; let mut texture_uint = ctx.texture_manager_2d.zeroed_texture_uint().handle; - match texture_description.sample_type { - wgpu::TextureSampleType::Float { .. 
} => { - if is_float_filterable(&texture_format) { + match texture_format.sample_type(None) { + Some(wgpu::TextureSampleType::Float { .. }) => { + if texture_info::is_float_filterable(texture_format, ctx.device.features()) { texture_float_filterable = texture.handle; } else { texture_float_nofilter = texture.handle; } } - wgpu::TextureSampleType::Depth => { - return Err(RectangleError::DepthTexturesNotSupported); - } - wgpu::TextureSampleType::Sint => { + Some(wgpu::TextureSampleType::Sint) => { texture_sint = texture.handle; } - wgpu::TextureSampleType::Uint => { + Some(wgpu::TextureSampleType::Uint) => { texture_uint = texture.handle; } + _ => { + return Err(RectangleError::DepthTexturesNotSupported); + } } // We also set up an optional colormap texture. @@ -689,11 +693,3 @@ impl Renderer for RectangleRenderer { ] } } - -fn is_float_filterable(format: &wgpu::TextureFormat) -> bool { - format - .describe() - .guaranteed_format_features - .flags - .contains(wgpu::TextureFormatFeatureFlags::FILTERABLE) -} diff --git a/crates/re_renderer/src/resource_managers/mod.rs b/crates/re_renderer/src/resource_managers/mod.rs index 7e1eff185a79..d9455d266071 100644 --- a/crates/re_renderer/src/resource_managers/mod.rs +++ b/crates/re_renderer/src/resource_managers/mod.rs @@ -10,7 +10,10 @@ mod mesh_manager; pub use mesh_manager::{GpuMeshHandle, MeshManager}; mod texture_manager; -pub use texture_manager::{GpuTexture2D, Texture2DCreationDesc, TextureManager2D}; +pub use texture_manager::{ + GpuTexture2D, Texture2DCreationDesc, TextureCreationError, TextureManager2D, + TextureManager2DError, +}; mod resource_manager; pub use resource_manager::{ResourceHandle, ResourceLifeTime, ResourceManagerError}; diff --git a/crates/re_renderer/src/resource_managers/texture_manager.rs b/crates/re_renderer/src/resource_managers/texture_manager.rs index 980918cb8a48..80da2c5768fe 100644 --- a/crates/re_renderer/src/resource_managers/texture_manager.rs +++ 
b/crates/re_renderer/src/resource_managers/texture_manager.rs @@ -1,4 +1,4 @@ -use std::{num::NonZeroU32, sync::Arc}; +use std::sync::Arc; use ahash::{HashMap, HashSet}; @@ -75,7 +75,9 @@ pub struct Texture2DCreationDesc<'a> { pub label: DebugLabel, /// Data for the highest mipmap level. - /// Must be padded according to wgpu rules and ready for upload. + /// + /// Data is expected to be tightly packed. + /// I.e. it is *not* padded according to wgpu buffer->texture transfer rules, padding will happen on the fly if necessary. /// TODO(andreas): This should be a kind of factory function/builder instead which gets target memory passed in. pub data: std::borrow::Cow<'a, [u8]>, pub format: wgpu::TextureFormat, @@ -93,6 +95,41 @@ impl<'a> Texture2DCreationDesc<'a> { } } +// TODO(andreas): Move this to texture pool. +#[derive(thiserror::Error, Debug)] +pub enum TextureCreationError { + #[error("Texture with debug label {0:?} has zero width or height!")] + ZeroSize(DebugLabel), + + #[error( + "Texture with debug label {label:?} has a format {format:?} that data can't be transferred to!" + )] + UnsupportedFormatForTransfer { + label: DebugLabel, + format: wgpu::TextureFormat, + }, +} + +#[derive(thiserror::Error, Debug)] +pub enum TextureManager2DError { + /// Something went wrong when creating the GPU texture. + #[error(transparent)] + TextureCreation(#[from] TextureCreationError), + + /// Something went wrong in a user-callback. + #[error(transparent)] + DataCreation(DataCreationError), +} + +impl From> for TextureCreationError { + fn from(err: TextureManager2DError) -> Self { + match err { + TextureManager2DError::TextureCreation(texture_creation) => texture_creation, + TextureManager2DError::DataCreation(never) => match never {}, + } + } +} + /// Texture manager for 2D textures. 
/// /// The scope is intentionally limited to particular kinds of textures that currently @@ -144,7 +181,8 @@ impl TextureManager2D { width: 1, height: 1, }, - ); + ) + .expect("Failed to create white pixel texture!"); let zeroed_texture_float = create_zero_texture(texture_pool, &device, wgpu::TextureFormat::Rgba8Unorm); @@ -174,7 +212,7 @@ impl TextureManager2D { &mut self, texture_pool: &mut GpuTexturePool, creation_desc: &Texture2DCreationDesc<'_>, - ) -> GpuTexture2D { + ) -> Result { // TODO(andreas): Disabled the warning as we're moving towards using this texture manager for user-logged images. // However, it's still very much a concern especially once we add mipmapping. Something we need to keep in mind. // @@ -201,39 +239,47 @@ impl TextureManager2D { key: u64, texture_pool: &mut GpuTexturePool, texture_desc: Texture2DCreationDesc<'_>, - ) -> GpuTexture2D { - enum Never {} - match self.get_or_create_with(key, texture_pool, || -> Result<_, Never> { - Ok(texture_desc) - }) { - Ok(tex_handle) => tex_handle, - Err(never) => match never {}, - } + ) -> Result { + self.get_or_create_with(key, texture_pool, || texture_desc) } /// Creates a new 2D texture resource and schedules data upload to the GPU if a texture /// wasn't already created using the same key. - pub fn get_or_create_with<'a, Err>( + pub fn get_or_create_with<'a>( + &mut self, + key: u64, + texture_pool: &mut GpuTexturePool, + create_texture_desc: impl FnOnce() -> Texture2DCreationDesc<'a>, + ) -> Result { + self.get_or_try_create_with(key, texture_pool, || -> Result<_, never::Never> { + Ok(create_texture_desc()) + }) + .map_err(|err| err.into()) + } + + /// Creates a new 2D texture resource and schedules data upload to the GPU if a texture + /// wasn't already created using the same key. 
+ pub fn get_or_try_create_with<'a, Err: std::fmt::Display>( &mut self, key: u64, texture_pool: &mut GpuTexturePool, try_create_texture_desc: impl FnOnce() -> Result, Err>, - ) -> Result { + ) -> Result> { let texture_handle = match self.texture_cache.entry(key) { std::collections::hash_map::Entry::Occupied(texture_handle) => { texture_handle.get().clone() // already inserted } std::collections::hash_map::Entry::Vacant(entry) => { // Run potentially expensive texture creation code: - let tex_creation_desc = try_create_texture_desc()?; - entry - .insert(Self::create_and_upload_texture( - &self.device, - &self.queue, - texture_pool, - &tex_creation_desc, - )) - .clone() + let tex_creation_desc = try_create_texture_desc() + .map_err(|err| TextureManager2DError::DataCreation(err))?; + let texture = Self::create_and_upload_texture( + &self.device, + &self.queue, + texture_pool, + &tex_creation_desc, + )?; + entry.insert(texture).clone() } }; @@ -276,8 +322,13 @@ impl TextureManager2D { queue: &wgpu::Queue, texture_pool: &mut GpuTexturePool, creation_desc: &Texture2DCreationDesc<'_>, - ) -> GpuTexture2D { + ) -> Result { crate::profile_function!(); + + if creation_desc.width == 0 || creation_desc.height == 0 { + return Err(TextureCreationError::ZeroSize(creation_desc.label.clone())); + } + let size = wgpu::Extent3d { width: creation_desc.width, height: creation_desc.height, @@ -296,9 +347,15 @@ impl TextureManager2D { }, ); - let format_info = creation_desc.format.describe(); - let width_blocks = creation_desc.width / format_info.block_dimensions.0 as u32; - let bytes_per_row_unaligned = width_blocks * format_info.block_size as u32; + let width_blocks = creation_desc.width / creation_desc.format.block_dimensions().0; + let block_size = creation_desc + .format + .block_size(Some(wgpu::TextureAspect::All)) + .ok_or_else(|| TextureCreationError::UnsupportedFormatForTransfer { + label: creation_desc.label.clone(), + format: creation_desc.format, + })?; + let 
bytes_per_row_unaligned = width_blocks * block_size; // TODO(andreas): Once we have our own temp buffer for uploading, we can do the padding inplace // I.e. the only difference will be if we do one memcopy or one memcopy per row, making row padding a nuisance! @@ -317,9 +374,7 @@ impl TextureManager2D { data, wgpu::ImageDataLayout { offset: 0, - bytes_per_row: Some( - NonZeroU32::new(bytes_per_row_unaligned).expect("invalid bytes per row"), - ), + bytes_per_row: Some(bytes_per_row_unaligned), rows_per_image: None, }, size, @@ -327,7 +382,7 @@ impl TextureManager2D { // TODO(andreas): mipmap generation - GpuTexture2D(texture) + Ok(GpuTexture2D(texture)) } pub(crate) fn begin_frame(&mut self, _frame_index: u64) { diff --git a/crates/re_renderer/src/texture_info.rs b/crates/re_renderer/src/texture_info.rs new file mode 100644 index 000000000000..e18d447e62f8 --- /dev/null +++ b/crates/re_renderer/src/texture_info.rs @@ -0,0 +1,206 @@ +use std::borrow::Cow; + +/// Utility for dealing with buffers containing raw 2D texture data. +#[derive(Clone)] +pub struct Texture2DBufferInfo { + /// How many bytes per row contain actual data. + pub bytes_per_row_unpadded: u32, + + /// How many bytes per row are required to be allocated in total. + /// + /// Padding bytes are always at the end of a row. + pub bytes_per_row_padded: u32, + + /// Size required for an unpadded buffer. + pub buffer_size_unpadded: wgpu::BufferAddress, + + /// Size required for a padded buffer as it is read/written from/to the GPU. + pub buffer_size_padded: wgpu::BufferAddress, +} + +impl Texture2DBufferInfo { + /// Retrieves 2D texture buffer info for a given format & texture size. + /// + /// If a single buffer is not possible for all aspects of the texture format, all sizes will be zero. 
+ #[inline] + pub fn new(format: wgpu::TextureFormat, extent: glam::UVec2) -> Self { + let block_dimensions = format.block_dimensions(); + let width_blocks = extent.x / block_dimensions.0; + let height_blocks = extent.y / block_dimensions.1; + + let block_size = format + .block_size(Some(wgpu::TextureAspect::All)) + .unwrap_or(0); // This happens if we can't have a single buffer. + let bytes_per_row_unpadded = width_blocks * block_size; + let bytes_per_row_padded = + wgpu::util::align_to(bytes_per_row_unpadded, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT); + + Self { + bytes_per_row_unpadded, + bytes_per_row_padded, + buffer_size_unpadded: (bytes_per_row_unpadded * height_blocks) as wgpu::BufferAddress, + buffer_size_padded: (bytes_per_row_padded * height_blocks) as wgpu::BufferAddress, + } + } + + #[inline] + pub fn num_rows(&self) -> u32 { + self.buffer_size_padded as u32 / self.bytes_per_row_padded + } + + /// Removes the padding from a buffer containing gpu texture data. + /// + /// The passed in buffer is to be expected to be exactly of size [`Texture2DBufferInfo::buffer_size_padded`]. + /// + /// Note that if you're passing in gpu data, there no alignment guarantees on the returned slice, + /// do NOT convert it using [`bytemuck`]. Use [`Texture2DBufferInfo::remove_padding_and_convert`] instead. 
+ pub fn remove_padding<'a>(&self, buffer: &'a [u8]) -> Cow<'a, [u8]> { + crate::profile_function!(); + + assert_eq!(buffer.len() as wgpu::BufferAddress, self.buffer_size_padded); + + if self.bytes_per_row_padded == self.bytes_per_row_unpadded { + return Cow::Borrowed(buffer); + } + + let mut unpadded_buffer = Vec::with_capacity(self.buffer_size_unpadded as _); + + for row in 0..self.num_rows() { + let offset = (self.bytes_per_row_padded * row) as usize; + unpadded_buffer.extend_from_slice( + &buffer[offset..(offset + self.bytes_per_row_unpadded as usize)], + ); + } + + unpadded_buffer.into() + } + + /// Removes the padding from a buffer containing gpu texture data and remove convert to a given type. + /// + /// The passed in buffer is to be expected to be exactly of size [`Texture2DBufferInfo::buffer_size_padded`]. + /// + /// The unpadded row size is expected to be a multiple of the size of the target type. + /// (Which means that, while uncommon, it technically doesn't need to be as big as a block in the pixel - this can be useful for e.g. packing wide bitfields) + pub fn remove_padding_and_convert(&self, buffer: &[u8]) -> Vec { + crate::profile_function!(); + + assert_eq!(buffer.len() as wgpu::BufferAddress, self.buffer_size_padded); + assert!(self.bytes_per_row_unpadded % std::mem::size_of::() as u32 == 0); + + // Due to https://github.com/gfx-rs/wgpu/issues/3508 the data might be completely unaligned, + // so much, that we can't even interpret it as e.g. a u32 slice. + // Therefore, we have to do a copy of the data regardless of whether it's padded or not. + + let mut unpadded_buffer: Vec = vec![ + T::zeroed(); + (self.num_rows() * self.bytes_per_row_unpadded / std::mem::size_of::() as u32) + as usize + ]; // TODO(andreas): Consider using unsafe set_len() instead of vec![] to avoid zeroing the memory. + + // The copy has to happen on a u8 slice, because any other type would assume some alignment that we can't guarantee because of the above. 
+ let unpadded_buffer_u8_view = bytemuck::cast_slice_mut(&mut unpadded_buffer); + + for row in 0..self.num_rows() { + let offset_padded = (self.bytes_per_row_padded * row) as usize; + let offset_unpadded = (self.bytes_per_row_unpadded * row) as usize; + unpadded_buffer_u8_view + [offset_unpadded..(offset_unpadded + self.bytes_per_row_unpadded as usize)] + .copy_from_slice( + &buffer[offset_padded..(offset_padded + self.bytes_per_row_unpadded as usize)], + ); + } + + unpadded_buffer + } +} + +pub fn is_float_filterable(format: wgpu::TextureFormat, device_features: wgpu::Features) -> bool { + format + .guaranteed_format_features(device_features) + .flags + .contains(wgpu::TextureFormatFeatureFlags::FILTERABLE) +} + +pub fn num_texture_components(format: wgpu::TextureFormat) -> u8 { + #[allow(clippy::match_same_arms)] + match format { + wgpu::TextureFormat::R8Unorm + | wgpu::TextureFormat::R8Snorm + | wgpu::TextureFormat::R8Uint + | wgpu::TextureFormat::R8Sint + | wgpu::TextureFormat::R16Uint + | wgpu::TextureFormat::R16Sint + | wgpu::TextureFormat::R16Unorm + | wgpu::TextureFormat::R16Snorm + | wgpu::TextureFormat::R16Float + | wgpu::TextureFormat::R32Uint + | wgpu::TextureFormat::R32Sint + | wgpu::TextureFormat::R32Float => 1, + + wgpu::TextureFormat::Rg8Unorm + | wgpu::TextureFormat::Rg8Snorm + | wgpu::TextureFormat::Rg8Uint + | wgpu::TextureFormat::Rg8Sint + | wgpu::TextureFormat::Rg16Uint + | wgpu::TextureFormat::Rg16Sint + | wgpu::TextureFormat::Rg16Unorm + | wgpu::TextureFormat::Rg16Snorm + | wgpu::TextureFormat::Rg16Float + | wgpu::TextureFormat::Rg32Uint + | wgpu::TextureFormat::Rg32Sint + | wgpu::TextureFormat::Rg32Float => 2, + + wgpu::TextureFormat::Rgba8Unorm + | wgpu::TextureFormat::Rgba8UnormSrgb + | wgpu::TextureFormat::Rgba8Snorm + | wgpu::TextureFormat::Rgba8Uint + | wgpu::TextureFormat::Rgba8Sint + | wgpu::TextureFormat::Bgra8Unorm + | wgpu::TextureFormat::Bgra8UnormSrgb + | wgpu::TextureFormat::Rgba16Uint + | wgpu::TextureFormat::Rgba16Sint + | 
wgpu::TextureFormat::Rgba16Unorm + | wgpu::TextureFormat::Rgba16Snorm + | wgpu::TextureFormat::Rgba16Float + | wgpu::TextureFormat::Rgba32Uint + | wgpu::TextureFormat::Rgba32Sint + | wgpu::TextureFormat::Rgba32Float => 4, + + wgpu::TextureFormat::Rgb9e5Ufloat | wgpu::TextureFormat::Rg11b10Float => 3, + wgpu::TextureFormat::Rgb10a2Unorm => 4, + + wgpu::TextureFormat::Stencil8 + | wgpu::TextureFormat::Depth16Unorm + | wgpu::TextureFormat::Depth24Plus + | wgpu::TextureFormat::Depth32Float => 1, + + // It's complicated. Each aspect has actually only a single channel. + wgpu::TextureFormat::Depth24PlusStencil8 | wgpu::TextureFormat::Depth32FloatStencil8 => 2, + + wgpu::TextureFormat::Bc1RgbaUnorm + | wgpu::TextureFormat::Bc1RgbaUnormSrgb + | wgpu::TextureFormat::Bc2RgbaUnorm + | wgpu::TextureFormat::Bc2RgbaUnormSrgb + | wgpu::TextureFormat::Bc3RgbaUnorm + | wgpu::TextureFormat::Bc3RgbaUnormSrgb + | wgpu::TextureFormat::Bc4RUnorm + | wgpu::TextureFormat::Bc4RSnorm + | wgpu::TextureFormat::Bc5RgUnorm + | wgpu::TextureFormat::Bc5RgSnorm + | wgpu::TextureFormat::Bc6hRgbUfloat + | wgpu::TextureFormat::Bc6hRgbFloat + | wgpu::TextureFormat::Bc7RgbaUnorm + | wgpu::TextureFormat::Bc7RgbaUnormSrgb + | wgpu::TextureFormat::Etc2Rgb8Unorm + | wgpu::TextureFormat::Etc2Rgb8UnormSrgb + | wgpu::TextureFormat::Etc2Rgb8A1Unorm + | wgpu::TextureFormat::Etc2Rgb8A1UnormSrgb + | wgpu::TextureFormat::Etc2Rgba8Unorm + | wgpu::TextureFormat::Etc2Rgba8UnormSrgb + | wgpu::TextureFormat::EacR11Unorm + | wgpu::TextureFormat::EacR11Snorm + | wgpu::TextureFormat::EacRg11Unorm + | wgpu::TextureFormat::EacRg11Snorm + | wgpu::TextureFormat::Astc { .. 
} => 4, + } +} diff --git a/crates/re_renderer/src/view_builder.rs b/crates/re_renderer/src/view_builder.rs index 3d57b21a6f1c..86d8fc817b59 100644 --- a/crates/re_renderer/src/view_builder.rs +++ b/crates/re_renderer/src/view_builder.rs @@ -6,7 +6,8 @@ use crate::{ allocator::{create_and_fill_uniform_buffer, GpuReadbackIdentifier}, context::RenderContext, draw_phases::{ - DrawPhase, OutlineConfig, OutlineMaskProcessor, PickingLayerProcessor, ScreenshotProcessor, + DrawPhase, OutlineConfig, OutlineMaskProcessor, PickingLayerError, PickingLayerProcessor, + ScreenshotProcessor, }, global_bindings::FrameUniformBuffer, renderer::{CompositorDrawData, DebugOverlayDrawData, DrawData, Renderer}, @@ -37,6 +38,9 @@ pub enum ViewBuilderError { #[error("Picking rectangle readback was already scheduled.")] PickingRectAlreadyScheduled, + + #[error(transparent)] + InvalidDebugOverlay(#[from] crate::renderer::DebugOverlayError), } /// The highest level rendering block in `re_renderer`. @@ -562,7 +566,15 @@ impl ViewBuilder { //pass.set_bind_group(0, &setup.bind_group_0, &[]); self.draw_phase(ctx, DrawPhase::PickingLayer, &mut pass); } - picking_processor.end_render_pass(&mut encoder, &ctx.gpu_resources)?; + match picking_processor.end_render_pass(&mut encoder, &ctx.gpu_resources) { + Err(PickingLayerError::ResourcePoolError(err)) => { + return Err(err); + } + Err(PickingLayerError::ReadbackError(err)) => { + re_log::warn_once!("Failed to schedule picking data readback: {err}"); + } + Ok(()) => {} + } } if let Some(outline_mask_processor) = self.outline_mask_processor.take() { @@ -582,7 +594,12 @@ impl ViewBuilder { pass.set_bind_group(0, &setup.bind_group_0, &[]); self.draw_phase(ctx, DrawPhase::CompositingScreenshot, &mut pass); } - screenshot_processor.end_render_pass(&mut encoder); + match screenshot_processor.end_render_pass(&mut encoder) { + Ok(()) => {} + Err(err) => { + re_log::warn_once!("Failed to schedule screenshot data readback: {err}"); + } + } } Ok(encoder.finish()) 
@@ -692,7 +709,7 @@ impl ViewBuilder { &picking_processor.picking_target, self.setup.resolution_in_pixel.into(), picking_rect, - )); + )?); } self.picking_processor = Some(picking_processor); diff --git a/crates/re_renderer/src/wgpu_resources/dynamic_resource_pool.rs b/crates/re_renderer/src/wgpu_resources/dynamic_resource_pool.rs index 6c873429db4b..5c71fe51ac28 100644 --- a/crates/re_renderer/src/wgpu_resources/dynamic_resource_pool.rs +++ b/crates/re_renderer/src/wgpu_resources/dynamic_resource_pool.rs @@ -148,6 +148,13 @@ where self.current_frame_index = frame_index; let state = self.state.get_mut(); + let update_stats = |creation_desc: &Desc| { + self.total_resource_size_in_bytes.fetch_sub( + creation_desc.resource_size_in_bytes(), + std::sync::atomic::Ordering::Relaxed, + ); + }; + // Throw out any resources that we haven't reclaimed last frame. for (desc, resources) in state.last_frame_deallocated.drain() { re_log::trace!( @@ -160,11 +167,8 @@ where debug_assert!(false, "a resource was marked as destroyed last frame that we no longer kept track of"); continue; }; + update_stats(&desc); on_destroy_resource(&removed_resource); - self.total_resource_size_in_bytes.fetch_sub( - desc.resource_size_in_bytes(), - std::sync::atomic::Ordering::Relaxed, - ); } } @@ -184,6 +188,7 @@ where .push(resource.handle); true } else { + update_stats(&resource.creation_desc); on_destroy_resource(&resource.inner); false } diff --git a/crates/re_renderer/src/wgpu_resources/mod.rs b/crates/re_renderer/src/wgpu_resources/mod.rs index 06f30b6292d3..e5e3a389f731 100644 --- a/crates/re_renderer/src/wgpu_resources/mod.rs +++ b/crates/re_renderer/src/wgpu_resources/mod.rs @@ -7,7 +7,6 @@ //! higher level resources that arise from processing user provided data. 
mod bind_group_layout_pool; -use std::borrow::Cow; pub use bind_group_layout_pool::{ BindGroupLayoutDesc, GpuBindGroupLayoutHandle, GpuBindGroupLayoutPool, @@ -116,112 +115,3 @@ impl WgpuResourcePools { } } } - -/// Utility for dealing with buffers containing raw 2D texture data. -#[derive(Clone)] -pub struct Texture2DBufferInfo { - /// How many bytes per row contain actual data. - pub bytes_per_row_unpadded: u32, - - /// How many bytes per row are required to be allocated in total. - /// - /// Padding bytes are always at the end of a row. - pub bytes_per_row_padded: u32, - - /// Size required for an unpadded buffer. - pub buffer_size_unpadded: wgpu::BufferAddress, - - /// Size required for a padded buffer as it is read/written from/to the GPU. - pub buffer_size_padded: wgpu::BufferAddress, -} - -impl Texture2DBufferInfo { - #[inline] - pub fn new(format: wgpu::TextureFormat, extent: glam::UVec2) -> Self { - let format_info = format.describe(); - - let width_blocks = extent.x / format_info.block_dimensions.0 as u32; - let height_blocks = extent.y / format_info.block_dimensions.1 as u32; - - let bytes_per_row_unpadded = width_blocks * format_info.block_size as u32; - let bytes_per_row_padded = - wgpu::util::align_to(bytes_per_row_unpadded, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT); - - Self { - bytes_per_row_unpadded, - bytes_per_row_padded, - buffer_size_unpadded: (bytes_per_row_unpadded * height_blocks) as wgpu::BufferAddress, - buffer_size_padded: (bytes_per_row_padded * height_blocks) as wgpu::BufferAddress, - } - } - - #[inline] - pub fn num_rows(&self) -> u32 { - self.buffer_size_padded as u32 / self.bytes_per_row_padded - } - - /// Removes the padding from a buffer containing gpu texture data. - /// - /// The passed in buffer is to be expected to be exactly of size [`Texture2DBufferInfo::buffer_size_padded`]. - /// - /// Note that if you're passing in gpu data, there no alignment guarantees on the returned slice, - /// do NOT convert it using [`bytemuck`]. 
Use [`Texture2DBufferInfo::remove_padding_and_convert`] instead. - pub fn remove_padding<'a>(&self, buffer: &'a [u8]) -> Cow<'a, [u8]> { - crate::profile_function!(); - - assert_eq!(buffer.len() as wgpu::BufferAddress, self.buffer_size_padded); - - if self.bytes_per_row_padded == self.bytes_per_row_unpadded { - return Cow::Borrowed(buffer); - } - - let mut unpadded_buffer = Vec::with_capacity(self.buffer_size_unpadded as _); - - for row in 0..self.num_rows() { - let offset = (self.bytes_per_row_padded * row) as usize; - unpadded_buffer.extend_from_slice( - &buffer[offset..(offset + self.bytes_per_row_unpadded as usize)], - ); - } - - unpadded_buffer.into() - } - - /// Removes the padding from a buffer containing gpu texture data and remove convert to a given type. - /// - /// The passed in buffer is to be expected to be exactly of size [`Texture2DBufferInfo::buffer_size_padded`]. - /// - /// The unpadded row size is expected to be a multiple of the size of the target type. - /// (Which means that, while uncommon, it technically doesn't need to be as big as a block in the pixel - this can be useful for e.g. packing wide bitfields) - pub fn remove_padding_and_convert(&self, buffer: &[u8]) -> Vec { - crate::profile_function!(); - - assert_eq!(buffer.len() as wgpu::BufferAddress, self.buffer_size_padded); - assert!(self.bytes_per_row_unpadded % std::mem::size_of::() as u32 == 0); - - // Due to https://github.com/gfx-rs/wgpu/issues/3508 the data might be completely unaligned, - // so much, that we can't even interpret it as e.g. a u32 slice. - // Therefore, we have to do a copy of the data regardless of whether it's padded or not. - - let mut unpadded_buffer: Vec = vec![ - T::zeroed(); - (self.num_rows() * self.bytes_per_row_unpadded / std::mem::size_of::() as u32) - as usize - ]; // TODO(andreas): Consider using unsafe set_len() instead of vec![] to avoid zeroing the memory. 
- - // The copy has to happen on a u8 slice, because any other type would assume some alignment that we can't guarantee because of the above. - let unpadded_buffer_u8_view = bytemuck::cast_slice_mut(&mut unpadded_buffer); - - for row in 0..self.num_rows() { - let offset_padded = (self.bytes_per_row_padded * row) as usize; - let offset_unpadded = (self.bytes_per_row_unpadded * row) as usize; - unpadded_buffer_u8_view - [offset_unpadded..(offset_unpadded + self.bytes_per_row_unpadded as usize)] - .copy_from_slice( - &buffer[offset_padded..(offset_padded + self.bytes_per_row_unpadded as usize)], - ); - } - - unpadded_buffer - } -} diff --git a/crates/re_renderer/src/wgpu_resources/sampler_pool.rs b/crates/re_renderer/src/wgpu_resources/sampler_pool.rs index 53eed49cdf73..2c03824cf1c9 100644 --- a/crates/re_renderer/src/wgpu_resources/sampler_pool.rs +++ b/crates/re_renderer/src/wgpu_resources/sampler_pool.rs @@ -1,4 +1,4 @@ -use std::{hash::Hash, num::NonZeroU8}; +use std::hash::Hash; use super::{resource::PoolError, static_resource_pool::StaticResourcePool}; use crate::debug_label::DebugLabel; @@ -33,9 +33,6 @@ pub struct SamplerDesc { /// Maximum level of detail (i.e. mip level) to use pub lod_max_clamp: ordered_float::NotNan, - - /// Valid values: 1, 2, 4, 8, and 16. 
- pub anisotropy_clamp: Option, } #[derive(Default)] @@ -56,11 +53,11 @@ impl GpuSamplerPool { mipmap_filter: desc.mipmap_filter, lod_min_clamp: desc.lod_min_clamp.into(), lod_max_clamp: desc.lod_max_clamp.into(), - anisotropy_clamp: desc.anisotropy_clamp, // Unsupported compare: None, border_color: None, + anisotropy_clamp: 1, }) }) } diff --git a/crates/re_renderer/src/wgpu_resources/shader_module_pool.rs b/crates/re_renderer/src/wgpu_resources/shader_module_pool.rs index 1fb291f47f05..9b42f87d1788 100644 --- a/crates/re_renderer/src/wgpu_resources/shader_module_pool.rs +++ b/crates/re_renderer/src/wgpu_resources/shader_module_pool.rs @@ -54,13 +54,18 @@ impl ShaderModuleDesc { &self, device: &wgpu::Device, resolver: &mut FileResolver, + shader_text_workaround_replacements: &[(String, String)], ) -> wgpu::ShaderModule { - let source_interpolated = resolver + let mut source_interpolated = resolver .populate(&self.source) .context("couldn't resolve shader module's contents") .map_err(|err| re_log::error!(err=%re_error::format(err))) .unwrap_or_default(); + for (from, to) in shader_text_workaround_replacements { + source_interpolated.contents = source_interpolated.contents.replace(from, to); + } + // All wgpu errors come asynchronously: this call will succeed whether the given // source is valid or not. // Only when actually submitting passes that make use of this shader will we know if @@ -78,6 +83,11 @@ impl ShaderModuleDesc { #[derive(Default)] pub struct GpuShaderModulePool { pool: StaticResourcePool, + + /// Workarounds via text replacement in shader source code. + /// + /// TODO(andreas): These should be solved with a pre-processor. 
+ pub shader_text_workaround_replacements: Vec<(String, String)>, } impl GpuShaderModulePool { @@ -87,8 +97,9 @@ impl GpuShaderModulePool { resolver: &mut FileResolver, desc: &ShaderModuleDesc, ) -> GpuShaderModuleHandle { - self.pool - .get_or_create(desc, |desc| desc.create_shader_module(device, resolver)) + self.pool.get_or_create(desc, |desc| { + desc.create_shader_module(device, resolver, &self.shader_text_workaround_replacements) + }) } pub fn begin_frame( @@ -115,7 +126,11 @@ impl GpuShaderModulePool { } paths.iter().any(|p| updated_paths.contains(p)).then(|| { - let shader_module = desc.create_shader_module(device, resolver); + let shader_module = desc.create_shader_module( + device, + resolver, + &self.shader_text_workaround_replacements, + ); re_log::debug!(?desc.source, label = desc.label.get(), "recompiled shader module"); shader_module }) diff --git a/crates/re_renderer/src/wgpu_resources/texture_pool.rs b/crates/re_renderer/src/wgpu_resources/texture_pool.rs index c113b4f603d8..d194b387f773 100644 --- a/crates/re_renderer/src/wgpu_resources/texture_pool.rs +++ b/crates/re_renderer/src/wgpu_resources/texture_pool.rs @@ -69,9 +69,20 @@ impl DynamicResourcesDesc for TextureDesc { /// The actual number might be both bigger (padding) and lower (gpu sided compression). 
fn resource_size_in_bytes(&self) -> u64 { let mut size_in_bytes = 0; - let format_desc = self.format.describe(); - let pixels_per_block = - format_desc.block_dimensions.0 as u64 * format_desc.block_dimensions.1 as u64; + let block_size = self + .format + .block_size(Some(wgpu::TextureAspect::All)) + .unwrap_or_else(|| { + self.format + .block_size(Some(wgpu::TextureAspect::DepthOnly)) + .unwrap_or(0) + + self + .format + .block_size(Some(wgpu::TextureAspect::StencilOnly)) + .unwrap_or(0) + }); + let block_dimension = self.format.block_dimensions(); + let pixels_per_block = block_dimension.0 as u64 * block_dimension.1 as u64; for mip in 0..self.size.max_mips(self.dimension) { let mip_size = self @@ -80,7 +91,7 @@ impl DynamicResourcesDesc for TextureDesc { .physical_size(self.format); let num_pixels = mip_size.width * mip_size.height * mip_size.depth_or_array_layers; let num_blocks = num_pixels as u64 / pixels_per_block; - size_in_bytes += num_blocks * format_desc.block_size as u64; + size_in_bytes += num_blocks * block_size as u64; } size_in_bytes diff --git a/crates/re_ui/Cargo.toml b/crates/re_ui/Cargo.toml index cfa5732751ca..b9a59e8f9c0e 100644 --- a/crates/re_ui/Cargo.toml +++ b/crates/re_ui/Cargo.toml @@ -30,8 +30,8 @@ egui_dock = ["dep:egui_dock"] [dependencies] -egui = { workspace = true, features = ["extra_debug_asserts", "tracing"] } -egui_extras = { workspace = true, features = ["tracing"] } +egui.workspace = true +egui_extras.workspace = true image = { workspace = true, default-features = false, features = ["png"] } parking_lot.workspace = true serde = { version = "1", features = ["derive"] } @@ -39,11 +39,10 @@ serde_json = "1" strum = { version = "0.24", features = ["derive"] } strum_macros = "0.24" sublime_fuzzy = "0.7" - ## Optional dependencies: eframe = { workspace = true, optional = true, default-features = false } egui_dock = { workspace = true, optional = true, features = ["serde"] } +re_log.workspace = true [dev-dependencies] eframe = { 
workspace = true, default-features = false, features = ["wgpu"] } -re_log.workspace = true diff --git a/crates/re_ui/data/design_tokens.json b/crates/re_ui/data/design_tokens.json index 9c8974d9a4c5..7845d4a7ad2d 100644 --- a/crates/re_ui/data/design_tokens.json +++ b/crates/re_ui/data/design_tokens.json @@ -3,121 +3,126 @@ "Color": { "Surface": { "Default": { - "description": "Background color for most UI surfaces in Rerun", - "value": "{Global.Color.Grey.100}", - "type": "color" + "value": "{Global.Color.Gray.50}", + "type": "color", + "description": "Background color for most UI surfaces in Rerun" }, "Floating": { - "description": "Background color for floating elements like menus, dropdown options, notifications etc.", - "value": "{Global.Color.Grey.175}", - "type": "color" + "value": "{Global.Color.Primary.25}", + "type": "color", + "description": "Background color for floating elements like menus, dropdown options, notifications etc." } }, "Action": { "Default": { - "description": "Background color for UI elements like buttons and selects", - "value": "{Global.Color.Grey.200}", - "type": "color" + "value": "{Global.Color.Gray.200}", + "type": "color", + "description": "Background color for UI elements like buttons and selects" }, "Hovered": { - "description": "Background color for hovered UI elements", - "value": "{Global.Color.Grey.225}", - "type": "color" + "value": "{Global.Color.Primary.Hover}", + "type": "color", + "description": "Background color for hovered UI elements" }, "Active": { - "description": "Background color for pressed UI elements", - "value": "{Global.Color.Grey.250}", - "type": "color" + "value": "{Global.Color.Primary.Hover}", + "type": "color", + "description": "Background color for pressed UI elements" }, "Pressed": { - "description": "Background color for suppressed UI elements, like a select that is currently showing a menu", - "value": "{Global.Color.Grey.250}", - "type": "color" + "value": "{Global.Color.Primary.Hover}", + "type": 
"color", + "description": "Background color for suppressed UI elements, like a select that is currently showing a menu" + }, + "Inactive": { + "value": "{Global.Color.Gray.200}", + "type": "color", + "description": "Background color for inactive buttons and such" } }, "NotificationBadge": { "Unread": { - "description": "Used for unread notification indicators", "value": "{Global.Color.Blue.500}", - "type": "color" + "type": "color", + "description": "Used for unread notification indicators" }, "Read": { - "description": "Used for read notification indicators", "value": "{Global.Color.Grey.250}", - "type": "color" + "type": "color", + "description": "Used for read notification indicators" } }, "Text": { "Default": { - "description": "Default text color", - "value": "{Global.Color.Grey.775}", - "type": "color" + "value": "{Global.Color.Text.Primary}", + "type": "color", + "description": "Default text color" }, "Subdued": { - "description": "Used for less important text", - "value": "{Global.Color.Grey.550}", - "type": "color" + "value": "{Global.Color.Text.Secondary}", + "type": "color", + "description": "Used for less important text" }, "Strong": { - "description": "Used for highlighted or emphasized items, such as current navigation items", - "value": "{Global.Color.Grey.1000}", - "type": "color" + "value": "{Global.Color.Black}", + "type": "color", + "description": "Used for highlighted or emphasized items, such as current navigation items" } }, "Border": { "Default": { - "value": "{Global.Color.OpaqueGrey.Default}", - "description": "Default color for borders", - "type": "color" + "value": "{Global.Color.Gray.200}", + "type": "color", + "description": "Default color for borders" } }, "Icon": { "Default": { - "description": "Default icon color", "value": "{Global.Color.Grey.775}", - "type": "color" + "type": "color", + "description": "Default icon color" }, "Subdued": { - "description": "Used together with subdued text", "value": "{Global.Color.Grey.550}", - 
"type": "color" + "type": "color", + "description": "Used together with subdued text" }, "Strong": { - "description": "Used together width strong text", "value": "{Global.Color.Grey.1000}", - "type": "color" + "type": "color", + "description": "Used together width strong text" } }, "Highlight": { "Default": { - "value": "{Global.Color.Blue.350}", - "description": "Default color for highlighted items, like hovered menu items", - "type": "color" + "value": "{Global.Color.Primary.Hover}", + "type": "color", + "description": "Default color for highlighted items, like hovered menu items" } } }, "Typography": { "Default": { "value": "{Global.Typography.200}", - "description": "Default font in Rerun's UI", - "type": "typography" + "type": "typography", + "description": "Default font in Rerun's UI" } }, "Shadow": { "Menu": { - "description": "Used for menus, such as selects", "value": "{Global.Shadow.100}", - "type": "boxShadow" + "type": "boxShadow", + "description": "Used for menus, such as selects" }, "Popover": { - "description": "Used for popovers and other semi-modal elements", "value": "{Global.Shadow.400}", - "type": "boxShadow" + "type": "boxShadow", + "description": "Used for popovers and other semi-modal elements" }, "Modal": { - "description": "Used for modal views", "value": "{Global.Shadow.800}", - "type": "boxShadow" + "type": "boxShadow", + "description": "Used for modal views" } } }, @@ -126,1042 +131,1280 @@ "Grey": { "0": { "value": "#000000", - "description": "0 - 0", - "type": "color" + "type": "color", + "description": "0 - 0" }, "25": { "value": "#020303", - "description": "0.7405999850077014 - 0.025", - "type": "color" + "type": "color", + "description": "0.7405999850077014 - 0.025" }, "50": { "value": "#050607", - "description": "1.7116872276823336 - 0.05", - "type": "color" + "type": "color", + "description": "1.7116872276823336 - 0.05" }, "75": { "value": "#090b0c", - "description": "2.911127985087129 - 0.075", - "type": "color" + "type": 
"color", + "description": "2.911127985087129 - 0.075" }, "100": { "value": "#0d1011", - "description": "4.335380638931743 - 0.1", - "type": "color" + "type": "color", + "description": "4.335380638931743 - 0.1" }, "125": { "value": "#111415", - "description": "5.979517530826747 - 0.125", - "type": "color" + "type": "color", + "description": "5.979517530826747 - 0.125" }, "150": { "value": "#141819", - "description": "7.837255342936205 - 0.15", - "type": "color" + "type": "color", + "description": "7.837255342936205 - 0.15" }, "175": { "value": "#181c1e", - "description": "9.90099383672154 - 0.175", - "type": "color" + "type": "color", + "description": "9.90099383672154 - 0.175" }, "200": { "value": "#1c2123", - "description": "12.16186271093947 - 0.2", - "type": "color" + "type": "color", + "description": "12.16186271093947 - 0.2" }, "225": { "value": "#212628", - "description": "14.609776289998841 - 0.225", - "type": "color" + "type": "color", + "description": "14.609776289998841 - 0.225" }, "250": { "value": "#262b2e", - "description": "17.233495705504463 - 0.25", - "type": "color" + "type": "color", + "description": "17.233495705504463 - 0.25" }, "275": { "value": "#2b3134", - "description": "20.02069818761812 - 0.275", - "type": "color" + "type": "color", + "description": "20.02069818761812 - 0.275" }, "300": { "value": "#31383b", - "description": "22.958053039032254 - 0.3", - "type": "color" + "type": "color", + "description": "22.958053039032254 - 0.3" }, "325": { "value": "#373f42", - "description": "26.03130382315192 - 0.325", - "type": "color" + "type": "color", + "description": "26.03130382315192 - 0.325" }, "350": { "value": "#3e464a", - "description": "29.22535625976699 - 0.35", - "type": "color" + "type": "color", + "description": "29.22535625976699 - 0.35" }, "375": { "value": "#454e52", - "description": "32.524371286309126 - 0.375", - "type": "color" + "type": "color", + "description": "32.524371286309126 - 0.375" }, "400": { "value": "#4c565a", - 
"description": "35.91186271093947 - 0.4", - "type": "color" + "type": "color", + "description": "35.91186271093947 - 0.4" }, "425": { "value": "#545e63", - "description": "39.37079885540354 - 0.425", - "type": "color" + "type": "color", + "description": "39.37079885540354 - 0.425" }, "450": { "value": "#5c676c", - "description": "42.88370756099135 - 0.45", - "type": "color" + "type": "color", + "description": "42.88370756099135 - 0.45" }, "475": { "value": "#647075", - "description": "46.43278391020581 - 0.475", - "type": "color" + "type": "color", + "description": "46.43278391020581 - 0.475" }, "500": { "value": "#6c797f", - "description": "49.99999999999999 - 0.5", - "type": "color" + "type": "color", + "description": "49.99999999999999 - 0.5" }, "525": { "value": "#748288", - "description": "53.56721608979418 - 0.525", - "type": "color" + "type": "color", + "description": "53.56721608979418 - 0.525" }, "550": { "value": "#7d8c92", - "description": "57.11629243900867 - 0.55", - "type": "color" + "type": "color", + "description": "57.11629243900867 - 0.55" }, "575": { "value": "#85959c", - "description": "60.62920114459644 - 0.575", - "type": "color" + "type": "color", + "description": "60.62920114459644 - 0.575" }, "600": { "value": "#8e9ea5", - "description": "64.08813728906053 - 0.6", - "type": "color" + "type": "color", + "description": "64.08813728906053 - 0.6" }, "625": { "value": "#96a7af", - "description": "67.47562871369087 - 0.625", - "type": "color" + "type": "color", + "description": "67.47562871369087 - 0.625" }, "650": { "value": "#9eb0b8", - "description": "70.774643740233 - 0.65", - "type": "color" + "type": "color", + "description": "70.774643740233 - 0.65" }, "675": { "value": "#a6b9c1", - "description": "73.96869617684807 - 0.675", - "type": "color" + "type": "color", + "description": "73.96869617684807 - 0.675" }, "700": { "value": "#aec2ca", - "description": "77.04194696096773 - 0.7", - "type": "color" + "type": "color", + "description": 
"77.04194696096773 - 0.7" }, "725": { "value": "#b6cad2", - "description": "79.97930181238189 - 0.725", - "type": "color" + "type": "color", + "description": "79.97930181238189 - 0.725" }, "750": { "value": "#c0d1d8", - "description": "82.76650429449552 - 0.75", - "type": "color" + "type": "color", + "description": "82.76650429449552 - 0.75" }, "775": { "value": "#cad8de", - "description": "85.39022371000115 - 0.775", - "type": "color" + "type": "color", + "description": "85.39022371000115 - 0.775" }, "800": { "value": "#d3dee3", - "description": "87.83813728906054 - 0.8", - "type": "color" + "type": "color", + "description": "87.83813728906054 - 0.8" }, "825": { "value": "#dbe4e8", - "description": "90.09900616327847 - 0.825", - "type": "color" + "type": "color", + "description": "90.09900616327847 - 0.825" }, "850": { "value": "#e3eaed", - "description": "92.16274465706378 - 0.85", - "type": "color" + "type": "color", + "description": "92.16274465706378 - 0.85" }, "875": { "value": "#e9eff1", - "description": "94.02048246917325 - 0.875", - "type": "color" + "type": "color", + "description": "94.02048246917325 - 0.875" }, "900": { "value": "#eff3f5", - "description": "95.66461936106825 - 0.9", - "type": "color" + "type": "color", + "description": "95.66461936106825 - 0.9" }, "925": { "value": "#f4f7f8", - "description": "97.08887201491288 - 0.925", - "type": "color" + "type": "color", + "description": "97.08887201491288 - 0.925" }, "950": { "value": "#f9fafb", - "description": "98.28831277231767 - 0.95", - "type": "color" + "type": "color", + "description": "98.28831277231767 - 0.95" }, "975": { "value": "#fcfdfd", - "description": "99.25940001499231 - 0.975", - "type": "color" + "type": "color", + "description": "99.25940001499231 - 0.975" }, "1000": { "value": "#ffffff", - "description": "100 - 1", - "type": "color" + "type": "color", + "description": "100 - 1" } }, "Green": { "0": { "value": "#000000", - "description": "0 - 0", - "type": "color" + "type": 
"color", + "description": "0 - 0" }, "25": { "value": "#000401", - "description": "0.7405999850077014 - 0.025", - "type": "color" + "type": "color", + "description": "0.7405999850077014 - 0.025" }, "50": { "value": "#000803", - "description": "1.7116872276823336 - 0.05", - "type": "color" + "type": "color", + "description": "1.7116872276823336 - 0.05" }, "75": { "value": "#000e04", - "description": "2.911127985087129 - 0.075", - "type": "color" + "type": "color", + "description": "2.911127985087129 - 0.075" }, "100": { "value": "#001306", - "description": "4.335380638931743 - 0.1", - "type": "color" + "type": "color", + "description": "4.335380638931743 - 0.1" }, "125": { "value": "#001809", - "description": "5.979517530826747 - 0.125", - "type": "color" + "type": "color", + "description": "5.979517530826747 - 0.125" }, "150": { "value": "#001c0b", - "description": "7.837255342936205 - 0.15", - "type": "color" + "type": "color", + "description": "7.837255342936205 - 0.15" }, "175": { "value": "#00210e", - "description": "9.90099383672154 - 0.175", - "type": "color" + "type": "color", + "description": "9.90099383672154 - 0.175" }, "200": { "value": "#002611", - "description": "12.16186271093947 - 0.2", - "type": "color" + "type": "color", + "description": "12.16186271093947 - 0.2" }, "225": { "value": "#002c15", - "description": "14.609776289998841 - 0.225", - "type": "color" + "type": "color", + "description": "14.609776289998841 - 0.225" }, "250": { "value": "#003219", - "description": "17.233495705504463 - 0.25", - "type": "color" + "type": "color", + "description": "17.233495705504463 - 0.25" }, "275": { "value": "#00391d", - "description": "20.02069818761812 - 0.275", - "type": "color" + "type": "color", + "description": "20.02069818761812 - 0.275" }, "300": { "value": "#004021", - "description": "22.958053039032254 - 0.3", - "type": "color" + "type": "color", + "description": "22.958053039032254 - 0.3" }, "325": { "value": "#004826", - "description": 
"26.03130382315192 - 0.325", - "type": "color" + "type": "color", + "description": "26.03130382315192 - 0.325" }, "350": { "value": "#00502b", - "description": "29.22535625976699 - 0.35", - "type": "color" + "type": "color", + "description": "29.22535625976699 - 0.35" }, "375": { "value": "#005930", - "description": "32.524371286309126 - 0.375", - "type": "color" + "type": "color", + "description": "32.524371286309126 - 0.375" }, "400": { "value": "#006236", - "description": "35.91186271093947 - 0.4", - "type": "color" + "type": "color", + "description": "35.91186271093947 - 0.4" }, "425": { "value": "#006b3b", - "description": "39.37079885540354 - 0.425", - "type": "color" + "type": "color", + "description": "39.37079885540354 - 0.425" }, "450": { "value": "#007541", - "description": "42.88370756099135 - 0.45", - "type": "color" + "type": "color", + "description": "42.88370756099135 - 0.45" }, "475": { "value": "#007f47", - "description": "46.43278391020581 - 0.475", - "type": "color" + "type": "color", + "description": "46.43278391020581 - 0.475" }, "500": { "value": "#00894d", - "description": "49.99999999999999 - 0.5", - "type": "color" + "type": "color", + "description": "49.99999999999999 - 0.5" }, "525": { "value": "#009353", - "description": "53.56721608979418 - 0.525", - "type": "color" + "type": "color", + "description": "53.56721608979418 - 0.525" }, "550": { "value": "#009e5a", - "description": "57.11629243900867 - 0.55", - "type": "color" + "type": "color", + "description": "57.11629243900867 - 0.55" }, "575": { "value": "#00a860", - "description": "60.62920114459644 - 0.575", - "type": "color" + "type": "color", + "description": "60.62920114459644 - 0.575" }, "600": { "value": "#00b266", - "description": "64.08813728906053 - 0.6", - "type": "color" + "type": "color", + "description": "64.08813728906053 - 0.6" }, "625": { "value": "#00bc6c", - "description": "67.47562871369087 - 0.625", - "type": "color" + "type": "color", + "description": 
"67.47562871369087 - 0.625" }, "650": { "value": "#00c772", - "description": "70.774643740233 - 0.65", - "type": "color" + "type": "color", + "description": "70.774643740233 - 0.65" }, "675": { "value": "#00d078", - "description": "73.96869617684807 - 0.675", - "type": "color" + "type": "color", + "description": "73.96869617684807 - 0.675" }, "700": { "value": "#00da7e", - "description": "77.04194696096773 - 0.7", - "type": "color" + "type": "color", + "description": "77.04194696096773 - 0.7" }, "725": { "value": "#00e384", - "description": "79.97930181238189 - 0.725", - "type": "color" + "type": "color", + "description": "79.97930181238189 - 0.725" }, "750": { "value": "#00ec89", - "description": "82.76650429449552 - 0.75", - "type": "color" + "type": "color", + "description": "82.76650429449552 - 0.75" }, "775": { "value": "#00f48e", - "description": "85.39022371000115 - 0.775", - "type": "color" + "type": "color", + "description": "85.39022371000115 - 0.775" }, "800": { "value": "#00fc93", - "description": "87.83813728906054 - 0.8", - "type": "color" + "type": "color", + "description": "87.83813728906054 - 0.8" }, "825": { "value": "#5cffa5", - "description": "90.09900616327847 - 0.825", - "type": "color" + "type": "color", + "description": "90.09900616327847 - 0.825" }, "850": { "value": "#91ffbb", - "description": "92.16274465706378 - 0.85", - "type": "color" + "type": "color", + "description": "92.16274465706378 - 0.85" }, "875": { "value": "#b2ffcd", - "description": "94.02048246917325 - 0.875", - "type": "color" + "type": "color", + "description": "94.02048246917325 - 0.875" }, "900": { "value": "#caffdc", - "description": "95.66461936106825 - 0.9", - "type": "color" + "type": "color", + "description": "95.66461936106825 - 0.9" }, "925": { "value": "#ddffe8", - "description": "97.08887201491288 - 0.925", - "type": "color" + "type": "color", + "description": "97.08887201491288 - 0.925" }, "950": { "value": "#ebfff1", - "description": "98.28831277231767 - 
0.95", - "type": "color" + "type": "color", + "description": "98.28831277231767 - 0.95" }, "975": { "value": "#f7fff9", - "description": "99.25940001499231 - 0.975", - "type": "color" + "type": "color", + "description": "99.25940001499231 - 0.975" }, "1000": { "value": "#ffffff", - "description": "100 - 1", - "type": "color" + "type": "color", + "description": "100 - 1" } }, "Red": { "0": { "value": "#000000", - "description": "0 - 0", - "type": "color" + "type": "color", + "description": "0 - 0" }, "25": { "value": "#0c0001", - "description": "0.7405999850077014 - 0.025", - "type": "color" + "type": "color", + "description": "0.7405999850077014 - 0.025" }, "50": { "value": "#170003", - "description": "1.7116872276823336 - 0.05", - "type": "color" + "type": "color", + "description": "1.7116872276823336 - 0.05" }, "75": { "value": "#200005", - "description": "2.911127985087129 - 0.075", - "type": "color" + "type": "color", + "description": "2.911127985087129 - 0.075" }, "100": { "value": "#290007", - "description": "4.335380638931743 - 0.1", - "type": "color" + "type": "color", + "description": "4.335380638931743 - 0.1" }, "125": { "value": "#310009", - "description": "5.979517530826747 - 0.125", - "type": "color" + "type": "color", + "description": "5.979517530826747 - 0.125" }, "150": { "value": "#38000c", - "description": "7.837255342936205 - 0.15", - "type": "color" + "type": "color", + "description": "7.837255342936205 - 0.15" }, "175": { "value": "#40000f", - "description": "9.90099383672154 - 0.175", - "type": "color" + "type": "color", + "description": "9.90099383672154 - 0.175" }, "200": { "value": "#480012", - "description": "12.16186271093947 - 0.2", - "type": "color" + "type": "color", + "description": "12.16186271093947 - 0.2" }, "225": { "value": "#520016", - "description": "14.609776289998841 - 0.225", - "type": "color" + "type": "color", + "description": "14.609776289998841 - 0.225" }, "250": { "value": "#5c001a", - "description": "17.233495705504463 
- 0.25", - "type": "color" + "type": "color", + "description": "17.233495705504463 - 0.25" }, "275": { "value": "#67001e", - "description": "20.02069818761812 - 0.275", - "type": "color" + "type": "color", + "description": "20.02069818761812 - 0.275" }, "300": { "value": "#730022", - "description": "22.958053039032254 - 0.3", - "type": "color" + "type": "color", + "description": "22.958053039032254 - 0.3" }, "325": { "value": "#800027", - "description": "26.03130382315192 - 0.325", - "type": "color" + "type": "color", + "description": "26.03130382315192 - 0.325" }, "350": { "value": "#8e002c", - "description": "29.22535625976699 - 0.35", - "type": "color" + "type": "color", + "description": "29.22535625976699 - 0.35" }, "375": { "value": "#9c0031", - "description": "32.524371286309126 - 0.375", - "type": "color" + "type": "color", + "description": "32.524371286309126 - 0.375" }, "400": { "value": "#ab0037", - "description": "35.91186271093947 - 0.4", - "type": "color" + "type": "color", + "description": "35.91186271093947 - 0.4" }, "425": { "value": "#bb003d", - "description": "39.37079885540354 - 0.425", - "type": "color" + "type": "color", + "description": "39.37079885540354 - 0.425" }, "450": { "value": "#cb0043", - "description": "42.88370756099135 - 0.45", - "type": "color" + "type": "color", + "description": "42.88370756099135 - 0.45" }, "475": { "value": "#db0049", - "description": "46.43278391020581 - 0.475", - "type": "color" + "type": "color", + "description": "46.43278391020581 - 0.475" }, "500": { "value": "#ec004f", - "description": "49.99999999999999 - 0.5", - "type": "color" + "type": "color", + "description": "49.99999999999999 - 0.5" }, "525": { "value": "#fd0056", - "description": "53.56721608979418 - 0.525", - "type": "color" + "type": "color", + "description": "53.56721608979418 - 0.525" }, "550": { "value": "#ff3865", - "description": "57.11629243900867 - 0.55", - "type": "color" + "type": "color", + "description": "57.11629243900867 - 0.55" }, 
"575": { "value": "#ff5474", - "description": "60.62920114459644 - 0.575", - "type": "color" + "type": "color", + "description": "60.62920114459644 - 0.575" }, "600": { "value": "#ff6981", - "description": "64.08813728906053 - 0.6", - "type": "color" + "type": "color", + "description": "64.08813728906053 - 0.6" }, "625": { "value": "#ff7a8e", - "description": "67.47562871369087 - 0.625", - "type": "color" + "type": "color", + "description": "67.47562871369087 - 0.625" }, "650": { "value": "#ff8a9a", - "description": "70.774643740233 - 0.65", - "type": "color" + "type": "color", + "description": "70.774643740233 - 0.65" }, "675": { "value": "#ff99a6", - "description": "73.96869617684807 - 0.675", - "type": "color" + "type": "color", + "description": "73.96869617684807 - 0.675" }, "700": { "value": "#ffa6b1", - "description": "77.04194696096773 - 0.7", - "type": "color" + "type": "color", + "description": "77.04194696096773 - 0.7" }, "725": { "value": "#ffb2bb", - "description": "79.97930181238189 - 0.725", - "type": "color" + "type": "color", + "description": "79.97930181238189 - 0.725" }, "750": { "value": "#ffbdc5", - "description": "82.76650429449552 - 0.75", - "type": "color" + "type": "color", + "description": "82.76650429449552 - 0.75" }, "775": { "value": "#ffc8ce", - "description": "85.39022371000115 - 0.775", - "type": "color" + "type": "color", + "description": "85.39022371000115 - 0.775" }, "800": { "value": "#ffd1d6", - "description": "87.83813728906054 - 0.8", - "type": "color" + "type": "color", + "description": "87.83813728906054 - 0.8" }, "825": { "value": "#ffdade", - "description": "90.09900616327847 - 0.825", - "type": "color" + "type": "color", + "description": "90.09900616327847 - 0.825" }, "850": { "value": "#ffe2e5", - "description": "92.16274465706378 - 0.85", - "type": "color" + "type": "color", + "description": "92.16274465706378 - 0.85" }, "875": { "value": "#ffe9eb", - "description": "94.02048246917325 - 0.875", - "type": "color" + 
"type": "color", + "description": "94.02048246917325 - 0.875" }, "900": { "value": "#ffeff0", - "description": "95.66461936106825 - 0.9", - "type": "color" + "type": "color", + "description": "95.66461936106825 - 0.9" }, "925": { "value": "#fff4f5", - "description": "97.08887201491288 - 0.925", - "type": "color" + "type": "color", + "description": "97.08887201491288 - 0.925" }, "950": { "value": "#fff9f9", - "description": "98.28831277231767 - 0.95", - "type": "color" + "type": "color", + "description": "98.28831277231767 - 0.95" }, "975": { "value": "#fffcfd", - "description": "99.25940001499231 - 0.975", - "type": "color" + "type": "color", + "description": "99.25940001499231 - 0.975" }, "1000": { "value": "#ffffff", - "description": "100 - 1", - "type": "color" + "type": "color", + "description": "100 - 1" } }, "Blue": { "0": { "value": "#000000", - "description": "0 - 0", - "type": "color" + "type": "color", + "description": "0 - 0" }, "25": { "value": "#00020f", - "description": "0.7405999850077014 - 0.025", - "type": "color" + "type": "color", + "description": "0.7405999850077014 - 0.025" }, "50": { "value": "#00051c", - "description": "1.7116872276823336 - 0.05", - "type": "color" + "type": "color", + "description": "1.7116872276823336 - 0.05" }, "75": { "value": "#000826", - "description": "2.911127985087129 - 0.075", - "type": "color" + "type": "color", + "description": "2.911127985087129 - 0.075" }, "100": { "value": "#000c30", - "description": "4.335380638931743 - 0.1", - "type": "color" + "type": "color", + "description": "4.335380638931743 - 0.1" }, "125": { "value": "#001038", - "description": "5.979517530826747 - 0.125", - "type": "color" + "type": "color", + "description": "5.979517530826747 - 0.125" }, "150": { "value": "#001441", - "description": "7.837255342936205 - 0.15", - "type": "color" + "type": "color", + "description": "7.837255342936205 - 0.15" }, "175": { "value": "#001749", - "description": "9.90099383672154 - 0.175", - "type": "color" 
+ "type": "color", + "description": "9.90099383672154 - 0.175" }, "200": { "value": "#001b53", - "description": "12.16186271093947 - 0.2", - "type": "color" + "type": "color", + "description": "12.16186271093947 - 0.2" }, "225": { "value": "#00205e", - "description": "14.609776289998841 - 0.225", - "type": "color" + "type": "color", + "description": "14.609776289998841 - 0.225" }, "250": { "value": "#002569", - "description": "17.233495705504463 - 0.25", - "type": "color" + "type": "color", + "description": "17.233495705504463 - 0.25" }, "275": { "value": "#002a76", - "description": "20.02069818761812 - 0.275", - "type": "color" + "type": "color", + "description": "20.02069818761812 - 0.275" }, "300": { "value": "#003084", - "description": "22.958053039032254 - 0.3", - "type": "color" + "type": "color", + "description": "22.958053039032254 - 0.3" }, "325": { "value": "#003692", - "description": "26.03130382315192 - 0.325", - "type": "color" + "type": "color", + "description": "26.03130382315192 - 0.325" }, "350": { "value": "#003da1", - "description": "29.22535625976699 - 0.35", - "type": "color" + "type": "color", + "description": "29.22535625976699 - 0.35" }, "375": { "value": "#0044b2", - "description": "32.524371286309126 - 0.375", - "type": "color" + "type": "color", + "description": "32.524371286309126 - 0.375" }, "400": { "value": "#004bc2", - "description": "35.91186271093947 - 0.4", - "type": "color" + "type": "color", + "description": "35.91186271093947 - 0.4" }, "425": { "value": "#0053d4", - "description": "39.37079885540354 - 0.425", - "type": "color" + "type": "color", + "description": "39.37079885540354 - 0.425" }, "450": { "value": "#005ae6", - "description": "42.88370756099135 - 0.45", - "type": "color" + "type": "color", + "description": "42.88370756099135 - 0.45" }, "475": { "value": "#0062f9", - "description": "46.43278391020581 - 0.475", - "type": "color" + "type": "color", + "description": "46.43278391020581 - 0.475" }, "500": { "value": 
"#2a6cff", - "description": "49.99999999999999 - 0.5", - "type": "color" + "type": "color", + "description": "49.99999999999999 - 0.5" }, "525": { "value": "#4676ff", - "description": "53.56721608979418 - 0.525", - "type": "color" + "type": "color", + "description": "53.56721608979418 - 0.525" }, "550": { "value": "#5a81ff", - "description": "57.11629243900867 - 0.55", - "type": "color" + "type": "color", + "description": "57.11629243900867 - 0.55" }, "575": { "value": "#6b8bff", - "description": "60.62920114459644 - 0.575", - "type": "color" + "type": "color", + "description": "60.62920114459644 - 0.575" }, "600": { "value": "#7a95ff", - "description": "64.08813728906053 - 0.6", - "type": "color" + "type": "color", + "description": "64.08813728906053 - 0.6" }, "625": { "value": "#899fff", - "description": "67.47562871369087 - 0.625", - "type": "color" + "type": "color", + "description": "67.47562871369087 - 0.625" }, "650": { "value": "#96a8ff", - "description": "70.774643740233 - 0.65", - "type": "color" + "type": "color", + "description": "70.774643740233 - 0.65" }, "675": { "value": "#a2b2ff", - "description": "73.96869617684807 - 0.675", - "type": "color" + "type": "color", + "description": "73.96869617684807 - 0.675" }, "700": { "value": "#adbbff", - "description": "77.04194696096773 - 0.7", - "type": "color" + "type": "color", + "description": "77.04194696096773 - 0.7" }, "725": { "value": "#b8c3ff", - "description": "79.97930181238189 - 0.725", - "type": "color" + "type": "color", + "description": "79.97930181238189 - 0.725" }, "750": { "value": "#c2ccff", - "description": "82.76650429449552 - 0.75", - "type": "color" + "type": "color", + "description": "82.76650429449552 - 0.75" }, "775": { "value": "#ccd3ff", - "description": "85.39022371000115 - 0.775", - "type": "color" + "type": "color", + "description": "85.39022371000115 - 0.775" }, "800": { "value": "#d4dbff", - "description": "87.83813728906054 - 0.8", - "type": "color" + "type": "color", + 
"description": "87.83813728906054 - 0.8" }, "825": { "value": "#dce1ff", - "description": "90.09900616327847 - 0.825", - "type": "color" + "type": "color", + "description": "90.09900616327847 - 0.825" }, "850": { "value": "#e4e7ff", - "description": "92.16274465706378 - 0.85", - "type": "color" + "type": "color", + "description": "92.16274465706378 - 0.85" }, "875": { "value": "#eaedff", - "description": "94.02048246917325 - 0.875", - "type": "color" + "type": "color", + "description": "94.02048246917325 - 0.875" }, "900": { "value": "#f0f2ff", - "description": "95.66461936106825 - 0.9", - "type": "color" + "type": "color", + "description": "95.66461936106825 - 0.9" }, "925": { "value": "#f5f6ff", - "description": "97.08887201491288 - 0.925", - "type": "color" + "type": "color", + "description": "97.08887201491288 - 0.925" }, "950": { "value": "#f9faff", - "description": "98.28831277231767 - 0.95", - "type": "color" + "type": "color", + "description": "98.28831277231767 - 0.95" }, "975": { "value": "#fcfdff", - "description": "99.25940001499231 - 0.975", - "type": "color" + "type": "color", + "description": "99.25940001499231 - 0.975" }, "1000": { "value": "#ffffff", - "description": "100 - 1", - "type": "color" + "type": "color", + "description": "100 - 1" } }, "Purple": { "0": { "value": "#000000", - "description": "0 - 0", - "type": "color" + "type": "color", + "description": "0 - 0" }, "25": { "value": "#060011", - "description": "0.7405999850077014 - 0.025", - "type": "color" + "type": "color", + "description": "0.7405999850077014 - 0.025" }, "50": { "value": "#0e001e", - "description": "1.7116872276823336 - 0.05", - "type": "color" + "type": "color", + "description": "1.7116872276823336 - 0.05" }, "75": { "value": "#150029", - "description": "2.911127985087129 - 0.075", - "type": "color" + "type": "color", + "description": "2.911127985087129 - 0.075" }, "100": { "value": "#1b0033", - "description": "4.335380638931743 - 0.1", - "type": "color" + "type": 
"color", + "description": "4.335380638931743 - 0.1" }, "125": { "value": "#21003d", - "description": "5.979517530826747 - 0.125", - "type": "color" + "type": "color", + "description": "5.979517530826747 - 0.125" }, "150": { "value": "#270046", - "description": "7.837255342936205 - 0.15", - "type": "color" + "type": "color", + "description": "7.837255342936205 - 0.15" }, "175": { "value": "#2d004f", - "description": "9.90099383672154 - 0.175", - "type": "color" + "type": "color", + "description": "9.90099383672154 - 0.175" }, "200": { "value": "#330059", - "description": "12.16186271093947 - 0.2", - "type": "color" + "type": "color", + "description": "12.16186271093947 - 0.2" }, "225": { "value": "#3a0065", - "description": "14.609776289998841 - 0.225", - "type": "color" + "type": "color", + "description": "14.609776289998841 - 0.225" }, "250": { "value": "#420071", - "description": "17.233495705504463 - 0.25", - "type": "color" + "type": "color", + "description": "17.233495705504463 - 0.25" }, "275": { "value": "#4b007e", - "description": "20.02069818761812 - 0.275", - "type": "color" + "type": "color", + "description": "20.02069818761812 - 0.275" }, "300": { "value": "#54008d", - "description": "22.958053039032254 - 0.3", - "type": "color" + "type": "color", + "description": "22.958053039032254 - 0.3" }, "325": { "value": "#5d009c", - "description": "26.03130382315192 - 0.325", - "type": "color" + "type": "color", + "description": "26.03130382315192 - 0.325" }, "350": { "value": "#6800ad", - "description": "29.22535625976699 - 0.35", - "type": "color" + "type": "color", + "description": "29.22535625976699 - 0.35" }, "375": { "value": "#7200be", - "description": "32.524371286309126 - 0.375", - "type": "color" + "type": "color", + "description": "32.524371286309126 - 0.375" }, "400": { "value": "#7e00d0", - "description": "35.91186271093947 - 0.4", - "type": "color" + "type": "color", + "description": "35.91186271093947 - 0.4" }, "425": { "value": "#8a00e2", - 
"description": "39.37079885540354 - 0.425", - "type": "color" + "type": "color", + "description": "39.37079885540354 - 0.425" }, "450": { "value": "#9600f6", - "description": "42.88370756099135 - 0.45", - "type": "color" + "type": "color", + "description": "42.88370756099135 - 0.45" }, "475": { "value": "#9e22ff", - "description": "46.43278391020581 - 0.475", - "type": "color" + "type": "color", + "description": "46.43278391020581 - 0.475" }, "500": { "value": "#a23eff", - "description": "49.99999999999999 - 0.5", - "type": "color" + "type": "color", + "description": "49.99999999999999 - 0.5" }, "525": { "value": "#a752ff", - "description": "53.56721608979418 - 0.525", - "type": "color" + "type": "color", + "description": "53.56721608979418 - 0.525" }, "550": { "value": "#ac63ff", - "description": "57.11629243900867 - 0.55", - "type": "color" + "type": "color", + "description": "57.11629243900867 - 0.55" }, "575": { "value": "#b273ff", - "description": "60.62920114459644 - 0.575", - "type": "color" + "type": "color", + "description": "60.62920114459644 - 0.575" }, "600": { "value": "#b780ff", - "description": "64.08813728906053 - 0.6", - "type": "color" + "type": "color", + "description": "64.08813728906053 - 0.6" }, "625": { "value": "#bd8eff", - "description": "67.47562871369087 - 0.625", - "type": "color" + "type": "color", + "description": "67.47562871369087 - 0.625" }, "650": { "value": "#c39aff", - "description": "70.774643740233 - 0.65", - "type": "color" + "type": "color", + "description": "70.774643740233 - 0.65" }, "675": { "value": "#c9a5ff", - "description": "73.96869617684807 - 0.675", - "type": "color" + "type": "color", + "description": "73.96869617684807 - 0.675" }, "700": { "value": "#cfb0ff", - "description": "77.04194696096773 - 0.7", - "type": "color" + "type": "color", + "description": "77.04194696096773 - 0.7" }, "725": { "value": "#d4bbff", - "description": "79.97930181238189 - 0.725", - "type": "color" + "type": "color", + "description": 
"79.97930181238189 - 0.725" }, "750": { "value": "#dac4ff", - "description": "82.76650429449552 - 0.75", - "type": "color" + "type": "color", + "description": "82.76650429449552 - 0.75" }, "775": { "value": "#dfcdff", - "description": "85.39022371000115 - 0.775", - "type": "color" + "type": "color", + "description": "85.39022371000115 - 0.775" }, "800": { "value": "#e4d6ff", - "description": "87.83813728906054 - 0.8", - "type": "color" + "type": "color", + "description": "87.83813728906054 - 0.8" }, "825": { "value": "#e9ddff", - "description": "90.09900616327847 - 0.825", - "type": "color" + "type": "color", + "description": "90.09900616327847 - 0.825" }, "850": { "value": "#eee4ff", - "description": "92.16274465706378 - 0.85", - "type": "color" + "type": "color", + "description": "92.16274465706378 - 0.85" }, "875": { "value": "#f2ebff", - "description": "94.02048246917325 - 0.875", - "type": "color" + "type": "color", + "description": "94.02048246917325 - 0.875" }, "900": { "value": "#f5f0ff", - "description": "95.66461936106825 - 0.9", - "type": "color" + "type": "color", + "description": "95.66461936106825 - 0.9" }, "925": { "value": "#f8f5ff", - "description": "97.08887201491288 - 0.925", - "type": "color" + "type": "color", + "description": "97.08887201491288 - 0.925" }, "950": { "value": "#fbf9ff", - "description": "98.28831277231767 - 0.95", - "type": "color" + "type": "color", + "description": "98.28831277231767 - 0.95" }, "975": { "value": "#fdfcff", - "description": "99.25940001499231 - 0.975", - "type": "color" + "type": "color", + "description": "99.25940001499231 - 0.975" }, "1000": { "value": "#ffffff", - "description": "100 - 1", - "type": "color" + "type": "color", + "description": "100 - 1" } }, "OpaqueGrey": { "Default": { "value": "#7c7c7c20", - "description": "An opaque grey that picks up some, but not all, of the colors behind it", + "type": "color", + "description": "An opaque grey that picks up some, but not all, of the colors behind it" + 
} + }, + "Gray": { + "25": { + "value": "#fcfcfd", + "type": "color" + }, + "50": { + "value": "#f9fafb", + "type": "color" + }, + "100": { + "value": "#f2f4f7", + "type": "color" + }, + "200": { + "value": "#eaecf0", + "type": "color" + }, + "300": { + "value": "#d0d5dd", + "type": "color" + }, + "400": { + "value": "#98a2b3", + "type": "color" + }, + "500": { + "value": "#667085", + "type": "color" + }, + "600": { + "value": "#475467", + "type": "color" + }, + "700": { + "value": "#344054", + "type": "color" + }, + "800": { + "value": "#1d2939", + "type": "color" + }, + "900": { + "value": "#101828", + "type": "color" + } + }, + "White": { + "value": "#ffffff", + "type": "color" + }, + "Black": { + "value": "#000000", + "type": "color" + }, + "Primary": { + "25": { + "value": "#f6fcfe", + "type": "color" + }, + "50": { + "value": "#dceffc", + "type": "color" + }, + "100": { + "value": "#c1dcf9", + "type": "color" + }, + "200": { + "value": "#a7c3f6", + "type": "color" + }, + "300": { + "value": "#8da4f4", + "type": "color" + }, + "400": { + "value": "#6178f4", + "type": "color" + }, + "500": { + "value": "#4c4ff1", + "type": "color" + }, + "600": { + "value": "#4e38ed", + "type": "color" + }, + "700": { + "value": "#5724e8", + "type": "color" + }, + "800": { + "value": "#441bb6", + "type": "color" + }, + "900": { + "value": "#301383", + "type": "color" + }, + "Default": { + "value": "#4e38ed", + "type": "color" + }, + "Hover": { + "value": "#c1dcf9", + "type": "color" + } + }, + "Warning": { + "25": { + "value": "#fffcf5", + "type": "color" + }, + "50": { + "value": "#fffaeb", + "type": "color" + }, + "100": { + "value": "#fef0c7", + "type": "color" + }, + "200": { + "value": "#fedf89", + "type": "color" + }, + "300": { + "value": "#fec84b", + "type": "color" + }, + "400": { + "value": "#fdb022", + "type": "color" + }, + "500": { + "value": "#f79009", + "type": "color" + }, + "600": { + "value": "#dc6803", + "type": "color" + }, + "700": { + "value": "#b54708", + 
"type": "color" + } + }, + "Error": { + "25": { + "value": "#fffbfa", + "type": "color" + }, + "50": { + "value": "#fef3f2", + "type": "color" + }, + "100": { + "value": "#fee4ef", + "type": "color" + }, + "200": { + "value": "#fecdca", + "type": "color" + }, + "300": { + "value": "#fda29b", + "type": "color" + }, + "400": { + "value": "#f97066", + "type": "color" + }, + "500": { + "value": "#f04438", + "type": "color" + }, + "600": { + "value": "#d92d20", + "type": "color" + }, + "700": { + "value": "#b42318", + "type": "color" + } + }, + "Success": { + "25": { + "value": "#f6fef9", + "type": "color" + }, + "50": { + "value": "#ecfdf3", + "type": "color" + }, + "100": { + "value": "#d1fadf", + "type": "color" + }, + "200": { + "value": "#a6f4c5", + "type": "color" + }, + "300": { + "value": "#6ce9a6", + "type": "color" + }, + "400": { + "value": "#32d583", + "type": "color" + }, + "500": { + "value": "#12b76a", + "type": "color" + }, + "600": { + "value": "#039855", + "type": "color" + }, + "700": { + "value": "#027a48", + "type": "color" + } + }, + "Pink": { + "700": { + "value": "#c11574", + "type": "color" + } + }, + "Text": { + "Primary": { + "value": "#101828", + "type": "color" + }, + "Secondary": { + "value": "#667085", "type": "color" } } @@ -1175,8 +1418,8 @@ "lineHeight": "12px", "letterSpacing": "-0.12px" }, - "description": "", - "type": "typography" + "type": "typography", + "description": "" }, "200": { "value": { @@ -1186,8 +1429,8 @@ "lineHeight": "16px", "letterSpacing": "-0.15px" }, - "description": "", - "type": "typography" + "type": "typography", + "description": "" } }, "Shadow": { @@ -1199,8 +1442,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "200": { "value": { @@ -1210,8 +1453,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "300": { "value": { @@ 
-1221,8 +1464,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "400": { "value": { @@ -1232,8 +1475,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "500": { "value": { @@ -1243,8 +1486,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "600": { "value": { @@ -1254,8 +1497,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "700": { "value": { @@ -1265,8 +1508,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "800": { "value": { @@ -1276,8 +1519,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "900": { "value": { @@ -1287,8 +1530,8 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" }, "1000": { "value": { @@ -1298,98 +1541,102 @@ "spread": "0px", "color": "rgba(0, 0, 0, .2)" }, - "description": "", - "type": "boxShadow" + "type": "boxShadow", + "description": "" } }, "Radius": { "0": { "value": "0", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "100": { "value": "2", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "200": { "value": "6", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "300": { "value": "14", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "400": { "value": "30", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, 
"500": { "value": "62", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" } }, "Spacing": { "0": { "value": "0", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "100": { "value": "2", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "200": { "value": "4", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "300": { "value": "8", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "400": { "value": "12", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "500": { "value": "16", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "600": { "value": "32", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "700": { "value": "48", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "800": { "value": "64", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "900": { "value": "96", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" }, "1000": { "value": "128", - "description": "", - "type": "dimension" + "type": "dimension", + "description": "" } } + }, + "$themes": [], + "$metadata": { + "tokenSetOrder": ["Alias", "Global"] } } diff --git a/crates/re_ui/data/icons/rerun_menu.png b/crates/re_ui/data/icons/rerun_menu.png index e0326623c56e..b74059bf3dfa 100644 Binary files a/crates/re_ui/data/icons/rerun_menu.png and b/crates/re_ui/data/icons/rerun_menu.png differ diff --git a/crates/re_ui/data/logo_dark_mode.png b/crates/re_ui/data/logo_dark_mode.png index 2eaf405d82e9..1b951089cf2a 100644 Binary files a/crates/re_ui/data/logo_dark_mode.png and b/crates/re_ui/data/logo_dark_mode.png differ diff --git a/crates/re_ui/data/logo_light_mode.png 
b/crates/re_ui/data/logo_light_mode.png index 4675fec247da..1b951089cf2a 100644 Binary files a/crates/re_ui/data/logo_light_mode.png and b/crates/re_ui/data/logo_light_mode.png differ diff --git a/crates/re_ui/src/command.rs b/crates/re_ui/src/command.rs index 55f1aa2a430a..f41bf4fa43b0 100644 --- a/crates/re_ui/src/command.rs +++ b/crates/re_ui/src/command.rs @@ -72,7 +72,7 @@ impl Command { Command::Open => ("Open…", "Open a Rerun Data File (.rrd)"), #[cfg(not(target_arch = "wasm32"))] - Command::Quit => ("Quit", "Close the Rerun Viewer"), + Command::Quit => ("Quit", "Close the Depthai Viewer"), Command::ResetViewer => ( "Reset viewer", @@ -87,7 +87,7 @@ impl Command { Command::ToggleMemoryPanel => ( "Toggle memory panel", - "Investigate what is using up RAM in Rerun Viewer", + "Investigate what is using up RAM in Depthai Viewer", ), Command::ToggleBlueprintPanel => ("Toggle blueprint panel", "Toggle the left panel"), Command::ToggleSelectionPanel => ("Toggle selection panel", "Toggle the right panel"), diff --git a/crates/re_ui/src/design_tokens.rs b/crates/re_ui/src/design_tokens.rs index 4a3642695e87..83b53c7c9212 100644 --- a/crates/re_ui/src/design_tokens.rs +++ b/crates/re_ui/src/design_tokens.rs @@ -11,6 +11,14 @@ pub struct DesignTokens { pub bottom_bar_stroke: egui::Stroke, pub bottom_bar_rounding: egui::Rounding, pub shadow_gradient_dark_start: egui::Color32, + pub success_bg_color: egui::Color32, + pub success_hover_bg_color: egui::Color32, + pub warning_bg_color: egui::Color32, + pub warning_hover_bg_color: egui::Color32, + pub error_bg_color: egui::Color32, + pub error_hover_bg_color: egui::Color32, + pub primary_bg_color: egui::Color32, + pub primary_hover_bg_color: egui::Color32, } impl DesignTokens { @@ -46,7 +54,7 @@ fn apply_design_tokens(ctx: &egui::Context) -> DesignTokens { } let mut egui_style = egui::Style { - visuals: egui::Visuals::dark(), + visuals: egui::Visuals::light(), ..Default::default() }; @@ -71,26 +79,25 @@ fn 
apply_design_tokens(ctx: &egui::Context) -> DesignTokens { } let panel_bg_color = get_aliased_color(&json, "{Alias.Color.Surface.Default.value}"); - // let floating_color = get_aliased_color(&json, "{Alias.Color.Surface.Floating.value}"); - let floating_color = Color32::from_gray(38); // TODO(emilk): change the content of the design_tokens.json origin instead + let floating_color = get_aliased_color(&json, "{Alias.Color.Surface.Floating.value}"); // Used as the background of text edits, scroll bars and others things // that needs to look different from other interactive stuff. // We need this very dark, since the theme overall is very, very dark. - egui_style.visuals.extreme_bg_color = egui::Color32::BLACK; + egui_style.visuals.extreme_bg_color = egui::Color32::WHITE; egui_style.visuals.widgets.noninteractive.weak_bg_fill = panel_bg_color; egui_style.visuals.widgets.noninteractive.bg_fill = panel_bg_color; egui_style.visuals.button_frame = true; - egui_style.visuals.widgets.inactive.weak_bg_fill = Default::default(); // Buttons have no background color when inactive - egui_style.visuals.widgets.inactive.bg_fill = Color32::from_gray(40); - // get_aliased_color(&json, "{Alias.Color.Action.Default.value}"); // too dark to see, especially for scroll bars + egui_style.visuals.widgets.inactive.weak_bg_fill = + get_aliased_color(&json, "{Alias.Color.Action.Inactive.value}"); // Buttons have no background color when inactive + egui_style.visuals.widgets.inactive.bg_fill = + get_aliased_color(&json, "{Alias.Color.Action.Default.value}"); { // Background colors for buttons (menu buttons, blueprint buttons, etc) when hovered or clicked: - // let hovered_color = get_aliased_color(&json, "{Alias.Color.Action.Hovered.value}"); - let hovered_color = Color32::from_gray(64); // TODO(emilk): change the content of the design_tokens.json origin instead + let hovered_color = get_aliased_color(&json, "{Alias.Color.Action.Hovered.value}"); egui_style.visuals.widgets.hovered.weak_bg_fill 
= hovered_color; egui_style.visuals.widgets.hovered.bg_fill = hovered_color; egui_style.visuals.widgets.active.weak_bg_fill = hovered_color; @@ -163,26 +170,39 @@ fn apply_design_tokens(ctx: &egui::Context) -> DesignTokens { ctx.set_style(egui_style); DesignTokens { - top_bar_color: Color32::from_gray(20), // copied from figma - bottom_bar_color: get_global_color(&json, "{Global.Color.Grey.150}"), - bottom_bar_stroke: egui::Stroke::new(1.0, egui::Color32::from_gray(47)), // copied from figma + top_bar_color: get_global_color(&json, "{Global.Color.Gray.200}"), // copied from figma + bottom_bar_color: get_global_color(&json, "{Global.Color.Gray.100}"), + bottom_bar_stroke: egui::Stroke::new( + 1.0, + Color32::TRANSPARENT, // Transparent because it doesn't look good in light mode + ), // copied from figma bottom_bar_rounding: egui::Rounding { nw: 6.0, ne: 6.0, sw: 0.0, se: 0.0, }, // copied from figma, should be top only - shadow_gradient_dark_start: egui::Color32::from_black_alpha(77), + shadow_gradient_dark_start: Color32::TRANSPARENT, + success_bg_color: get_global_color(&json, "{Global.Color.Success.200}"), + success_hover_bg_color: get_global_color(&json, "{Global.Color.Success.300}"), + warning_bg_color: get_global_color(&json, "{Global.Color.Warning.200}"), + warning_hover_bg_color: get_global_color(&json, "{Global.Color.Warning.300}"), + error_bg_color: get_global_color(&json, "{Global.Color.Error.200}"), + error_hover_bg_color: get_global_color(&json, "{Global.Color.Error.300}"), + primary_bg_color: get_global_color(&json, "{Global.Color.Primary.Default}"), + primary_hover_bg_color: get_global_color(&json, "{Global.Color.Primary.500}"), } } // ---------------------------------------------------------------------------- fn get_aliased_color(json: &serde_json::Value, alias_path: &str) -> egui::Color32 { + re_log::debug!("Alias path: {alias_path}"); parse_color(get_alias_str(json, alias_path)) } fn get_global_color(json: &serde_json::Value, global_path: &str) -> 
egui::Color32 { + re_log::debug!("Global path: {global_path}"); parse_color(global_path_value(json, global_path).as_str().unwrap()) } diff --git a/crates/re_viewer/Cargo.toml b/crates/re_viewer/Cargo.toml index 8ec7e70ddc1b..645f75e77749 100644 --- a/crates/re_viewer/Cargo.toml +++ b/crates/re_viewer/Cargo.toml @@ -17,6 +17,7 @@ include = [ "Cargo.toml", "data/*", ] +resolver = "2" [package.metadata.docs.rs] all-features = true @@ -28,14 +29,15 @@ crate-type = ["cdylib", "rlib"] [features] -default = ["analytics"] +default = ["analytics", "webgl"] ## Enable telemetry using our analytics SDK. analytics = ["dep:re_analytics"] +## Render using webgl instead of webgpu on wasm builds. +webgl = ["re_renderer/webgl"] -[dependencies] -# Internal: +[dependencies] # Internal: re_arrow_store.workspace = true re_build_info.workspace = true re_data_store = { workspace = true, features = ["serde"] } @@ -46,11 +48,21 @@ re_log_types = { workspace = true, features = ["ecolor", "glam", "image"] } re_log.workspace = true re_memory.workspace = true re_query.workspace = true -re_renderer = { workspace = true, features = ["arrow", "serde"] } +re_renderer = { workspace = true, default-features = false, features = [ + "arrow", + "import-gltf", + "import-obj", + "serde", +] } re_smart_channel.workspace = true re_tensor_ops.workspace = true re_ui.workspace = true re_ws_comms = { workspace = true, features = ["client"] } +serde_json = "1" +tokio = { workspace = true, default-features = false, features = ["rt"] } +ewebsock = { version = "0.2", optional = false } +strum = { version = "0.24", features = ["derive"] } +strum_macros = "0.24" # Internal (optional): re_analytics = { workspace = true, optional = true } @@ -60,15 +72,16 @@ re_analytics = { workspace = true, optional = true } ahash.workspace = true anyhow.workspace = true bytemuck = { version = "1.11", features = ["extern_crate_alloc"] } -eframe = { workspace = true, default-features = false, features = [ +crossbeam-channel = "0.5.7" 
+eframe = { workspace = true, features = [ "default_fonts", "persistence", "puffin", "wgpu", ] } -egui = { workspace = true, features = ["extra_debug_asserts", "tracing"] } +egui.workspace = true egui_dock = { workspace = true, features = ["serde"] } -egui_extras = { workspace = true, features = ["tracing"] } +egui_extras.workspace = true egui-wgpu.workspace = true enumset.workspace = true glam = { workspace = true, features = [ @@ -93,16 +106,24 @@ serde = { version = "1", features = ["derive"] } slotmap = { version = "1.0.6", features = ["serde"] } smallvec = { workspace = true, features = ["serde"] } thiserror.workspace = true -time = { workspace = true, default-features = false, features = ["formatting"] } +time = { workspace = true, default-features = false, features = [ + "formatting", + "wasm-bindgen", +] } uuid = { version = "1.1", features = ["serde", "v4", "js"] } vec1 = "1.8" wgpu.workspace = true +url = "2.3.1" # native dependencies: [target.'cfg(not(target_arch = "wasm32"))'.dependencies] arboard = { version = "3.2", default-features = false, features = [ "image-data", ] } +pyo3 = { version = "0.18.0", features = ["auto-initialize"] } +pyo3-asyncio = { version = "0.18", features = ["attributes", "tokio-runtime"] } +async-std = "1.9" + puffin_http = "0.11" puffin.workspace = true diff --git a/crates/re_viewer/src/app.rs b/crates/re_viewer/src/app.rs index c374cc5a1aa3..acebc22d5d51 100644 --- a/crates/re_viewer/src/app.rs +++ b/crates/re_viewer/src/app.rs @@ -17,6 +17,7 @@ use re_ui::{toasts, Command}; use crate::{ app_icon::setup_app_icon, + depthai::depthai, misc::{AppOptions, Caches, RecordingConfig, ViewerContext}, ui::{data_ui::ComponentUiRegistry, Blueprint}, viewer_analytics::ViewerAnalytics, @@ -99,9 +100,47 @@ pub struct App { analytics: ViewerAnalytics, icon_status: AppIconStatus, + + #[cfg(not(target_arch = "wasm32"))] + backend_handle: Option, } impl App { + #[cfg(not(target_arch = "wasm32"))] + fn spawn_backend() -> Option { + // 
TODO(filip): Is there some way I can know for sure where depthai_viewer_backend is? + let backend_handle = match std::process::Command::new("python") + .args(["-m", "depthai_viewer_backend"]) + .spawn() + { + Ok(child) => { + println!("Backend started successfully."); + Some(child) + } + Err(err) => { + eprintln!("Failed to start depthai viewer: {err}"); + match std::process::Command::new("python3") + .args(["-m", "depthai_viewer_backend"]) + .spawn() + { + Ok(child) => { + println!("Backend started successfully."); + Some(child) + } + Err(err) => { + eprintln!("Failed to start depthai_viewer {err}"); + None + } + } + } + }; + // assert!( + // backend_handle.is_some(), + // "Couldn't start backend, exiting..." + // ); + backend_handle + } + /// Create a viewer that receives new log messages over time pub fn from_receiver( build_info: re_build_info::BuildInfo, @@ -157,6 +196,8 @@ impl App { analytics, icon_status: AppIconStatus::NotSetTryAgain, + #[cfg(not(target_arch = "wasm32"))] + backend_handle: App::spawn_backend(), } } @@ -263,6 +304,10 @@ impl App { } #[cfg(not(target_arch = "wasm32"))] Command::Quit => { + self.state.depthai_state.shutdown(); + if let Some(backend_handle) = &mut self.backend_handle { + backend_handle.kill(); + } _frame.close(); } @@ -425,6 +470,15 @@ impl eframe::App for App { [0.0; 4] // transparent so we can get rounded corners when doing [`re_ui::CUSTOM_WINDOW_DECORATIONS`] } + #[cfg(not(target_arch = "wasm32"))] + fn on_close_event(&mut self) -> bool { + self.state.depthai_state.shutdown(); + if let Some(backend_handle) = &mut self.backend_handle { + backend_handle.kill(); + } + true + } + fn save(&mut self, storage: &mut dyn eframe::Storage) { if self.startup_options.persist_state { eframe::set_value(storage, eframe::APP_KEY, &self.state); @@ -433,6 +487,27 @@ impl eframe::App for App { fn update(&mut self, egui_ctx: &egui::Context, frame: &mut eframe::Frame) { let frame_start = Instant::now(); + self.state.depthai_state.update(); // 
Always update depthai state + #[cfg(not(target_arch = "wasm32"))] + { + match &mut self.backend_handle { + Some(handle) => match handle.try_wait() { + Ok(status) => { + if status.is_some() { + handle.kill(); + re_log::debug!("Backend process has exited, restarting!"); + self.backend_handle = App::spawn_backend(); + } + } + Err(_) => {} + }, + None => self.backend_handle = App::spawn_backend(), + }; + } + + if self.backend_handle.is_none() { + self.backend_handle = App::spawn_backend(); + }; if self.startup_options.memory_limit.limit.is_none() { // we only warn about high memory usage if the user hasn't specified a limit @@ -444,8 +519,14 @@ impl eframe::App for App { } if self.shutdown.load(std::sync::atomic::Ordering::Relaxed) { + self.state.depthai_state.shutdown(); #[cfg(not(target_arch = "wasm32"))] - frame.close(); + { + if let Some(backend_handle) = &mut self.backend_handle { + backend_handle.kill(); + } + frame.close(); + } return; } @@ -548,18 +629,14 @@ impl eframe::App for App { .unwrap(); render_ctx.begin_frame(); - if log_db.is_default() { - wait_screen_ui(ui, &self.rx); - } else { - self.state.show( - ui, - render_ctx, - log_db, - &self.re_ui, - &self.component_ui_registry, - self.rx.source(), - ); - } + self.state.show( + ui, + render_ctx, + log_db, + &self.re_ui, + &self.component_ui_registry, + self.rx.source(), + ); render_ctx.before_submit(); } @@ -583,6 +660,7 @@ impl eframe::App for App { egui_ctx.input(|i| i.time), frame_start.elapsed().as_secs_f32(), ); + egui_ctx.request_repaint(); // Force repaint even when out of focus } } @@ -943,6 +1021,7 @@ struct AppState { /// Configuration for the current recording (found in [`LogDb`]). 
recording_configs: IntMap, + #[serde(skip)] // Quick fix for subscriptions setting, just don't remembet space views blueprints: HashMap, /// Which view panel is currently being shown @@ -951,6 +1030,9 @@ struct AppState { selection_panel: crate::selection_panel::SelectionPanel, time_panel: crate::time_panel::TimePanel, + selected_device: depthai::DeviceId, + depthai_state: depthai::State, + #[cfg(not(target_arch = "wasm32"))] #[serde(skip)] profiler: crate::Profiler, @@ -978,6 +1060,8 @@ impl AppState { blueprints, selection_panel, time_panel, + selected_device, + depthai_state, #[cfg(not(target_arch = "wasm32"))] profiler: _, } = self; @@ -998,12 +1082,14 @@ impl AppState { rec_cfg, re_ui, render_ctx, + depthai_state, }; let blueprint = blueprints .entry(selected_app_id.clone()) .or_insert_with(|| Blueprint::new(ui.ctx())); - time_panel.show_panel(&mut ctx, blueprint, ui); + // Hide time panel for now, reuse for recordings in the future + // time_panel.show_panel(&mut ctx, blueprint, ui); selection_panel.show_panel(&mut ctx, ui, blueprint); let central_panel_frame = egui::Frame { @@ -1345,7 +1431,7 @@ fn frame_time_label_ui(ui: &mut egui::Ui, app: &mut App) { // we use monospace so the width doesn't fluctuate as the numbers change. let text = format!("{ms:.1} ms"); ui.label(egui::RichText::new(text).monospace().color(color)) - .on_hover_text("CPU time used by Rerun Viewer each frame. Lower is better."); + .on_hover_text("CPU time used by Depthai Viewer each frame. 
Lower is better."); } } @@ -1360,7 +1446,7 @@ fn memory_use_label_ui(ui: &mut egui::Ui, gpu_resource_stats: &WgpuResourcePoolS .color(ui.visuals().weak_text_color()), ) .on_hover_text(format!( - "Rerun Viewer is using {} of RAM in {} separate allocations,\n\ + "Depthai Viewer is using {} of RAM in {} separate allocations,\n\ plus {} of GPU memory in {} textures and {} buffers.", bytes_used_text, format_number(count.count), @@ -1395,7 +1481,7 @@ fn input_latency_label_ui(ui: &mut egui::Ui, app: &mut App) { format_number(queue_len), ); let hover_text = - "When more data is arriving over network than the Rerun Viewer can index, a queue starts building up, leading to latency and increased RAM use.\n\ + "When more data is arriving over network than the Depthai Viewer can index, a queue starts building up, leading to latency and increased RAM use.\n\ This latency does NOT include network latency."; if latency_sec < app.state.app_options.warn_latency { diff --git a/crates/re_viewer/src/depthai/api.rs b/crates/re_viewer/src/depthai/api.rs new file mode 100644 index 000000000000..9e1144053471 --- /dev/null +++ b/crates/re_viewer/src/depthai/api.rs @@ -0,0 +1,76 @@ +use super::depthai; +use super::ws::{BackWsMessage as WsMessage, WebSocket, WsMessageData, WsMessageType}; +use serde::{Deserialize, Serialize}; + +const DEPTHAI_API_URL: &str = "http://localhost:8000"; + +#[derive(Clone, serde::Serialize, serde::Deserialize)] +pub struct ApiError { + pub detail: String, +} + +impl Default for ApiError { + fn default() -> Self { + Self { + detail: "ApiError".to_string(), + } + } +} + +#[derive(Default)] +pub struct BackendCommChannel { + pub ws: WebSocket, +} + +impl BackendCommChannel { + pub fn shutdown(&mut self) { + self.ws.shutdown(); + } + + pub fn set_subscriptions(&mut self, subscriptions: &Vec) { + self.ws.send( + serde_json::to_string(&WsMessage { + kind: WsMessageType::Subscriptions, + data: WsMessageData::Subscriptions(subscriptions.clone()), + }) + .unwrap(), + ); + 
} + + pub fn set_pipeline(&mut self, config: &depthai::DeviceConfig) { + self.ws.send( + serde_json::to_string(&WsMessage { + kind: WsMessageType::Pipeline, + data: WsMessageData::Pipeline(config.clone()), + }) + .unwrap(), + ); + } + + pub fn receive(&mut self) -> Option { + self.ws.receive() + } + + pub fn get_devices(&mut self) { + self.ws.send( + serde_json::to_string(&WsMessage { + kind: WsMessageType::Devices, + data: WsMessageData::Devices(Vec::new()), + }) + .unwrap(), + ); + } + + pub fn set_device(&mut self, device_id: depthai::DeviceId) { + self.ws.send( + serde_json::to_string(&WsMessage { + kind: WsMessageType::Device, + data: WsMessageData::Device(depthai::Device { + id: device_id, + ..Default::default() + }), + }) + .unwrap(), + ); + } +} diff --git a/crates/re_viewer/src/depthai/depthai.rs b/crates/re_viewer/src/depthai/depthai.rs new file mode 100644 index 000000000000..539b18a807d3 --- /dev/null +++ b/crates/re_viewer/src/depthai/depthai.rs @@ -0,0 +1,729 @@ +use itertools::Itertools; +use re_data_store::EntityPropertyMap; +use re_log_types::{EntityPath, EntityPathHash}; +use std::collections::{BTreeSet, HashMap}; + +use crate::ui::SpaceViewId; + +use super::super::ui::SpaceView; +use super::api::BackendCommChannel; +use super::ws::{BackWsMessage as WsMessage, WsMessageData, WsMessageType}; +use instant::Instant; +use std::fmt; +use std::sync::mpsc::channel; + +use strum::EnumIter; +use strum::IntoEnumIterator; + +#[derive(serde::Deserialize, serde::Serialize, fmt::Debug, PartialEq, Clone, Copy, EnumIter)] +#[allow(non_camel_case_types)] +pub enum ColorCameraResolution { + THE_720_P, + THE_800_P, + THE_1440X1080, + THE_1080_P, + THE_1200_P, + THE_5_MP, + THE_4_K, + THE_12_MP, + THE_4000X3000, + THE_13_MP, + THE_48_MP, +} + +#[derive(serde::Deserialize, serde::Serialize, fmt::Debug, PartialEq, Clone, Copy, EnumIter)] +#[allow(non_camel_case_types)] +pub enum MonoCameraResolution { + THE_400_P, + THE_480_P, + THE_720_P, + THE_800_P, + THE_1200_P, +} 
+ +// fmt::Display is used in UI while fmt::Debug is used with the depthai backend api +impl fmt::Display for ColorCameraResolution { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::THE_1080_P => write!(f, "1080p"), + Self::THE_4_K => write!(f, "4k"), + Self::THE_720_P => write!(f, "720p"), + Self::THE_800_P => write!(f, "800p"), + Self::THE_1200_P => write!(f, "1200p"), + Self::THE_5_MP => write!(f, "5MP"), + Self::THE_12_MP => write!(f, "12MP"), + Self::THE_13_MP => write!(f, "13MP"), + Self::THE_4000X3000 => write!(f, "4000x3000"), + Self::THE_48_MP => write!(f, "48MP"), + Self::THE_1440X1080 => write!(f, "1440x1080"), + } + } +} + +impl fmt::Display for MonoCameraResolution { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::THE_400_P => write!(f, "400p"), + Self::THE_480_P => write!(f, "480p"), + Self::THE_720_P => write!(f, "720p"), + Self::THE_800_P => write!(f, "800p"), + Self::THE_1200_P => write!(f, "1200p"), + } + } +} + +#[derive(serde::Deserialize, serde::Serialize, Clone, Copy, PartialEq)] +pub struct ColorCameraConfig { + pub fps: u8, + pub resolution: ColorCameraResolution, + #[serde(rename = "xout_video")] + pub stream_enabled: bool, +} + +impl Default for ColorCameraConfig { + fn default() -> Self { + Self { + fps: 30, + resolution: ColorCameraResolution::THE_1080_P, + stream_enabled: true, + } + } +} + +impl fmt::Debug for ColorCameraConfig { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Color camera config: fps: {}, resolution: {:?}", + self.fps, self.resolution, + ) + } +} + +#[derive(serde::Deserialize, serde::Serialize, Clone, Copy, PartialEq, EnumIter, Debug)] +#[allow(non_camel_case_types)] +pub enum BoardSocket { + AUTO, + RGB, + LEFT, + RIGHT, + CENTER, + CAM_A, + CAM_B, + CAM_C, + CAM_D, + CAM_E, + CAM_F, + CAM_G, + CAM_H, +} + +impl Default for BoardSocket { + fn default() -> Self { + Self::AUTO + } +} + +#[derive(serde::Deserialize, 
serde::Serialize, Clone, Copy, PartialEq)] +pub struct MonoCameraConfig { + pub fps: u8, + pub resolution: MonoCameraResolution, + pub board_socket: BoardSocket, + #[serde(rename = "xout")] + pub stream_enabled: bool, +} + +impl Default for MonoCameraConfig { + fn default() -> Self { + Self { + fps: 30, + resolution: MonoCameraResolution::THE_800_P, + board_socket: BoardSocket::AUTO, + stream_enabled: false, + } + } +} + +impl fmt::Debug for MonoCameraConfig { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Mono camera config: fps: {}, resolution: {:?}", + self.fps, self.resolution, + ) + } +} + +#[derive(serde::Deserialize, serde::Serialize, Clone, Copy, PartialEq)] +#[allow(non_camel_case_types)] +pub enum DepthProfilePreset { + HIGH_DENSITY, + HIGH_ACCURACY, +} + +impl Default for DepthProfilePreset { + fn default() -> Self { + Self::HIGH_DENSITY + } +} + +impl fmt::Display for DepthProfilePreset { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::HIGH_DENSITY => write!(f, "High Density"), + Self::HIGH_ACCURACY => write!(f, "High Accuracy"), + } + } +} + +#[derive(serde::Deserialize, serde::Serialize, Clone, Copy, PartialEq, Debug, EnumIter)] +#[allow(non_camel_case_types)] +pub enum DepthMedianFilter { + MEDIAN_OFF, + KERNEL_3x3, + KERNEL_5x5, + KERNEL_7x7, +} + +impl Default for DepthMedianFilter { + fn default() -> Self { + Self::KERNEL_7x7 + } +} + +#[derive(serde::Deserialize, serde::Serialize, Clone, Copy, PartialEq, Debug)] +pub struct DepthConfig { + pub median: DepthMedianFilter, + pub lr_check: bool, + pub lrc_threshold: u64, + pub extended_disparity: bool, + pub subpixel_disparity: bool, + pub sigma: i64, + pub confidence: i64, + pub align: BoardSocket, +} + +impl Default for DepthConfig { + fn default() -> Self { + Self { + median: DepthMedianFilter::default(), + lr_check: true, + lrc_threshold: 5, + extended_disparity: false, + subpixel_disparity: true, + sigma: 0, + confidence: 230, + 
align: BoardSocket::RGB, + } + } +} + +impl DepthConfig { + pub fn default_as_option() -> Option { + Some(Self::default()) + } +} + +#[derive(Default, serde::Deserialize, serde::Serialize, Clone)] +pub struct DeviceConfig { + pub color_camera: ColorCameraConfig, + pub left_camera: MonoCameraConfig, + pub right_camera: MonoCameraConfig, + #[serde(default = "bool_true")] + pub depth_enabled: bool, // Much easier to have an explicit bool for checkbox + #[serde(default = "DepthConfig::default_as_option")] + pub depth: Option, + pub ai_model: AiModel, +} + +impl PartialEq for DeviceConfig { + fn eq(&self, other: &Self) -> bool { + let depth_eq = match (&self.depth, &other.depth) { + (Some(a), Some(b)) => a == b, + _ => true, // If one is None, it's only different if depth_enabled is different + }; + self.color_camera == other.color_camera + && self.left_camera == other.left_camera + && self.right_camera == other.right_camera + && depth_eq + && self.depth_enabled == other.depth_enabled + && self.ai_model == other.ai_model + } +} + +#[inline] +fn bool_true() -> bool { + true +} + +#[derive(Default, serde::Deserialize, serde::Serialize)] +pub struct DeviceConfigState { + pub config: DeviceConfig, + #[serde(skip)] + pub update_in_progress: bool, +} + +impl fmt::Debug for DeviceConfig { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Device config: {:?} {:?} {:?}, depth: {:?}, ai_model: {:?}, depth_enabled: {:?}", + self.color_camera, + self.left_camera, + self.right_camera, + self.depth, + self.ai_model, + self.depth_enabled, + ) + } +} + +#[derive(serde::Deserialize)] +struct PipelineResponse { + message: String, +} + +impl Default for PipelineResponse { + fn default() -> Self { + Self { + message: "Pipeline not started".to_string(), + } + } +} + +#[derive(serde::Deserialize, serde::Serialize, Clone, Copy, PartialEq, fmt::Debug)] +pub enum ErrorAction { + None, + FullReset, +} + +#[derive(serde::Deserialize, serde::Serialize, Clone, 
PartialEq, fmt::Debug)]
+pub struct Error {
+    pub action: ErrorAction,
+    pub message: String,
+}
+
+impl Default for Error {
+    fn default() -> Self {
+        Self {
+            action: ErrorAction::None,
+            message: String::from("Invalid message"),
+        }
+    }
+}
+
+#[derive(serde::Deserialize, serde::Serialize, Clone, PartialEq, fmt::Debug, Default)]
+pub struct Device {
+    pub id: DeviceId,
+    pub supported_color_resolutions: Vec<ColorCameraResolution>,
+    pub supported_left_mono_resolutions: Vec<MonoCameraResolution>,
+    pub supported_right_mono_resolutions: Vec<MonoCameraResolution>,
+}
+
+#[derive(serde::Deserialize, serde::Serialize, Clone, fmt::Debug)]
+pub struct AiModel {
+    pub path: String,
+    pub display_name: String,
+}
+
+impl Default for AiModel {
+    fn default() -> Self {
+        Self {
+            path: String::from(""),
+            display_name: String::from("No model selected"),
+        }
+    }
+}
+
+impl PartialEq for AiModel {
+    fn eq(&self, other: &Self) -> bool {
+        self.path == other.path
+    }
+}
+
+#[derive(serde::Serialize, serde::Deserialize)]
+pub struct State {
+    #[serde(skip)]
+    devices_available: Option<Vec<DeviceId>>,
+    #[serde(skip)]
+    pub selected_device: Device,
+    pub applied_device_config: DeviceConfigState,
+    pub modified_device_config: DeviceConfigState,
+    #[serde(skip, default = "all_subscriptions")]
+    // Want to resubscribe to api when app is reloaded
+    pub subscriptions: Vec<ChannelId>, // Shown in ui
+    #[serde(skip)]
+    setting_subscriptions: bool,
+    #[serde(skip)]
+    pub backend_comms: BackendCommChannel,
+    #[serde(skip)]
+    poll_instant: Option<Instant>,
+    #[serde(default = "default_neural_networks")]
+    pub neural_networks: Vec<AiModel>,
+    #[serde(skip)]
+    pub new_auto_add_entity_paths: Vec<EntityPath>, // Used to force add space views when a new subscription appears
+}
+
+#[inline]
+fn all_subscriptions() -> Vec<ChannelId> {
+    ChannelId::iter().collect_vec()
+}
+
+#[inline]
+fn default_neural_networks() -> Vec<AiModel> {
+    vec![
+        AiModel::default(),
+        AiModel {
+            path: String::from("yolo-v3-tiny-tf"),
+            display_name: String::from("Yolo (tiny)"),
+        },
+        AiModel {
+            path: String::from("mobilenet-ssd"),
+            display_name:
String::from("MobileNet SSD"), + }, + AiModel { + path: String::from("face-detection-retail-0004"), + display_name: String::from("Face Detection"), + }, + AiModel { + path: String::from("age-gender-recognition-retail-0013"), + display_name: String::from("Age gender recognition"), + }, + ] +} + +impl Default for State { + fn default() -> Self { + Self { + devices_available: None, + selected_device: Device::default(), + applied_device_config: DeviceConfigState::default(), + modified_device_config: DeviceConfigState::default(), + subscriptions: ChannelId::iter().collect(), + setting_subscriptions: false, + backend_comms: BackendCommChannel::default(), + poll_instant: Some(Instant::now()), // No default for Instant + neural_networks: default_neural_networks(), + new_auto_add_entity_paths: Vec::new(), + } + } +} + +#[repr(u8)] +#[derive( + serde::Serialize, serde::Deserialize, Copy, Clone, PartialEq, Eq, fmt::Debug, Hash, EnumIter, +)] +pub enum ChannelId { + ColorImage, + LeftMono, + RightMono, + DepthImage, + PinholeCamera, + ImuData, +} + +use lazy_static::lazy_static; +lazy_static! 
{ + static ref DEPTHAI_ENTITY_HASHES: HashMap = HashMap::from([ + ( + EntityPath::from("color/camera/rgb/Color camera").hash(), + ChannelId::ColorImage, + ), + ( + EntityPath::from("mono/camera/left_mono/Left mono").hash(), + ChannelId::LeftMono, + ), + ( + EntityPath::from("mono/camera/right_mono/Right mono").hash(), + ChannelId::RightMono, + ), + ( + EntityPath::from("color/camera/rgb/Depth").hash(), + ChannelId::DepthImage, + ), + ( + EntityPath::from("mono/camera/right_mono/Depth").hash(), + ChannelId::DepthImage, + ), + ( + EntityPath::from("mono/camera/left_mono/Depth").hash(), + ChannelId::DepthImage, + ), + ]); +} + +impl State { + /// Should the space view be added to the UI based on the new subscriptions (a subscription change occurred) + fn create_entity_paths_from_subscriptions( + &mut self, + new_subscriptions: &Vec, + ) -> Vec { + let mut new_entity_paths = Vec::new(); + for channel in new_subscriptions.iter() { + match channel { + ChannelId::ColorImage => { + new_entity_paths.push(EntityPath::from("color/camera/rgb/Color camera")); + } + ChannelId::LeftMono => { + new_entity_paths.push(EntityPath::from("mono/camera/left_mono/Left mono")); + } + ChannelId::RightMono => { + new_entity_paths.push(EntityPath::from("mono/camera/right_mono/Right mono")); + } + ChannelId::DepthImage => { + new_entity_paths.push(EntityPath::from("color/camera/rgb/Depth")); + new_entity_paths.push(EntityPath::from("mono/camera/right_mono/Depth")); + new_entity_paths.push(EntityPath::from("mono/camera/left_mono/Depth")); + } + _ => {} + } + } + new_entity_paths + } + + /// Get the entities (the row in the blueprint tree ui) that should be removed based on UI (e.g. 
if depth is disabled, remove the depth image)
+    pub fn entities_to_remove(&mut self, entity_path: &BTreeSet<EntityPath>) -> Vec<EntityPath> {
+        let mut remove_channels = Vec::<ChannelId>::new();
+        if self.applied_device_config.config.depth.is_none() {
+            remove_channels.push(ChannelId::DepthImage);
+        }
+        if !self
+            .applied_device_config
+            .config
+            .right_camera
+            .stream_enabled
+        {
+            remove_channels.push(ChannelId::RightMono);
+        }
+        if !self.applied_device_config.config.left_camera.stream_enabled {
+            remove_channels.push(ChannelId::LeftMono);
+        }
+        if !self
+            .applied_device_config
+            .config
+            .color_camera
+            .stream_enabled
+        {
+            remove_channels.push(ChannelId::ColorImage);
+        }
+
+        entity_path
+            .iter()
+            .filter_map(|ep| {
+                if let Some(channel) = DEPTHAI_ENTITY_HASHES.get(&ep.hash()) {
+                    if remove_channels.contains(channel) {
+                        return Some(ep.clone());
+                    }
+                }
+                None
+            })
+            .collect_vec()
+    }
+
+    /// Set subscriptions based on the visible UI
+    pub fn set_subscriptions_from_space_views(&mut self, visible_space_views: Vec<&SpaceView>) {
+        // If any bool in the vec is true, the channel is currently visible in the ui somewhere
+        let mut visibilities = HashMap::<ChannelId, Vec<bool>>::from([
+            (ChannelId::ColorImage, Vec::new()),
+            (ChannelId::LeftMono, Vec::new()),
+            (ChannelId::RightMono, Vec::new()),
+            (ChannelId::DepthImage, Vec::new()),
+        ]);
+        // Fill in visibilities
+        for space_view in visible_space_views.iter() {
+            let property_map = space_view.data_blueprint.data_blueprints_projected();
+            for entity_path in space_view.data_blueprint.entity_paths().iter() {
+                if let Some(channel_id) = DEPTHAI_ENTITY_HASHES.get(&entity_path.hash()) {
+                    if let Some(visibility) = visibilities.get_mut(channel_id) {
+                        visibility.push(property_map.get(entity_path).visible);
+                    }
+                }
+            }
+        }
+
+        // First add subscriptions that don't have explicit enable disable buttons in the ui
+        let mut possible_subscriptions = Vec::<ChannelId>::from([ChannelId::ImuData]);
+        // Now add subscriptions that should be possible in terms of ui
+        if
self.applied_device_config.config.depth_enabled { + possible_subscriptions.push(ChannelId::DepthImage); + } + if self + .applied_device_config + .config + .color_camera + .stream_enabled + { + possible_subscriptions.push(ChannelId::ColorImage); + } + + if self.applied_device_config.config.left_camera.stream_enabled { + possible_subscriptions.push(ChannelId::LeftMono); + } + if self + .applied_device_config + .config + .right_camera + .stream_enabled + { + possible_subscriptions.push(ChannelId::RightMono); + } + + // Filter visibilities, include those that are currently visible and also possible (example pointcloud enabled == pointcloud possible) + let mut subscriptions = visibilities + .iter() + .filter_map(|(channel, vis)| { + if vis.iter().any(|x| *x) { + if possible_subscriptions.contains(channel) { + return Some(*channel); + } + } + None + }) + .collect_vec(); + + // Keep subscriptions that should be visible but have not yet been sent by the backend + for channel in ChannelId::iter() { + if !subscriptions.contains(&channel) + && possible_subscriptions.contains(&channel) + && self.subscriptions.contains(&channel) + { + subscriptions.push(channel); + } + } + + self.set_subscriptions(&subscriptions); + } + + pub fn set_subscriptions(&mut self, subscriptions: &Vec) { + if self.subscriptions.len() == subscriptions.len() + && self + .subscriptions + .iter() + .all(|channel_id| subscriptions.contains(channel_id)) + { + return; + } + self.backend_comms.set_subscriptions(subscriptions); + self.subscriptions = subscriptions.clone(); + } + + pub fn get_devices(&mut self) -> Vec { + // Return stored available devices or fetch them from the api (they get fetched every 30s via poller) + if let Some(devices) = self.devices_available.clone() { + return devices; + } + Vec::new() + } + + pub fn shutdown(&mut self) { + self.backend_comms.shutdown(); + } + + pub fn update(&mut self) { + if let Some(ws_message) = self.backend_comms.receive() { + re_log::debug!("Received message: 
{:?}", ws_message); + match ws_message.data { + WsMessageData::Subscriptions(subscriptions) => { + re_log::debug!("Setting subscriptions"); + self.new_auto_add_entity_paths = self.create_entity_paths_from_subscriptions( + &subscriptions + .iter() + .filter(|channel_id| !self.subscriptions.contains(channel_id)) + .cloned() + .collect_vec(), + ); + self.subscriptions = subscriptions; + } + WsMessageData::Devices(devices) => { + re_log::debug!("Setting devices..."); + self.devices_available = Some(devices); + } + WsMessageData::Pipeline(config) => { + let mut subs = self.subscriptions.clone(); + if let Some(depth) = config.depth { + subs.push(ChannelId::DepthImage); + } + if config.color_camera.stream_enabled { + subs.push(ChannelId::ColorImage); + } + if config.left_camera.stream_enabled { + subs.push(ChannelId::LeftMono); + } + if config.right_camera.stream_enabled { + subs.push(ChannelId::RightMono); + } + self.applied_device_config.config = config.clone(); + self.modified_device_config.config = config; + self.applied_device_config.config.depth_enabled = + self.applied_device_config.config.depth.is_some(); + self.modified_device_config.config.depth_enabled = + self.modified_device_config.config.depth.is_some(); + self.set_subscriptions(&subs); + self.applied_device_config.update_in_progress = false; + } + WsMessageData::Device(device) => { + re_log::debug!("Setting device: {device:?}"); + self.selected_device = device; + self.backend_comms.set_subscriptions(&self.subscriptions); + self.backend_comms + .set_pipeline(&self.applied_device_config.config); + self.applied_device_config.update_in_progress = true; + } + WsMessageData::Error(error) => { + re_log::error!("Error: {:}", error.message); + self.applied_device_config.update_in_progress = false; + match error.action { + ErrorAction::None => (), + ErrorAction::FullReset => { + self.set_device("".into()); + } + } + } + _ => {} + } + } + + if let Some(poll_instant) = self.poll_instant { + if 
poll_instant.elapsed().as_secs() < 2 { + return; + } + if self.selected_device.id == "" { + self.backend_comms.get_devices(); + } + self.poll_instant = Some(Instant::now()); + } else { + self.poll_instant = Some(Instant::now()); + } + } + + pub fn set_device(&mut self, device_id: DeviceId) { + if self.selected_device.id == device_id { + return; + } + re_log::debug!("Setting device: {:?}", device_id); + self.backend_comms.set_device(device_id); + } + + pub fn set_device_config(&mut self, config: &mut DeviceConfig) { + // Don't try to set pipeline in ws not connected or device not selected + if !self + .backend_comms + .ws + .connected + .load(std::sync::atomic::Ordering::SeqCst) + || self.selected_device.id == "" + { + return; + } + config.left_camera.board_socket = BoardSocket::LEFT; + config.right_camera.board_socket = BoardSocket::RIGHT; + if !config.depth_enabled { + config.depth = None; + } + self.backend_comms.set_pipeline(&config); + re_log::info!("Creating pipeline..."); + self.applied_device_config.update_in_progress = true; + } +} + +pub type DeviceId = String; diff --git a/crates/re_viewer/src/depthai/mod.rs b/crates/re_viewer/src/depthai/mod.rs new file mode 100644 index 000000000000..e20c7a80a1dc --- /dev/null +++ b/crates/re_viewer/src/depthai/mod.rs @@ -0,0 +1,3 @@ +mod api; +pub mod depthai; +mod ws; diff --git a/crates/re_viewer/src/depthai/ws.rs b/crates/re_viewer/src/depthai/ws.rs new file mode 100644 index 000000000000..6b8013dc1ecd --- /dev/null +++ b/crates/re_viewer/src/depthai/ws.rs @@ -0,0 +1,246 @@ +use crossbeam_channel::{self, Receiver, Sender}; +use ewebsock::{WsEvent, WsMessage}; +use serde::{Deserialize, Serialize}; +use std::fmt; +use std::ops::ControlFlow; +use std::process::exit; +use std::sync::atomic::AtomicBool; +use std::sync::Arc; + +use super::depthai; + +// TODO(filip) make this try to reconnect until a successful connection +async fn spawn_ws_client( + recv_tx: crossbeam_channel::Sender, + send_rx: 
crossbeam_channel::Receiver<WsMessage>,
+    shutdown: Arc<AtomicBool>,
+    connected: Arc<AtomicBool>,
+) {
+    let (error_tx, error_rx) = crossbeam_channel::unbounded();
+    // Retry connection until successful
+    loop {
+        let recv_tx = recv_tx.clone();
+        let error_tx = error_tx.clone();
+        let connected = connected.clone();
+        if let Ok(sender) = ewebsock::ws_connect(
+            String::from("ws://localhost:9001"),
+            Box::new(move |event| {
+                match event {
+                    WsEvent::Opened => {
+                        re_log::info!("Websocket opened");
+                        connected.store(true, std::sync::atomic::Ordering::SeqCst);
+                        ControlFlow::Continue(())
+                    }
+                    WsEvent::Message(message) => {
+                        // re_log::debug!("Websocket message");
+                        recv_tx.send(message);
+                        ControlFlow::Continue(())
+                    }
+                    WsEvent::Error(e) => {
+                        // re_log::info!("Websocket Error: {:?}", e);
+                        connected.store(false, std::sync::atomic::Ordering::SeqCst);
+                        error_tx.send(e);
+                        ControlFlow::Break(())
+                    }
+                    WsEvent::Closed => {
+                        // re_log::info!("Websocket Closed");
+                        error_tx.send(String::from("Websocket Closed"));
+                        ControlFlow::Break(())
+                    }
+                }
+            }),
+        )
+        .as_mut()
+        {
+            while error_rx.is_empty() {
+                if shutdown.load(std::sync::atomic::Ordering::SeqCst) {
+                    re_log::debug!("Shutting down websocket client");
+                    exit(0);
+                }
+                if let Ok(message) = send_rx.recv_timeout(std::time::Duration::from_millis(100)) {
+                    re_log::debug!("Sending message: {:?}", message);
+                    sender.send(message);
+                }
+            }
+            for error in error_rx.try_iter() {
+                re_log::debug!("Websocket error: {:}", error);
+            }
+        } else {
+            re_log::error!("Couldn't create websocket");
+        }
+        if shutdown.load(std::sync::atomic::Ordering::SeqCst) {
+            re_log::debug!("Shutting down websocket client");
+            exit(0);
+        }
+        std::thread::sleep(std::time::Duration::from_secs(1));
+    }
+}
+
+#[derive(Serialize, Deserialize, fmt::Debug)]
+pub enum WsMessageData {
+    Subscriptions(Vec<depthai::ChannelId>),
+    Devices(Vec<depthai::DeviceId>),
+    Device(depthai::Device),
+    Pipeline(depthai::DeviceConfig),
+    Error(depthai::Error),
+}
+
+#[derive(Deserialize, Serialize, fmt::Debug)]
+pub enum WsMessageType {
Subscriptions,
+    Devices,
+    Device,
+    Pipeline,
+    Error,
+}
+
+impl Default for WsMessageType {
+    fn default() -> Self {
+        Self::Error
+    }
+}
+
+// TODO(filip): Perhaps add a "message" field to all messages to display toasts
+#[derive(Serialize, fmt::Debug)]
+pub struct BackWsMessage {
+    #[serde(rename = "type")]
+    pub kind: WsMessageType,
+    pub data: WsMessageData,
+}
+
+impl<'de> Deserialize<'de> for BackWsMessage {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        #[derive(Deserialize)]
+        pub struct Message {
+            #[serde(rename = "type")]
+            pub kind: WsMessageType,
+            pub data: serde_json::Value,
+        }
+
+        let message = Message::deserialize(deserializer)?;
+        let data = match message.kind {
+            WsMessageType::Subscriptions => WsMessageData::Subscriptions(
+                serde_json::from_value(message.data).unwrap_or_default(),
+            ),
+            WsMessageType::Devices => {
+                WsMessageData::Devices(serde_json::from_value(message.data).unwrap_or_default())
+            }
+            WsMessageType::Device => {
+                WsMessageData::Device(serde_json::from_value(message.data).unwrap_or_default())
+            }
+            WsMessageType::Pipeline => {
+                WsMessageData::Pipeline(serde_json::from_value(message.data).unwrap())
+                // TODO(filip) change to unwrap_or_default when pipeline config api is more stable
+            }
+            WsMessageType::Error => {
+                WsMessageData::Error(serde_json::from_value(message.data).unwrap_or_default())
+            }
+        };
+        Ok(Self {
+            kind: message.kind,
+            data,
+        })
+    }
+}
+
+impl Default for BackWsMessage {
+    fn default() -> Self {
+        Self {
+            kind: WsMessageType::Error.into(),
+            data: WsMessageData::Error(depthai::Error::default()),
+        }
+    }
+}
+
+pub struct WebSocket {
+    receiver: crossbeam_channel::Receiver<WsMessage>,
+    sender: crossbeam_channel::Sender<WsMessage>,
+    shutdown: Arc<AtomicBool>,
+    task: tokio::task::JoinHandle<()>,
+    pub connected: Arc<AtomicBool>,
+}
+
+impl Default for WebSocket {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl WebSocket {
+    pub fn new() -> Self {
+        re_log::debug!("Creating websocket client");
+        let
(recv_tx, recv_rx) = crossbeam_channel::unbounded();
+        let (send_tx, send_rx) = crossbeam_channel::unbounded();
+        let shutdown = Arc::new(AtomicBool::new(false));
+        let shutdown_clone = shutdown.clone();
+        let connected = Arc::new(AtomicBool::new(false));
+        let connected_clone = connected.clone();
+        let mut task = None;
+        if let Ok(handle) = tokio::runtime::Handle::try_current() {
+            re_log::debug!("Using current tokio runtime");
+            task = Some(handle.spawn(spawn_ws_client(
+                recv_tx,
+                send_rx,
+                shutdown_clone,
+                connected_clone,
+            )));
+        } else {
+            re_log::debug!("Creating new tokio runtime");
+            task = Some(
+                tokio::runtime::Builder::new_current_thread()
+                    .build()
+                    .unwrap()
+                    .spawn(spawn_ws_client(
+                        recv_tx,
+                        send_rx,
+                        shutdown_clone,
+                        connected_clone,
+                    )),
+            );
+        }
+        Self {
+            receiver: recv_rx,
+            sender: send_tx,
+            shutdown,
+            task: task.unwrap(),
+            connected,
+        }
+    }
+
+    pub fn shutdown(&mut self) {
+        self.shutdown
+            .store(true, std::sync::atomic::Ordering::SeqCst);
+    }
+
+    pub fn receive(&self) -> Option<BackWsMessage> {
+        if let Ok(message) = self.receiver.try_recv() {
+            match message {
+                WsMessage::Text(text) => {
+                    re_log::debug!("Received: {:?}", text);
+                    match serde_json::from_str::<BackWsMessage>(&text.as_str()) {
+                        Ok(back_message) => {
+                            return Some(back_message);
+                        }
+                        Err(err) => {
+                            re_log::error!("Error: {:}", err);
+                            return None;
+                        }
+                    }
+                }
+                _ => return None,
+            }
+        }
+        None
+    }
+
+    pub fn send(&self, message: String) {
+        self.sender.send(WsMessage::Text(message));
+        // TODO(filip): This is a hotfix for the websocket not sending the message
+        // This doesn't actually send any message, but it makes the websocket actually send the message previous msg
+        // It has to be something related to tokio::spawn, because it works fine when just running in the current thread
+        self.sender.send(WsMessage::Text("".to_string()));
+    }
+}
diff --git a/crates/re_viewer/src/gpu_bridge/mod.rs b/crates/re_viewer/src/gpu_bridge/mod.rs
index 176f28149dee..9cf35e1d4c01 100644
---
a/crates/re_viewer/src/gpu_bridge/mod.rs +++ b/crates/re_viewer/src/gpu_bridge/mod.rs @@ -9,7 +9,9 @@ use egui::mutex::Mutex; use re_renderer::{ renderer::{ColormappedTexture, RectangleOptions}, - resource_managers::{GpuTexture2D, Texture2DCreationDesc}, + resource_managers::{ + GpuTexture2D, Texture2DCreationDesc, TextureCreationError, TextureManager2DError, + }, RenderContext, ViewBuilder, }; @@ -52,12 +54,12 @@ pub fn viewport_resolution_in_pixels(clip_rect: egui::Rect, pixels_from_point: f [resolution.x as u32, resolution.y as u32] } -pub fn try_get_or_create_texture<'a, Err>( +pub fn try_get_or_create_texture<'a, Err: std::fmt::Display>( render_ctx: &mut RenderContext, texture_key: u64, try_create_texture_desc: impl FnOnce() -> Result, Err>, -) -> Result { - render_ctx.texture_manager_2d.get_or_create_with( +) -> Result> { + render_ctx.texture_manager_2d.get_or_try_create_with( texture_key, &mut render_ctx.gpu_resources.textures, try_create_texture_desc, @@ -68,17 +70,12 @@ pub fn get_or_create_texture<'a>( render_ctx: &mut RenderContext, texture_key: u64, create_texture_desc: impl FnOnce() -> Texture2DCreationDesc<'a>, -) -> GpuTexture2D { - enum Never {} - let result: Result = render_ctx.texture_manager_2d.get_or_create_with( +) -> Result { + render_ctx.texture_manager_2d.get_or_create_with( texture_key, &mut render_ctx.gpu_resources.textures, - || Ok(create_texture_desc()), - ); - match result { - Ok(handle) => handle, - Err(never) => match never {}, - } + create_texture_desc, + ) } /// Render a `re_render` view using the given clip rectangle. diff --git a/crates/re_viewer/src/gpu_bridge/tensor_to_gpu.rs b/crates/re_viewer/src/gpu_bridge/tensor_to_gpu.rs index adcd47dd0922..7787f6c50f15 100644 --- a/crates/re_viewer/src/gpu_bridge/tensor_to_gpu.rs +++ b/crates/re_viewer/src/gpu_bridge/tensor_to_gpu.rs @@ -1,5 +1,6 @@ //! Upload [`Tensor`] to [`re_renderer`]. 
+use anyhow::Context; use std::borrow::Cow; use bytemuck::{allocation::pod_collect_to_vec, cast_slice, Pod}; @@ -94,7 +95,8 @@ fn color_tensor_to_gpu( width, height, }) - })?; + }) + .map_err(|err| anyhow::anyhow!("Failed to create texture for color tensor: {err}"))?; let texture_format = texture_handle.format(); @@ -110,7 +112,7 @@ fn color_tensor_to_gpu( crate::gpu_bridge::range(tensor_stats)? }; - let color_mapper = if texture_format.describe().components == 1 { + let color_mapper = if re_renderer::texture_info::num_texture_components(texture_format) == 1 { // Single-channel images = luminance = grayscale Some(ColorMapper::Function(re_renderer::Colormap::Grayscale)) } else { @@ -151,14 +153,13 @@ fn class_id_tensor_to_gpu( .ok_or_else(|| anyhow::anyhow!("compressed_tensor!?"))?; anyhow::ensure!(0.0 <= min, "Negative class id"); - // create a lookup texture for the colors that's 256 wide, - // and as many rows as needed to fit all the classes. - anyhow::ensure!(max <= 65535.0, "Too many class ids"); + anyhow::ensure!(max <= 65535.0, "Too many class ids"); // we only support u8 and u16 tensors // We pack the colormap into a 2D texture so we don't go over the max texture size. // We only support u8 and u16 class ids, so 256^2 is the biggest texture we need. 
+ let num_colors = (max + 1.0) as usize; let colormap_width = 256; - let colormap_height = (max as usize + colormap_width - 1) / colormap_width; + let colormap_height = (num_colors + colormap_width - 1) / colormap_width; let colormap_texture_handle = get_or_create_texture(render_ctx, hash(annotations.row_id), || { @@ -179,11 +180,13 @@ fn class_id_tensor_to_gpu( width: colormap_width as u32, height: colormap_height as u32, } - }); + }) + .context("Failed to create class_id_colormap.")?; let main_texture_handle = try_get_or_create_texture(render_ctx, hash(tensor.id()), || { general_texture_creation_desc_from_tensor(debug_name, tensor) - })?; + }) + .map_err(|err| anyhow::anyhow!("Failed to create texture for class id tensor: {err}"))?; Ok(ColormappedTexture { texture: main_texture_handle, @@ -212,7 +215,8 @@ fn depth_tensor_to_gpu( let texture = try_get_or_create_texture(render_ctx, hash(tensor.id()), || { general_texture_creation_desc_from_tensor(debug_name, tensor) - })?; + }) + .map_err(|err| anyhow::anyhow!("Failed to create depth tensor texture: {err}"))?; Ok(ColormappedTexture { texture, diff --git a/crates/re_viewer/src/lib.rs b/crates/re_viewer/src/lib.rs index 85a2939b3160..864b4dc594fd 100644 --- a/crates/re_viewer/src/lib.rs +++ b/crates/re_viewer/src/lib.rs @@ -1,9 +1,10 @@ -//! Rerun Viewer GUI. +//! Depthai Viewer GUI. //! -//! This crate contains all the GUI code for the Rerun Viewer, +//! This crate contains all the GUI code for the Depthai Viewer, //! including all 2D and 3D visualization code. 
mod app; +mod depthai; pub mod env_vars; pub(crate) mod gpu_bridge; pub mod math; @@ -115,11 +116,7 @@ impl AppEnvironment { // --------------------------------------------------------------------------- #[allow(dead_code)] -const APPLICATION_NAME: &str = "Rerun Viewer"; - -pub(crate) fn hardware_tier() -> re_renderer::config::HardwareTier { - re_renderer::config::HardwareTier::default() -} +const APPLICATION_NAME: &str = "Depthai Viewer"; pub(crate) fn wgpu_options() -> egui_wgpu::WgpuConfiguration { egui_wgpu::WgpuConfiguration { @@ -141,10 +138,8 @@ pub(crate) fn wgpu_options() -> egui_wgpu::WgpuConfiguration { egui_wgpu::SurfaceErrorAction::SkipFrame } }), - backends: re_renderer::config::supported_backends(), - device_descriptor: crate::hardware_tier().device_descriptor(), - // TODO(andreas): This should be the default for egui-wgpu. - power_preference: wgpu::util::power_preference_from_env().unwrap_or(wgpu::PowerPreference::HighPerformance), + supported_backends: re_renderer::config::supported_backends(), + device_descriptor: std::sync::Arc::new(|adapter| re_renderer::config::HardwareTier::from_adapter(adapter).device_descriptor()), ..Default::default() } } @@ -157,11 +152,14 @@ pub(crate) fn customize_eframe(cc: &eframe::CreationContext<'_>) -> re_ui::ReUi let paint_callback_resources = &mut render_state.renderer.write().paint_callback_resources; paint_callback_resources.insert(RenderContext::new( + &render_state.adapter, render_state.device.clone(), render_state.queue.clone(), RenderContextConfig { output_format_color: render_state.target_format, - hardware_tier: crate::hardware_tier(), + hardware_tier: re_renderer::config::HardwareTier::from_adapter( + &render_state.adapter, + ), }, )); } diff --git a/crates/re_viewer/src/misc/viewer_context.rs b/crates/re_viewer/src/misc/viewer_context.rs index 90fb0b40ea25..93584b9c254a 100644 --- a/crates/re_viewer/src/misc/viewer_context.rs +++ b/crates/re_viewer/src/misc/viewer_context.rs @@ -11,6 +11,8 @@ use 
super::{ HoverHighlight, }; +use crate::depthai::depthai; + /// Common things needed by many parts of the viewer. pub struct ViewerContext<'a> { /// Global options for the whole viewer. @@ -32,6 +34,7 @@ pub struct ViewerContext<'a> { pub re_ui: &'a re_ui::ReUi, pub render_ctx: &'a mut re_renderer::RenderContext, + pub depthai_state: &'a mut depthai::State, } impl<'a> ViewerContext<'a> { diff --git a/crates/re_viewer/src/native.rs b/crates/re_viewer/src/native.rs index c365c5829dc3..48f88d705205 100644 --- a/crates/re_viewer/src/native.rs +++ b/crates/re_viewer/src/native.rs @@ -1,6 +1,7 @@ use re_log_types::LogMsg; use crate::APPLICATION_NAME; +use pyo3::prelude::*; type AppCreator = Box, re_ui::ReUi) -> Box>; @@ -20,7 +21,7 @@ pub fn run_native_app(app_creator: AppCreator) -> eframe::Result<()> { transparent: re_ui::CUSTOM_WINDOW_DECORATIONS, follow_system_theme: false, - default_theme: eframe::Theme::Dark, + default_theme: eframe::Theme::Light, renderer: eframe::Renderer::Wgpu, wgpu_options: crate::wgpu_options(), diff --git a/crates/re_viewer/src/ui/auto_layout.rs b/crates/re_viewer/src/ui/auto_layout.rs index 36778aa689f3..6860da1c6922 100644 --- a/crates/re_viewer/src/ui/auto_layout.rs +++ b/crates/re_viewer/src/ui/auto_layout.rs @@ -78,6 +78,7 @@ pub(crate) fn tree_from_space_views( ViewCategory::Tensor | ViewCategory::TimeSeries => Some(1.0), // Not sure if we should do `None` here. 
ViewCategory::Text => Some(2.0), // Make text logs wide ViewCategory::BarChart => None, + ViewCategory::NodeGraph => Some(2.0), // Make node graphs wide }; SpaceMakeInfo { diff --git a/crates/re_viewer/src/ui/blueprint.rs b/crates/re_viewer/src/ui/blueprint.rs index 8a173ac58883..7eaa63f73b40 100644 --- a/crates/re_viewer/src/ui/blueprint.rs +++ b/crates/re_viewer/src/ui/blueprint.rs @@ -91,7 +91,7 @@ impl Blueprint { .show_inside(ui, |ui| { ui.horizontal_centered(|ui| { ui.strong("Blueprint").on_hover_text( - "The Blueprint is where you can configure the Rerun Viewer.", + "The Blueprint is where you can configure the Depthai Viewer.", ); ui.allocate_ui_with_layout( diff --git a/crates/re_viewer/src/ui/data_ui/image.rs b/crates/re_viewer/src/ui/data_ui/image.rs index 630c011513b7..b187d16892d9 100644 --- a/crates/re_viewer/src/ui/data_ui/image.rs +++ b/crates/re_viewer/src/ui/data_ui/image.rs @@ -5,13 +5,13 @@ use re_log_types::{ component_types::{ClassId, Tensor, TensorDataMeaning}, DecodedTensor, TensorElement, }; -use re_renderer::renderer::ColormappedTexture; -use re_ui::ReUi; use crate::{ misc::{caches::TensorStats, ViewerContext}, ui::annotations::AnnotationMap, }; +use re_renderer::renderer::ColormappedTexture; +use re_ui::ReUi; use super::{EntityDataUi, UiVerbosity}; diff --git a/crates/re_viewer/src/ui/memory_panel.rs b/crates/re_viewer/src/ui/memory_panel.rs index 796c98977a9d..5c61be30e84a 100644 --- a/crates/re_viewer/src/ui/memory_panel.rs +++ b/crates/re_viewer/src/ui/memory_panel.rs @@ -57,7 +57,7 @@ impl MemoryPanel { }); egui::CentralPanel::default().show_inside(ui, |ui| { - ui.label("🗠 Rerun Viewer memory use over time"); + ui.label("🗠 Depthai Viewer memory use over time"); self.plot(ui, limit); }); } @@ -69,7 +69,7 @@ impl MemoryPanel { store_config: &DataStoreConfig, store_stats: &DataStoreStats, ) { - ui.strong("Rerun Viewer resource usage"); + ui.strong("Depthai Viewer resource usage"); ui.separator(); ui.collapsing("CPU Resources", |ui| { 
diff --git a/crates/re_viewer/src/ui/mod.rs b/crates/re_viewer/src/ui/mod.rs index da730d21b5e7..f07ba8c6c466 100644 --- a/crates/re_viewer/src/ui/mod.rs +++ b/crates/re_viewer/src/ui/mod.rs @@ -10,6 +10,7 @@ mod space_view_entity_picker; mod space_view_heuristics; mod view_bar_chart; mod view_category; +mod view_node_graph; mod view_tensor; mod view_text; mod view_time_series; diff --git a/crates/re_viewer/src/ui/selection_panel.rs b/crates/re_viewer/src/ui/selection_panel.rs index f84b282b4b6a..8931d16d22cb 100644 --- a/crates/re_viewer/src/ui/selection_panel.rs +++ b/crates/re_viewer/src/ui/selection_panel.rs @@ -1,25 +1,497 @@ -use egui::NumExt as _; +use egui::{ + plot::{Line, Plot, PlotPoints}, + NumExt as _, +}; use re_data_store::{ query_latest_single, ColorMapper, Colormap, EditableAutoValue, EntityPath, EntityProperties, + ExtraQueryHistory, }; + +use itertools::Itertools; +use re_arrow_store::{LatestAtQuery, TimeInt, TimeRange, Timeline}; use re_log_types::{ - component_types::{Tensor, TensorDataMeaning}, - TimeType, Transform, + component_types::{ImuData, InstanceKey, Tensor, TensorDataMeaning}, + Component, TimeType, Transform, }; +use re_query::{query_primary_with_history, QueryError}; use crate::{ + depthai::depthai, + misc::SpaceViewHighlights, ui::{view_spatial::SpatialNavigationMode, Blueprint}, Item, UiVerbosity, ViewerContext, }; -use super::{data_ui::DataUi, space_view::ViewState}; +use egui_dock::{DockArea, NodeIndex, Tree}; + +use super::{data_ui::DataUi, space_view::ViewState, SpaceView, ViewCategory}; + +use egui::emath::History; +use strum::EnumIter; +use strum::IntoEnumIterator; // Needed for enum::iter() // --- +#[derive(Debug, Copy, Clone, EnumIter)] +enum XYZ { + X, + Y, + Z, +} + +#[derive(Debug, Copy, Clone)] +enum ImuTabKind { + Accel, + Gyro, + Mag, +} + +struct DepthaiTabs<'a, 'b> { + ctx: &'a mut ViewerContext<'b>, + accel_history: &'a mut History<[f32; 3]>, + gyro_history: &'a mut History<[f32; 3]>, + magnetometer_history: &'a 
mut History<[f32; 3]>, + now: f64, // Time elapsed from spawning SelectionPanel + unsubscribe_from_imu: bool, + imu_visible: &'a mut bool, + apply_button_enabled: &'a mut bool, +} + +impl<'a, 'b> DepthaiTabs<'a, 'b> { + pub fn tree() -> Tree { + let config_tab = "Configuration".to_string(); + let imu_tab = "IMU".to_string(); + let tree = Tree::new(vec![config_tab, imu_tab]); + tree + } + + fn device_configuration_ui(&mut self, ui: &mut egui::Ui) { + // re_log::info!("pipeline_state: {:?}", pipeline_state); + let mut device_config = self.ctx.depthai_state.modified_device_config.config.clone(); + let available_size = ui.available_size(); + egui::ScrollArea::both() + .auto_shrink([false; 2]) + .show(ui, |ui| { + egui::Frame { + inner_margin: egui::Margin::same(re_ui::ReUi::view_padding()), + ..Default::default() + } + .show(ui, |ui| { + ui.vertical(|ui| { + ui.collapsing("Color Camera", |ui| { + ui.vertical(|ui| { + ui.horizontal(|ui| { + ui.label("Resolution: "); + egui::ComboBox::from_id_source("color_camera_resolution") + .selected_text(format!( + "{}", + device_config.color_camera.resolution + )) + .width(100.0) + .show_ui(ui, |ui| { + for res in self + .ctx + .depthai_state + .selected_device + .supported_color_resolutions + .iter() + { + ui.selectable_value( + &mut device_config.color_camera.resolution, + *res, + format!("{res}"), + ); + } + }); + }); + ui.horizontal(|ui| { + ui.label("FPS: "); + ui.add(egui::DragValue::new( + &mut device_config.color_camera.fps, + )); + }); + ui.checkbox( + &mut device_config.color_camera.stream_enabled, + "Stream", + ); + }); + }); + ui.collapsing("Left Mono Camera", |ui| { + ui.vertical(|ui| { + ui.horizontal(|ui| { + ui.label("Resolution: "); + egui::ComboBox::from_id_source("left_camera_resolution") + .width(70.0) + .selected_text(format!( + "{}", + device_config.left_camera.resolution + )) + .show_ui(ui, |ui| { + for res in self + .ctx + .depthai_state + .selected_device + .supported_left_mono_resolutions + .iter() + { + 
ui.selectable_value( + &mut device_config.left_camera.resolution, + *res, + format!("{res}"), + ); + } + }); + }); + ui.horizontal(|ui| { + ui.label("FPS: "); + ui.add(egui::DragValue::new( + &mut device_config.left_camera.fps, + )); + }); + ui.checkbox( + &mut device_config.left_camera.stream_enabled, + "Stream", + ); + }); + }); + ui.collapsing("Right Mono Camera", |ui| { + ui.vertical(|ui| { + ui.horizontal(|ui| { + ui.label("Resolution: "); + egui::ComboBox::from_id_source("right_camera_resolution") + .width(70.0) + .selected_text(format!( + "{}", + device_config.right_camera.resolution + )) + .show_ui(ui, |ui| { + for res in self + .ctx + .depthai_state + .selected_device + .supported_right_mono_resolutions + .iter() + { + ui.selectable_value( + &mut device_config.right_camera.resolution, + *res, + format!("{res}"), + ); + } + }); + }); + ui.horizontal(|ui| { + ui.label("FPS: "); + ui.add(egui::DragValue::new( + &mut device_config.right_camera.fps, + )); + }); + ui.checkbox( + &mut device_config.right_camera.stream_enabled, + "Stream", + ); + }); + }); + ui.checkbox(&mut device_config.depth_enabled, "Depth"); + + let mut depth = device_config.depth.unwrap_or_default(); + if depth.align == depthai::BoardSocket::RGB && !depth.lr_check { + depth.align = depthai::BoardSocket::AUTO; + } + ui.collapsing("Depth settings", |ui| { + ui.vertical(|ui| { + ui.checkbox(&mut depth.lr_check, "LR Check"); + ui.horizontal(|ui| { + ui.label("Align to: "); + egui::ComboBox::from_id_source("depth_align_combo") + .width(100.0) + .selected_text(format!("{:?}", depth.align)) + .show_ui(ui, |ui| { + for align in depthai::BoardSocket::iter() { + if align == depthai::BoardSocket::RGB + && !depth.lr_check + { + continue; + } + ui.selectable_value( + &mut depth.align, + align, + format!("{:?}", align), + ); + } + }); + }); + ui.horizontal(|ui| { + ui.label("Median Filter: "); + egui::ComboBox::from_id_source("median_filter_combo") + .width(100.0) + .selected_text(format!("{:?}", 
depth.median)) + .show_ui(ui, |ui| { + for filter in depthai::DepthMedianFilter::iter() { + ui.selectable_value( + &mut depth.median, + filter, + format!("{:?}", filter), + ); + } + }); + }); + + ui.horizontal(|ui| { + ui.label("LR Threshold: "); + ui.add( + egui::DragValue::new(&mut depth.lrc_threshold) + .clamp_range(0..=10), + ); + }); + + ui.checkbox(&mut depth.extended_disparity, "Extended Disparity"); + ui.checkbox(&mut depth.subpixel_disparity, "Subpixel Disparity"); + ui.horizontal(|ui| { + ui.label("Sigma: "); + ui.add( + egui::DragValue::new(&mut depth.sigma) + .clamp_range(0..=65535), + ); + }); + ui.horizontal(|ui| { + ui.label("Confidence: "); + ui.add( + egui::DragValue::new(&mut depth.confidence) + .clamp_range(0..=255), + ) + }); + }); + }); + + ui.vertical(|ui| { + ui.label("AI Model:"); + egui::ComboBox::from_id_source("ai_model_selection") + .width(120.0) + .selected_text(format!("{}", device_config.ai_model.display_name)) + .show_ui(ui, |ui| { + for nn in self.ctx.depthai_state.neural_networks.iter() { + ui.selectable_value( + &mut device_config.ai_model, + nn.clone(), + &nn.display_name, + ); + } + }); + }); + device_config.depth = Some(depth); + self.ctx.depthai_state.modified_device_config.config = + device_config.clone(); + ui.horizontal(|ui| { + let apply_enabled = device_config + != self.ctx.depthai_state.applied_device_config.config + && !self.ctx.depthai_state.selected_device.id.is_empty(); + + ui.add_enabled_ui(apply_enabled, |ui| { + ui.scope(|ui| { + let mut style = ui.style_mut().clone(); + if apply_enabled { + let color = self.ctx.re_ui.design_tokens.primary_bg_color; + let hover_color = + self.ctx.re_ui.design_tokens.primary_hover_bg_color; + style.visuals.widgets.hovered.bg_fill = hover_color; + style.visuals.widgets.hovered.weak_bg_fill = hover_color; + style.visuals.widgets.inactive.bg_fill = color; + style.visuals.widgets.inactive.weak_bg_fill = color; + style.visuals.widgets.inactive.fg_stroke.color = + egui::Color32::WHITE; + 
style.visuals.widgets.hovered.fg_stroke.color = + egui::Color32::WHITE; + } + style.spacing.button_padding = egui::Vec2::new(24.0, 2.0); + ui.set_style(style); + if ui.button("Apply").clicked() { + self.ctx + .depthai_state + .set_device_config(&mut device_config); + } + }); + }); + }); + }); + }); + }); + } + + fn imu_ui(&mut self, ui: &mut egui::Ui) { + let imu_entity_path = &ImuData::entity_path(); + + if let Ok(latest) = re_query::query_entity_with_primary::( + &self.ctx.log_db.entity_db.data_store, + &LatestAtQuery::new(Timeline::log_time(), TimeInt::MAX), + imu_entity_path, + &[ImuData::name()], + ) { + latest.visit1(|_inst, imu_data| { + self.accel_history.add( + self.now, + [imu_data.accel.x, imu_data.accel.y, imu_data.accel.z], + ); + self.gyro_history.add( + self.now, + [imu_data.gyro.x, imu_data.gyro.y, imu_data.gyro.z], + ); + if let Some(mag) = imu_data.mag { + self.magnetometer_history + .add(self.now, [mag.x, mag.y, mag.z]); + } + }); + } + + let tab_kinds = [ImuTabKind::Accel, ImuTabKind::Gyro, ImuTabKind::Mag]; + egui::ScrollArea::both().show(ui, |ui| { + let max_width = ui.available_width(); + for kind in tab_kinds.iter() { + self.xyz_plot_ui(ui, *kind, max_width); + } + }); + } + + fn xyz_plot_ui(&mut self, ui: &mut egui::Ui, kind: ImuTabKind, max_width: f32) { + ui.vertical(|ui| { + let (history, display_name, unit) = match kind { + ImuTabKind::Accel => (&mut self.accel_history, "Accelerometer", "(m/s^2)"), + ImuTabKind::Gyro => (&mut self.gyro_history, "Gyroscope", "(rad/s)"), + ImuTabKind::Mag => (&mut self.magnetometer_history, "Magnetometer", "(uT)"), + }; + let Some(latest) = history.latest() else { + ui.label(format!("No {display_name} data yet")); + return; + }; + ui.label(display_name); + ui.add_sized([max_width, 150.0], |ui: &mut egui::Ui| { + ui.horizontal(|ui| { + for axis in XYZ::iter() { + ui.add_sized([max_width / 3.0, 150.0], |ui: &mut egui::Ui| { + Plot::new(format!("{:?} ({axis:?})", kind)) + .allow_drag(false) + 
.allow_zoom(false) + .allow_scroll(false) + .show(ui, |plot_ui| { + plot_ui.line(Line::new(PlotPoints::new( + (*history) + .iter() + .map(|(t, v)| [t, v[axis as usize].into()]) + .collect_vec(), + ))); + }) + .response + }); + } + }) + .response + }); + + ui.label(format!( + "{display_name}: ({:.2}, {:.2}, {:.2}) {unit}", + latest[0], latest[1], latest[2] + )); + }); + } +} + +impl<'a, 'b> egui_dock::TabViewer for DepthaiTabs<'a, 'b> { + type Tab = String; + + fn ui(&mut self, ui: &mut egui::Ui, tab: &mut Self::Tab) { + match tab.as_str() { + "Configuration" => { + // Unsubscribe from IMU data if subscribed + if self.unsubscribe_from_imu + && self + .ctx + .depthai_state + .subscriptions + .contains(&depthai::ChannelId::ImuData) + { + let mut subs = self + .ctx + .depthai_state + .subscriptions + .iter() + .filter_map(|x| { + if x != &depthai::ChannelId::ImuData { + return Some(x.clone()); + } else { + return None; + } + }) + .collect_vec(); + self.ctx.depthai_state.set_subscriptions(&subs); + self.accel_history.clear(); + self.gyro_history.clear(); + self.magnetometer_history.clear(); + } + self.device_configuration_ui(ui); + } + "IMU" => { + *self.imu_visible = true; + // Subscribe to IMU data if not already subscribed + if !self + .ctx + .depthai_state + .subscriptions + .contains(&depthai::ChannelId::ImuData) + { + let mut subs = self.ctx.depthai_state.subscriptions.clone(); + subs.push(depthai::ChannelId::ImuData); + self.ctx.depthai_state.set_subscriptions(&subs); + } + self.imu_ui(ui); + } + _ => {} + } + } + + fn title(&mut self, tab: &mut Self::Tab) -> egui::WidgetText { + tab.as_str().into() + } +} + /// The "Selection View" side-bar. 
-#[derive(Default, serde::Deserialize, serde::Serialize)] +#[derive(serde::Deserialize, serde::Serialize)] #[serde(default)] -pub(crate) struct SelectionPanel {} +pub(crate) struct SelectionPanel { + #[serde(skip)] + depthai_tabs: Tree, + #[serde(skip)] + accel_history: History<[f32; 3]>, + #[serde(skip)] + gyro_history: History<[f32; 3]>, + #[serde(skip)] + magnetometer_history: History<[f32; 3]>, + #[serde(skip)] + start_time: instant::Instant, + #[serde(skip)] + current_device_config_panel_min_height: f32, // A bit hacky, used to keep the top panel from becoming really small after showing spinner + #[serde(skip)] + device_config_panel_height: f32, // Used to reset height to previous height after config load + #[serde(skip)] + imu_tab_visible: bool, // Used to subscribe to IMU data when the imu tab is shown, or rather unsubscribe when it's not (enables the user to view both the imu and the configuration at the same time) + #[serde(skip)] + apply_cfg_button_enabled: bool, // Used to disable the apply button when the config has changed, keeps the state between frames +} + +impl Default for SelectionPanel { + fn default() -> Self { + Self { + depthai_tabs: DepthaiTabs::tree(), + accel_history: History::new(0..1000, 5.0), + gyro_history: History::new(0..1000, 5.0), + magnetometer_history: History::new(0..1000, 5.0), + start_time: instant::Instant::now(), + current_device_config_panel_min_height: 0.0, + device_config_panel_height: 500.0, + imu_tab_visible: false, + apply_cfg_button_enabled: false, + } + } +} impl SelectionPanel { #[allow(clippy::unused_self)] @@ -45,33 +517,138 @@ impl SelectionPanel { ui, blueprint.selection_panel_expanded, |ui: &mut egui::Ui| { - egui::TopBottomPanel::top("selection_panel_title_bar") - .exact_height(re_ui::ReUi::title_bar_height()) + let response_rect = egui::TopBottomPanel::top("Device configuration") + .resizable(true) + .min_height(self.current_device_config_panel_min_height) + .show_separator_line(true) .frame(egui::Frame { - 
inner_margin: egui::Margin::symmetric(re_ui::ReUi::view_padding(), 0.0), + inner_margin: egui::Margin::symmetric( + re_ui::ReUi::view_padding(), + re_ui::ReUi::view_padding(), + ), ..Default::default() }) .show_inside(ui, |ui| { - if let Some(selection) = ctx - .rec_cfg - .selection_state - .selection_ui(ctx.re_ui, ui, blueprint) - { - ctx.set_multi_selection(selection.iter().cloned()); + let mut available_devices = ctx.depthai_state.get_devices(); + let mut currently_selected_device = + ctx.depthai_state.selected_device.clone(); + let mut combo_device: depthai::DeviceId = currently_selected_device.id; + if combo_device != "" && available_devices.is_empty() { + available_devices.push(combo_device.clone()); } - }); + ui.vertical(|ui| { + ui.horizontal(|ui| { + ui.label("Device: "); + egui::ComboBox::from_id_source("device") + .width(70.0) + .selected_text(if combo_device != "" { + combo_device.clone().to_string() + } else { + "No device selected".to_string() + }) + .show_ui(ui, |ui| { + if ui + .selectable_value( + &mut combo_device, + "".to_string(), + "No device", + ) + .changed() + { + ctx.depthai_state.set_device(combo_device.clone()); + } + for device in available_devices { + if ui + .selectable_value( + &mut combo_device, + device.clone().to_string(), + device.to_string(), + ) + .changed() + { + ctx.depthai_state.set_device(combo_device.clone()); + } + } + }); + }); - egui::ScrollArea::both() - .auto_shrink([false; 2]) - .show(ui, |ui| { - egui::Frame { - inner_margin: egui::Margin::same(re_ui::ReUi::view_padding()), + if ctx.depthai_state.applied_device_config.update_in_progress { + ui.add_sized([ui.available_width(), 10.0], |ui: &mut egui::Ui| { + ui.with_layout( + egui::Layout::left_to_right(egui::Align::Center), + |ui| ui.add(egui::Spinner::new()), + ) + .response + }); + // The following lines are a hack to force the top panel to resize to a usable size + // after updating the device config, when updating set min height to 10 then detect if + // it's 10 
the config has been updated, set the panel to be of size 200.0, then in the next frame + // set min height to 20.0 so user can still resize the panel to be very small + self.current_device_config_panel_min_height = 10.0; + return; + } else if self.current_device_config_panel_min_height == 10.0 { + self.current_device_config_panel_min_height = + self.device_config_panel_height; + } else { + self.current_device_config_panel_min_height = 20.0; + } + let mut imu_tab_visible = false; + let unsubscribe_from_imu = !self.imu_tab_visible; + DockArea::new(&mut self.depthai_tabs) + .id(egui::Id::new("depthai_tabs")) + .style(re_ui::egui_dock_style(ui.style())) + .show_inside( + ui, + &mut DepthaiTabs { + ctx, + accel_history: &mut self.accel_history, + gyro_history: &mut self.gyro_history, + magnetometer_history: &mut self.magnetometer_history, + now: self.start_time.elapsed().as_nanos() as f64 / 1e9, + unsubscribe_from_imu, + imu_visible: &mut imu_tab_visible, + apply_button_enabled: &mut self.apply_cfg_button_enabled, + }, + ); + self.imu_tab_visible = imu_tab_visible; + }); + }) + .response + .rect; + // When panel isn't small keep remembering the height of the panel + if self.current_device_config_panel_min_height != 10.0 { + self.device_config_panel_height = (response_rect.max - response_rect.min).y; + } + + egui::CentralPanel::default().show_inside(ui, |ui| { + egui::TopBottomPanel::top("selection_panel_title_bar") + .exact_height(re_ui::ReUi::title_bar_height()) + .frame(egui::Frame { + inner_margin: egui::Margin::symmetric(re_ui::ReUi::view_padding(), 0.0), ..Default::default() - } + }) + .show_inside(ui, |ui| { + if let Some(selection) = ctx + .rec_cfg + .selection_state + .selection_ui(ctx.re_ui, ui, blueprint) + { + ctx.set_multi_selection(selection.iter().cloned()); + } + }); + + egui::ScrollArea::both() + .auto_shrink([true; 2]) .show(ui, |ui| { - self.contents(ui, ctx, blueprint); + egui::Frame { + inner_margin: egui::Margin::same(re_ui::ReUi::view_padding()), + 
..Default::default() + } + .show(ui, |ui| { + self.contents(ui, ctx, blueprint); + }); }); - }); + }); }, ); } @@ -409,35 +986,98 @@ fn entity_props_ui( }); } -fn colormap_props_ui(ui: &mut egui::Ui, entity_props: &mut EntityProperties) { - let current = *entity_props.color_mapper.get(); +fn colormap_props_ui( + ctx: &mut ViewerContext<'_>, + ui: &mut egui::Ui, + entity_path: &EntityPath, + entity_props: &mut EntityProperties, +) { + // Color mapping picker + { + let current = *entity_props.color_mapper.get(); + ui.label("Color map"); + egui::ComboBox::from_id_source("depth_color_mapper") + .selected_text(current.to_string()) + .show_ui(ui, |ui| { + ui.style_mut().wrap = Some(false); + ui.set_min_width(64.0); + + let mut add_label = |proposed| { + if ui + .selectable_label(current == proposed, proposed.to_string()) + .clicked() + { + entity_props.color_mapper = EditableAutoValue::Auto(proposed); + } + }; - ui.label("Color map"); - egui::ComboBox::from_id_source("color_mapper") - .selected_text(current.to_string()) - .show_ui(ui, |ui| { + add_label(ColorMapper::Colormap(Colormap::Grayscale)); + add_label(ColorMapper::Colormap(Colormap::Turbo)); + add_label(ColorMapper::Colormap(Colormap::Viridis)); + add_label(ColorMapper::Colormap(Colormap::Plasma)); + add_label(ColorMapper::Colormap(Colormap::Magma)); + add_label(ColorMapper::Colormap(Colormap::Inferno)); + add_label(ColorMapper::AlbedoTexture); + }); + ui.end_row(); + } + + if *entity_props.color_mapper.get() != ColorMapper::AlbedoTexture { + return; + } + + // Albedo texture picker + if let Some(tree) = entity_path + .parent() + .and_then(|path| ctx.log_db.entity_db.tree.subtree(&path)) + { + let query = ctx.current_query(); + let current = entity_props.albedo_texture.clone(); + + ui.label("Albedo texture"); + + let mut combo = egui::ComboBox::from_id_source("depth_color_texture"); + if let Some(current) = current.as_ref() { + combo = combo.selected_text(current.to_string()); + } else { + // Select the first 
image-shaped tensor we find + // tree.visit_children_recursively(&mut |ent_path| { + // if entity_props.albedo_texture.is_some() { + // return; + // } + // let Some(tensor) = + // query_latest_single::(&ctx.log_db.entity_db, ent_path, &query) else { + // return; + // }; + // if tensor.is_shaped_like_an_image() { + // entity_props.albedo_texture = Some(ent_path.clone()); + // } + // }); + } + + combo.show_ui(ui, |ui| { ui.style_mut().wrap = Some(false); ui.set_min_width(64.0); - // TODO(cmc): that is not ideal but I don't want to import yet another proc-macro... - let mut add_label = |proposed| { - if ui - .selectable_label(current == proposed, proposed.to_string()) - .clicked() + tree.visit_children_recursively(&mut |ent_path| { + let Some(tensor) = query_latest_single::( + &ctx.log_db.entity_db, + ent_path, + &query, + ) else { + return; + }; + + if tensor.is_shaped_like_an_image() + && ui + .selectable_label(current.as_ref() == Some(ent_path), ent_path.to_string()) + .clicked() { - entity_props.color_mapper = EditableAutoValue::Auto(proposed); + entity_props.albedo_texture = Some(ent_path.clone()); } - }; - - add_label(ColorMapper::Colormap(Colormap::Grayscale)); - add_label(ColorMapper::Colormap(Colormap::Turbo)); - add_label(ColorMapper::Colormap(Colormap::Viridis)); - add_label(ColorMapper::Colormap(Colormap::Plasma)); - add_label(ColorMapper::Colormap(Colormap::Magma)); - add_label(ColorMapper::Colormap(Colormap::Inferno)); + }); }); - - ui.end_row(); + } } fn pinhole_props_ui( @@ -510,9 +1150,25 @@ fn depth_props_ui( backproject_radius_scale_ui(ui, &mut entity_props.backproject_radius_scale); + ui.label("Backproject radius scale"); + let mut radius_scale = *entity_props.backproject_radius_scale.get(); + let speed = (radius_scale * 0.001).at_least(0.001); + if ui + .add( + egui::DragValue::new(&mut radius_scale) + .clamp_range(0.0..=1.0e8) + .speed(speed), + ) + .on_hover_text("Scales the radii of the points in the backprojected point cloud") + .changed() + { 
+ entity_props.backproject_radius_scale = EditableAutoValue::UserEdited(radius_scale); + } + ui.end_row(); + // TODO(cmc): This should apply to the depth map entity as a whole, but for that we // need to get the current hardcoded colormapping out of the image cache first. - colormap_props_ui(ui, entity_props); + colormap_props_ui(ctx, ui, entity_path, entity_props); } Some(()) @@ -522,6 +1178,7 @@ fn depth_from_world_scale_ui(ui: &mut egui::Ui, property: &mut EditableAutoValue ui.label("Backproject meter"); let mut value = *property.get(); let speed = (value * 0.05).at_least(0.01); + let response = ui .add( egui::DragValue::new(&mut value) diff --git a/crates/re_viewer/src/ui/space_view.rs b/crates/re_viewer/src/ui/space_view.rs index 016491107a31..45a29b96734f 100644 --- a/crates/re_viewer/src/ui/space_view.rs +++ b/crates/re_viewer/src/ui/space_view.rs @@ -9,8 +9,8 @@ use crate::{ use super::{ data_blueprint::DataBlueprintTree, space_view_heuristics::default_queried_entities, - view_bar_chart, view_category::ViewCategory, view_spatial, view_tensor, view_text, - view_time_series, + view_bar_chart, view_category::ViewCategory, view_node_graph, view_spatial, view_tensor, + view_text, view_time_series, }; // ---------------------------------------------------------------------------- @@ -191,6 +191,7 @@ impl SpaceView { } } } + ViewCategory::NodeGraph => self.view_state.state_node_graph.selection_ui(ctx.re_ui, ui), } } @@ -262,6 +263,11 @@ impl SpaceView { scene.load(ctx, &query); self.view_state.ui_tensor(ctx, ui, &scene); } + ViewCategory::NodeGraph => { + let mut scene = view_node_graph::SceneNodeGraph::default(); + scene.load(ctx, &query); + self.view_state.ui_node_graph(ctx, ui, &scene); + } }; } @@ -324,6 +330,7 @@ pub struct ViewState { state_bar_chart: view_bar_chart::BarChartState, pub state_spatial: view_spatial::ViewSpatialState, state_tensors: ahash::HashMap, + state_node_graph: view_node_graph::ViewNodeGraphState, } impl ViewState { @@ -410,6 +417,21 @@ 
impl ViewState { }); } + fn ui_node_graph( + &mut self, + ctx: &mut ViewerContext<'_>, + ui: &mut egui::Ui, + scene: &view_node_graph::SceneNodeGraph, + ) { + egui::Frame { + inner_margin: re_ui::ReUi::view_padding().into(), + ..egui::Frame::default() + } + .show(ui, |ui| { + view_node_graph::view_node_graph(ctx, ui, &mut self.state_node_graph, scene) + }); + } + fn ui_bar_chart( &mut self, ctx: &mut ViewerContext<'_>, diff --git a/crates/re_viewer/src/ui/space_view_heuristics.rs b/crates/re_viewer/src/ui/space_view_heuristics.rs index df1da9956907..a3eb14616210 100644 --- a/crates/re_viewer/src/ui/space_view_heuristics.rs +++ b/crates/re_viewer/src/ui/space_view_heuristics.rs @@ -256,6 +256,7 @@ fn is_default_added_to_space_view( re_log_types::component_types::InstanceKey::name(), re_log_types::component_types::KeypointId::name(), DataStore::insert_id_key(), + re_log_types::ImuData::name(), // Separate plotting view for IMU data. ]; entity_path.is_descendant_of(space_path) diff --git a/crates/re_viewer/src/ui/view_category.rs b/crates/re_viewer/src/ui/view_category.rs index 9a23512e42af..d0166aa2815e 100644 --- a/crates/re_viewer/src/ui/view_category.rs +++ b/crates/re_viewer/src/ui/view_category.rs @@ -2,7 +2,8 @@ use re_arrow_store::{LatestAtQuery, TimeInt}; use re_data_store::{EntityPath, LogDb, Timeline}; use re_log_types::{ component_types::{ - Box3D, LineStrip2D, LineStrip3D, Point2D, Point3D, Rect2D, Scalar, Tensor, TextEntry, + Box3D, LineStrip2D, LineStrip3D, NodeGraph, Point2D, Point3D, Rect2D, Scalar, Tensor, + TextEntry, }, Arrow3D, Component, Mesh3D, Transform, }; @@ -29,6 +30,7 @@ pub enum ViewCategory { /// High-dimensional tensor view Tensor, + NodeGraph, } impl ViewCategory { @@ -39,6 +41,7 @@ impl ViewCategory { ViewCategory::BarChart => &re_ui::icons::SPACE_VIEW_HISTOGRAM, ViewCategory::Spatial => &re_ui::icons::SPACE_VIEW_3D, ViewCategory::Tensor => &re_ui::icons::SPACE_VIEW_TENSOR, + ViewCategory::NodeGraph => &re_ui::icons::SPACE_VIEW_TENSOR, 
// TODO(filip): add icon } } } @@ -51,6 +54,7 @@ impl std::fmt::Display for ViewCategory { ViewCategory::BarChart => "Bar Chart", ViewCategory::Spatial => "Spatial", ViewCategory::Tensor => "Tensor", + ViewCategory::NodeGraph => "Node Graph", }) } } @@ -111,6 +115,8 @@ pub fn categorize_entity_path( } } } + } else if component == NodeGraph::name() { + set.insert(ViewCategory::NodeGraph); } } diff --git a/crates/re_viewer/src/ui/view_node_graph/mod.rs b/crates/re_viewer/src/ui/view_node_graph/mod.rs new file mode 100644 index 000000000000..2839c6b28214 --- /dev/null +++ b/crates/re_viewer/src/ui/view_node_graph/mod.rs @@ -0,0 +1,5 @@ +mod scene; +pub(crate) use self::scene::{NodeGraphEntry, SceneNodeGraph}; + +mod ui; +pub(crate) use self::ui::{view_node_graph, ViewNodeGraphState}; diff --git a/crates/re_viewer/src/ui/view_node_graph/scene.rs b/crates/re_viewer/src/ui/view_node_graph/scene.rs new file mode 100644 index 000000000000..5d0aa163daf1 --- /dev/null +++ b/crates/re_viewer/src/ui/view_node_graph/scene.rs @@ -0,0 +1,42 @@ +use re_arrow_store::TimeRange; +use re_data_store::EntityPath; +use re_log_types::{ + component_types::{self, InstanceKey}, + Component, +}; +use re_query::{range_entity_with_primary, QueryError}; + +use crate::{ui::SceneQuery, ViewerContext}; + +// --- + +#[derive(Debug, Clone)] +pub struct NodeGraphEntry { + pub entity_path: EntityPath, + + /// `None` for timeless data. + pub time: Option, + + pub color: Option<[u8; 4]>, + + pub level: Option, + + pub body: String, +} + +/// A NodeGraph scene, with everything needed to render it. +#[derive(Default)] +pub struct SceneNodeGraph { + pub NodeGraph_entries: Vec, +} + +impl SceneNodeGraph { + /// Loads all NodeGraph components into the scene according to the given query. 
+ pub(crate) fn load(&mut self, ctx: &ViewerContext<'_>, query: &SceneQuery<'_>) { + crate::profile_function!(); + + let store = &ctx.log_db.entity_db.data_store; + + for entity_path in query.entity_paths {} + } +} diff --git a/crates/re_viewer/src/ui/view_node_graph/ui.rs b/crates/re_viewer/src/ui/view_node_graph/ui.rs new file mode 100644 index 000000000000..2e08045cc3d5 --- /dev/null +++ b/crates/re_viewer/src/ui/view_node_graph/ui.rs @@ -0,0 +1,96 @@ +use std::collections::BTreeMap; + +use egui::{Color32, RichText}; + +use re_data_store::{EntityPath, Timeline}; +use re_log_types::TimePoint; + +use crate::ViewerContext; + +use super::{NodeGraphEntry, SceneNodeGraph}; +// --- Main view --- + +#[derive(Clone, Default, serde::Deserialize, serde::Serialize)] +#[serde(default)] +pub struct ViewNodeGraphState { + /// Keeps track of the latest time selection made by the user. + /// + /// We need this because we want the user to be able to manually scroll the + /// NodeGraph entry window however they please when the time cursor isn't moving. + latest_time: i64, + + pub filters: ViewNodeGraphFilters, + + monospace: bool, +} + +impl ViewNodeGraphState { + pub fn selection_ui(&mut self, re_ui: &re_ui::ReUi, ui: &mut egui::Ui) { + crate::profile_function!(); + re_log::info!("Holda from node graph"); + } +} + +pub(crate) fn view_node_graph( + ctx: &mut ViewerContext<'_>, + ui: &mut egui::Ui, + state: &mut ViewNodeGraphState, + scene: &SceneNodeGraph, +) -> egui::Response { + crate::profile_function!(); + + ui.with_layout(egui::Layout::top_down(egui::Align::Center), |ui| { + if ui.button("Button text").clicked() { + re_log::info!("Holda from node graph"); + } + }) + .response +} + +// --- Filters --- + +// TODO(cmc): implement "body contains " filter. +// TODO(cmc): beyond filters, it'd be nice to be able to swap columns at some point. 
+#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] +pub struct ViewNodeGraphFilters { + // Column filters: which columns should be visible? + // Timelines are special: each one has a dedicated column. + pub col_timelines: BTreeMap, + pub col_entity_path: bool, + pub col_log_level: bool, + + // Row filters: which rows should be visible? + pub row_entity_paths: BTreeMap, + pub row_log_levels: BTreeMap, +} + +impl Default for ViewNodeGraphFilters { + fn default() -> Self { + Self { + col_entity_path: true, + col_log_level: true, + col_timelines: Default::default(), + row_entity_paths: Default::default(), + row_log_levels: Default::default(), + } + } +} + +impl ViewNodeGraphFilters { + pub fn is_entity_path_visible(&self, entity_path: &EntityPath) -> bool { + self.row_entity_paths + .get(entity_path) + .copied() + .unwrap_or(true) + } + + pub fn is_log_level_visible(&self, level: &str) -> bool { + self.row_log_levels.get(level).copied().unwrap_or(true) + } + + // Checks whether new values are available for any of the filters, and updates everything + // accordingly. + fn update(&mut self, ctx: &mut ViewerContext<'_>, NodeGraph_entries: &[NodeGraphEntry]) { + crate::profile_function!(); + } +} diff --git a/crates/re_viewer/src/ui/view_spatial/eye.rs b/crates/re_viewer/src/ui/view_spatial/eye.rs index 287481311101..d1909b7cfee5 100644 --- a/crates/re_viewer/src/ui/view_spatial/eye.rs +++ b/crates/re_viewer/src/ui/view_spatial/eye.rs @@ -165,6 +165,7 @@ impl Eye { pub struct OrbitEye { pub orbit_center: Vec3, pub orbit_radius: f32, + pub world_from_view_rot: Quat, pub fov_y: f32, @@ -173,11 +174,38 @@ pub struct OrbitEye { /// For controlling the eye with WSAD in a smooth way. 
pub velocity: Vec3, + + /// Left over scroll delta that still needs to be applied (smoothed out over several frames) + #[serde(skip)] + unprocessed_scroll_delta: f32, } impl OrbitEye { const MAX_PITCH: f32 = 0.999 * 0.25 * std::f32::consts::TAU; + /// Scroll wheels delta are capped out at this value per second. Anything above is smoothed out over several frames. + /// + /// We generally only want this to only kick in when the user scrolls fast while we maintain very high framerate, + /// so don't go too low! + /// + /// To give a sense of ballpark: + /// * measured 14.0 as the value of a single notch on a logitech mouse wheel connected to a Macbook returns in a single frame (!) + /// (so scrolling 10 notches in a tenth of a second gives a per second scroll delta of 1400) + /// * macbook trackpad is typically at max 1.0 in every given frame + const MAX_SCROLL_DELTA_PER_SECOND: f32 = 1000.0; + + pub fn new(orbit_center: Vec3, orbit_radius: f32, world_from_view_rot: Quat, up: Vec3) -> Self { + OrbitEye { + orbit_center, + orbit_radius, + world_from_view_rot, + fov_y: Eye::DEFAULT_FOV_Y, + up, + velocity: Vec3::ZERO, + unprocessed_scroll_delta: 0.0, + } + } + pub fn position(&self) -> Vec3 { self.orbit_center + self.world_from_view_rot * vec3(0.0, 0.0, self.orbit_radius) } @@ -213,6 +241,10 @@ impl OrbitEye { fov_y: egui::lerp(self.fov_y..=other.fov_y, t), up: self.up.lerp(other.up, t).normalize_or_zero(), velocity: self.velocity.lerp(other.velocity, t), + unprocessed_scroll_delta: lerp( + self.unprocessed_scroll_delta..=other.unprocessed_scroll_delta, + t, + ), } } @@ -261,8 +293,9 @@ impl OrbitEye { } } - /// Returns `true` if any change - pub fn interact(&mut self, response: &egui::Response, drag_threshold: f32) -> bool { + /// Returns `true` if interaction occurred. + /// I.e. the camera changed via user input. 
+ pub fn update(&mut self, response: &egui::Response, drag_threshold: f32) -> bool { let mut did_interact = false; if response.drag_delta().length() > drag_threshold { @@ -283,21 +316,42 @@ impl OrbitEye { } } - if response.hovered() { + let (zoom_delta, raw_scroll_delta) = if response.hovered() { self.keyboard_navigation(&response.ctx); - let factor = response - .ctx - .input(|i| i.zoom_delta() * (i.scroll_delta.y / 200.0).exp()); - if factor != 1.0 { - let new_radius = self.orbit_radius / factor; - - // Don't let radius go too small or too big because this might cause infinity/nan in some calculations. - // Max value is chosen with some generous margin of an observed crash due to infinity. - if f32::MIN_POSITIVE < new_radius && new_radius < 1.0e17 { - self.orbit_radius = new_radius; - } + response.ctx.input(|i| (i.zoom_delta(), i.scroll_delta.y)) + } else { + (1.0, 0.0) + }; + if zoom_delta != 1.0 || raw_scroll_delta != 0.0 { + did_interact = true; + } - did_interact = true; + // Mouse wheels often go very large steps! + // This makes the zoom speed feel clunky, so we smooth it out over several frames. + let frame_delta = response.ctx.input(|i| i.stable_dt).at_most(0.1); + let accumulated_scroll_delta = raw_scroll_delta + self.unprocessed_scroll_delta; + let unsmoothed_scroll_per_second = accumulated_scroll_delta / frame_delta; + let scroll_dir = unsmoothed_scroll_per_second.signum(); + let scroll_delta = scroll_dir + * unsmoothed_scroll_per_second + .abs() + .at_most(Self::MAX_SCROLL_DELTA_PER_SECOND) + * frame_delta; + self.unprocessed_scroll_delta = accumulated_scroll_delta - scroll_delta; + + if self.unprocessed_scroll_delta.abs() > 0.1 { + // We have a lot of unprocessed scroll delta, so we need to keep calling this function. 
+ response.ctx.request_repaint(); + } + + let zoom_factor = zoom_delta * (scroll_delta / 200.0).exp(); + if zoom_factor != 1.0 { + let new_radius = self.orbit_radius / zoom_factor; + + // Don't let radius go too small or too big because this might cause infinity/nan in some calculations. + // Max value is chosen with some generous margin of an observed crash due to infinity. + if f32::MIN_POSITIVE < new_radius && new_radius < 1.0e17 { + self.orbit_radius = new_radius; } } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs index 0afad5c77dd3..81c2688a943a 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs @@ -7,7 +7,7 @@ use re_log_types::{ component_types::{ClassId, InstanceKey, KeypointId}, DecodedTensor, MeshId, }; -use re_renderer::{Color32, OutlineMaskPreference, Size}; +use re_renderer::{renderer::TexturedRect, Color32, OutlineMaskPreference, Size}; use crate::{ misc::{mesh_loader::LoadedMesh, SpaceViewHighlights, TransformCache, ViewerContext}, @@ -63,15 +63,8 @@ pub struct Image { pub tensor: DecodedTensor, - /// If this is a depth map, how long is a meter? - /// - /// For example, with a `u16` dtype one might have - /// `meter == 1000.0` for millimeter precision - /// up to a ~65m range. - pub meter: Option, - - /// A thing that provides additional semantic context for your dtype. - pub annotations: Arc, + /// Textured rectangle for the renderer. + pub textured_rect: TexturedRect, } pub enum UiLabelTarget { @@ -104,9 +97,6 @@ pub struct SceneSpatialUiData { /// Picking any any of these rects cause the referred instance to be hovered. /// Only use this for 2d overlays! pub pickable_ui_rects: Vec<(egui::Rect, InstancePathHash)>, - - /// Images are a special case of rects where we're storing some extra information to allow miniature previews etc. 
- pub images: Vec, } pub struct SceneSpatial { @@ -231,7 +221,7 @@ impl SceneSpatial { return SpatialNavigationMode::ThreeD; } - if !self.ui.images.is_empty() { + if !self.primitives.images.is_empty() { return SpatialNavigationMode::TwoD; } if self.num_logged_3d_objects == 0 { diff --git a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs index ddf73c2fdfbf..8d8325fdff1a 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs @@ -2,10 +2,10 @@ use ahash::HashSet; use re_data_store::InstancePathHash; -use re_log_types::{component_types::InstanceKey, EntityPathHash}; +use re_log_types::component_types::InstanceKey; use re_renderer::PickingLayerProcessor; -use super::{SceneSpatialPrimitives, SceneSpatialUiData}; +use super::{Image, SceneSpatialPrimitives, SceneSpatialUiData}; use crate::{ misc::instance_hash_conversions::instance_path_hash_from_picking_layer_id, ui::view_spatial::eye::Eye, @@ -116,11 +116,7 @@ impl PickingContext { self, previous_picking_result, ); - let mut rect_hits = picking_textured_rects( - self, - &primitives.textured_rectangles, - &primitives.textured_rectangles_ids, - ); + let mut rect_hits = picking_textured_rects(self, &primitives.images); rect_hits.sort_by(|a, b| b.depth_offset.cmp(&a.depth_offset)); let ui_rect_hits = picking_ui_rects(self, ui_data); @@ -241,23 +237,13 @@ fn picking_gpu( } } -fn picking_textured_rects( - context: &PickingContext, - textured_rectangles: &[re_renderer::renderer::TexturedRect], - textured_rectangles_ids: &[EntityPathHash], -) -> Vec { +fn picking_textured_rects(context: &PickingContext, images: &[Image]) -> Vec { crate::profile_function!(); let mut hits = Vec::new(); - for (rect, id) in textured_rectangles - .iter() - .zip(textured_rectangles_ids.iter()) - { - if !id.is_some() { - continue; - } - + for image in images { + let rect = &image.textured_rect; let rect_plane = 
macaw::Plane3::from_normal_point( rect.extent_u.cross(rect.extent_v).normalize(), rect.top_left_corner_position, @@ -277,7 +263,7 @@ fn picking_textured_rects( if (0.0..=1.0).contains(&u) && (0.0..=1.0).contains(&v) { hits.push(PickingRayHit { instance_path_hash: InstancePathHash { - entity_path_hash: *id, + entity_path_hash: image.ent_path.hash(), instance_key: InstanceKey::from_2d_image_coordinate( [ (u * rect.colormapped_texture.texture.width() as f32) as u32, diff --git a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs index 392f0bde4fa1..407d1eec47fa 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs @@ -1,6 +1,6 @@ use egui::Color32; use re_data_store::EntityPath; -use re_log_types::{component_types::InstanceKey, EntityPathHash}; +use re_log_types::component_types::InstanceKey; use re_renderer::{ renderer::{DepthClouds, MeshInstance}, LineStripSeriesBuilder, PointCloudBuilder, @@ -20,11 +20,7 @@ pub struct SceneSpatialPrimitives { /// Estimated bounding box of all data in scene coordinates. Accumulated. 
pub(super) bounding_box: macaw::BoundingBox, - // TODO(andreas): Storing extra data like so is unsafe and not future proof either - // (see also above comment on the need to separate cpu-readable data) - pub textured_rectangles_ids: Vec, - pub textured_rectangles: Vec, - + pub images: Vec, pub line_strips: LineStripSeriesBuilder, pub points: PointCloudBuilder, pub meshes: Vec, @@ -44,8 +40,7 @@ impl SceneSpatialPrimitives { pub fn new(re_ctx: &mut re_renderer::RenderContext) -> Self { Self { bounding_box: macaw::BoundingBox::nothing(), - textured_rectangles_ids: Default::default(), - textured_rectangles: Default::default(), + images: Default::default(), line_strips: LineStripSeriesBuilder::new(re_ctx) .radius_boost_in_ui_points_for_outlines(SIZE_BOOST_IN_POINTS_FOR_LINE_OUTLINES), points: PointCloudBuilder::new(re_ctx) @@ -68,8 +63,7 @@ impl SceneSpatialPrimitives { pub fn num_primitives(&self) -> usize { let Self { bounding_box: _, - textured_rectangles, - textured_rectangles_ids: _, + images, line_strips, points, meshes, @@ -77,7 +71,7 @@ impl SceneSpatialPrimitives { any_outlines: _, } = &self; - textured_rectangles.len() + images.len() + line_strips.vertices.len() + points.vertices.len() + meshes.len() @@ -89,8 +83,7 @@ impl SceneSpatialPrimitives { let Self { bounding_box, - textured_rectangles_ids: _, - textured_rectangles, + images, line_strips, points, meshes, @@ -100,7 +93,8 @@ impl SceneSpatialPrimitives { *bounding_box = macaw::BoundingBox::nothing(); - for rect in textured_rectangles { + for image in images { + let rect = &image.textured_rect; bounding_box.extend(rect.top_left_corner_position); bounding_box.extend(rect.top_left_corner_position + rect.extent_u); bounding_box.extend(rect.top_left_corner_position + rect.extent_v); diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs index 5e1227feb4ad..ff3a2b18748a 100644 --- 
a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs @@ -9,7 +9,8 @@ use re_log_types::{ }; use re_query::{query_primary_with_history, EntityView, QueryError}; use re_renderer::{ - renderer::{DepthCloud, DepthCloudDepthData, RectangleOptions}, + renderer::{DepthCloud, DepthCloudAlbedoData, RectangleOptions}, + resource_managers::Texture2DCreationDesc, Colormap, OutlineMaskPreference, }; @@ -24,9 +25,7 @@ use crate::{ use super::ScenePart; -#[allow(clippy::too_many_arguments)] -fn push_tensor_texture( - scene: &mut SceneSpatial, +fn to_textured_rect( ctx: &mut ViewerContext<'_>, annotations: &Annotations, world_from_obj: glam::Mat4, @@ -34,10 +33,10 @@ fn push_tensor_texture( tensor: &DecodedTensor, multiplicative_tint: egui::Rgba, outline_mask: OutlineMaskPreference, -) { +) -> Option { crate::profile_function!(); - let Some([height, width, _]) = tensor.image_height_width_channels() else { return; }; + let Some([height, width, _]) = tensor.image_height_width_channels() else { return None; }; let debug_name = ent_path.to_string(); let tensor_stats = ctx.cache.tensor_stats(tensor); @@ -67,7 +66,7 @@ fn push_tensor_texture( re_renderer::renderer::TextureFilterMin::Linear }; - let textured_rect = re_renderer::renderer::TexturedRect { + Some(re_renderer::renderer::TexturedRect { top_left_corner_position: world_from_obj.transform_point3(glam::Vec3::ZERO), extent_u: world_from_obj.transform_vector3(glam::Vec3::X * width as f32), extent_v: world_from_obj.transform_vector3(glam::Vec3::Y * height as f32), @@ -79,15 +78,11 @@ fn push_tensor_texture( depth_offset: -1, // Push to background. Mostly important for mouse picking order! 
outline_mask, }, - }; - scene.primitives.textured_rectangles.push(textured_rect); - scene - .primitives - .textured_rectangles_ids - .push(ent_path.hash()); + }) } Err(err) => { re_log::error_once!("Failed to create texture from tensor for {debug_name:?}: {err}"); + None } } } @@ -98,15 +93,17 @@ fn handle_image_layering(scene: &mut SceneSpatial) { // Handle layered rectangles that are on (roughly) the same plane and were logged in sequence. // First, group by similar plane. // TODO(andreas): Need planes later for picking as well! - let rects_grouped_by_plane = { + let images_grouped_by_plane = { let mut cur_plane = macaw::Plane3::from_normal_dist(Vec3::NAN, std::f32::NAN); let mut rectangle_group = Vec::new(); scene .primitives - .textured_rectangles - .iter_mut() + .images + .drain(..) // We rebuild the list as we might reorder as well! .batching(move |it| { - for rect in it.by_ref() { + for image in it { + let rect = &image.textured_rect; + let prev_plane = cur_plane; cur_plane = macaw::Plane3::from_normal_point( rect.extent_u.cross(rect.extent_v).normalize(), @@ -118,10 +115,10 @@ fn handle_image_layering(scene: &mut SceneSpatial) { && prev_plane.normal.dot(cur_plane.normal) < 0.99 && (prev_plane.d - cur_plane.d) < 0.01 { - let previous_group = std::mem::replace(&mut rectangle_group, vec![rect]); + let previous_group = std::mem::replace(&mut rectangle_group, vec![image]); return Some(previous_group); } - rectangle_group.push(rect); + rectangle_group.push(image); } if !rectangle_group.is_empty() { Some(rectangle_group.drain(..).collect()) @@ -129,14 +126,19 @@ fn handle_image_layering(scene: &mut SceneSpatial) { None } }) - }; - // Then, change opacity & transformation for planes within group except the base plane. 
- for mut grouped_rects in rects_grouped_by_plane { - let total_num_images = grouped_rects.len(); - for (idx, rect) in grouped_rects.iter_mut().enumerate() { + } + .collect_vec(); + + // Then, for each planar group do resorting and change transparency. + for mut grouped_images in images_grouped_by_plane { + // Class id images should generally come last as they typically have large areas being zeroed out (which maps to fully transparent). + grouped_images.sort_by_key(|image| image.tensor.meaning == TensorDataMeaning::ClassId); + + let total_num_images = grouped_images.len(); + for (idx, image) in grouped_images.iter_mut().enumerate() { // Set depth offset for correct order and avoid z fighting when there is a 3d camera. // Keep behind depth offset 0 for correct picking order. - rect.options.depth_offset = + image.textured_rect.options.depth_offset = (idx as isize - total_num_images as isize) as re_renderer::DepthOffset; // make top images transparent @@ -145,8 +147,14 @@ fn handle_image_layering(scene: &mut SceneSpatial) { } else { 1.0 / total_num_images.at_most(20) as f32 }; // avoid precision problems in framebuffer - rect.options.multiplicative_tint = rect.options.multiplicative_tint.multiply(opacity); + image.textured_rect.options.multiplicative_tint = image + .textured_rect + .options + .multiplicative_tint + .multiply(opacity); } + + scene.primitives.images.extend(grouped_images); } } @@ -159,7 +167,7 @@ impl ImagesPart { scene: &mut SceneSpatial, ctx: &mut ViewerContext<'_>, transforms: &TransformCache, - properties: &EntityProperties, + properties: &mut EntityProperties, ent_path: &EntityPath, world_from_obj: glam::Mat4, highlights: &SpaceViewHighlights, @@ -189,16 +197,6 @@ impl ImagesPart { }; let annotations = scene.annotation_map.find(ent_path); - - // TODO(jleibs): Meter should really be its own component - let meter = tensor.meter; - scene.ui.images.push(Image { - ent_path: ent_path.clone(), - tensor: tensor.clone(), - meter, - annotations: 
annotations.clone(), - }); - let entity_highlight = highlights.entity_outline_mask(ent_path.hash()); if *properties.backproject_depth.get() && tensor.meaning == TensorDataMeaning::Depth { @@ -233,8 +231,7 @@ impl ImagesPart { DefaultColor::OpaqueWhite, ); - push_tensor_texture( - scene, + if let Some(textured_rect) = to_textured_rect( ctx, &annotations, world_from_obj, @@ -242,7 +239,13 @@ impl ImagesPart { &tensor, color.into(), entity_highlight.overall, - ); + ) { + scene.primitives.images.push(Image { + ent_path: ent_path.clone(), + tensor, + textured_rect, + }); + } } Ok(()) @@ -253,7 +256,7 @@ impl ImagesPart { scene: &mut SceneSpatial, ctx: &mut ViewerContext<'_>, transforms: &TransformCache, - properties: &EntityProperties, + properties: &mut EntityProperties, tensor: &DecodedTensor, ent_path: &EntityPath, pinhole_ent_path: &EntityPath, @@ -277,26 +280,55 @@ impl ImagesPart { return Err(format!("Couldn't fetch pinhole extrinsics at {pinhole_ent_path:?}")); }; - // TODO(cmc): automagically convert as needed for non-natively supported datatypes? - let data = match &tensor.data { - // NOTE: Shallow clone if feature `arrow` is enabled, full alloc + memcpy otherwise. - TensorData::U16(data) => DepthCloudDepthData::U16(data.clone()), - TensorData::F32(data) => DepthCloudDepthData::F32(data.clone()), - _ => { - return Err(format!( - "Tensor datatype {} is not supported for backprojection", - tensor.dtype() - )); - } + let Some([height, width, _]) = tensor.image_height_width_channels() else { + return Err(format!("Tensor at {ent_path:?} is not an image")); + }; + let dimensions = glam::UVec2::new(width as _, height as _); + + let depth_texture = { + // Ideally, we'd use the same key as for displaying the texture, but we might make other compromises regarding formats etc.! 
+ // So to not couple this, we use a different key here + let texture_key = egui::util::hash((tensor.id(), "depth_cloud")); + let mut data_f32 = Vec::new(); + ctx.render_ctx + .texture_manager_2d + .get_or_try_create_with( + texture_key, + &mut ctx.render_ctx.gpu_resources.textures, + || { + // TODO(andreas/cmc): Ideally we'd upload the u16 data as-is. + // However, R16Unorm is behind a feature flag and Depth16Unorm doesn't work on WebGL (and is awkward as this is a depth buffer format!). + let data = match &tensor.data { + TensorData::U16(data) => { + data_f32.extend(data.as_slice().iter().map(|d| *d as f32)); + bytemuck::cast_slice(&data_f32).into() + } + TensorData::F32(data) => bytemuck::cast_slice(data).into(), + _ => { + return Err(format!( + "Tensor datatype {} is not supported for back-projection", + tensor.dtype() + )); + } + }; + + Ok(Texture2DCreationDesc { + label: format!("Depth cloud for {ent_path:?}").into(), + data, + format: wgpu::TextureFormat::R32Float, + width: width as _, + height: height as _, + }) + }, + ) + .map_err(|err| format!("Failed to create depth cloud texture: {err}"))? 
}; let depth_from_world_scale = *properties.depth_from_world_scale.get(); - let world_depth_from_data_depth = 1.0 / depth_from_world_scale; - let (h, w) = (tensor.shape()[0].size, tensor.shape()[1].size); - let dimensions = glam::UVec2::new(w as _, h as _); + let world_depth_from_texture_depth = 1.0 / depth_from_world_scale; - let colormap = match *properties.color_mapper.get() { + let mut colormap = match *properties.color_mapper.get() { re_data_store::ColorMapper::Colormap(colormap) => match colormap { re_data_store::Colormap::Grayscale => Colormap::Grayscale, re_data_store::Colormap::Turbo => Colormap::Turbo, @@ -305,13 +337,58 @@ impl ImagesPart { re_data_store::Colormap::Magma => Colormap::Magma, re_data_store::Colormap::Inferno => Colormap::Inferno, }, + re_data_store::ColorMapper::AlbedoTexture => Colormap::AlbedoTexture, }; + let mut albedo_data = None; + let mut albedo_dimensions = glam::UVec2::ZERO; + + if colormap == Colormap::AlbedoTexture { + let tensor = properties.albedo_texture.as_ref().and_then(|path| { + query_latest_single::(&ctx.log_db.entity_db, path, &ctx.current_query()) + }); + if let Some(tensor) = tensor { + let (h, w) = (tensor.shape()[0].size, tensor.shape()[1].size); + albedo_dimensions = glam::UVec2::new(w as _, h as _); + + // TODO(cmc): How does one know whether the texture is sRGB or not at this point? + // TODO(cmc): We should easily be able to pass almost any datatype here. 
+ + albedo_data = match &tensor.data { + TensorData::U8(data) => { + if let Some([_, _, c]) = tensor.image_height_width_channels() { + match c { + 1 => Some(DepthCloudAlbedoData::Mono8(data.0.to_vec())), + 3 => Some(DepthCloudAlbedoData::Rgb8(data.0.to_vec())), + 4 => Some(DepthCloudAlbedoData::Rgb8Srgb(data.0.to_vec())), + _ => None, + } + } else { + None + } + } + _ => { + re_log::warn_once!( + "Tensor datatype not supported for albedo texture ({:?})", + std::mem::discriminant(&tensor.data), + ); + None + } + }; + } else { + re_log::warn_once!( + "Albedo texture couldn't be fetched ({:?})", + properties.albedo_texture + ); + colormap = Colormap::Grayscale; + } + } + // We want point radius to be defined in a scale where the radius of a point // is a factor (`backproject_radius_scale`) of the diameter of a pixel projected // at that distance. let fov_y = intrinsics.fov_y().unwrap_or(1.0); - let pixel_width_from_depth = (0.5 * fov_y).tan() / (0.5 * h as f32); + let pixel_width_from_depth = (0.5 * fov_y).tan() / (0.5 * height as f32); let radius_scale = *properties.backproject_radius_scale.get(); let point_radius_from_world_depth = radius_scale * pixel_width_from_depth; @@ -321,23 +398,25 @@ impl ImagesPart { // This could only happen for Jpegs, and we should never get here. 
// TODO(emilk): refactor the code so that we can always calculate a range for the tensor re_log::warn_once!("Couldn't calculate range for a depth tensor!?"); - match data { - DepthCloudDepthData::U16(_) => u16::MAX as f32, - DepthCloudDepthData::F32(_) => 10.0, + match tensor.data { + TensorData::U16(_) => u16::MAX as f32, + _ => 10.0, } }; scene.primitives.depth_clouds.clouds.push(DepthCloud { world_from_obj, depth_camera_intrinsics: intrinsics.image_from_cam.into(), - world_depth_from_data_depth, + world_depth_from_texture_depth, point_radius_from_world_depth, - max_depth_in_world: world_depth_from_data_depth * max_data_value, + max_depth_in_world: max_data_value / depth_from_world_scale, depth_dimensions: dimensions, - depth_data: data, + depth_texture, colormap, outline_mask_id: entity_highlight.overall, picking_object_id: re_renderer::PickingLayerObjectId(ent_path.hash64()), + albedo_data, + albedo_dimensions, }); Ok(()) @@ -355,7 +434,7 @@ impl ScenePart for ImagesPart { ) { crate::profile_scope!("ImagesPart"); - for (ent_path, props) in query.iter_entities() { + for (ent_path, mut props) in query.iter_entities() { let Some(world_from_obj) = transforms.reference_from_entity(ent_path) else { continue; }; @@ -375,7 +454,7 @@ impl ScenePart for ImagesPart { scene, ctx, transforms, - &props, + &mut props, ent_path, world_from_obj, highlights, diff --git a/crates/re_viewer/src/ui/view_spatial/space_camera_3d.rs b/crates/re_viewer/src/ui/view_spatial/space_camera_3d.rs index f18d17a504a2..c429cbcaa5cf 100644 --- a/crates/re_viewer/src/ui/view_spatial/space_camera_3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/space_camera_3d.rs @@ -6,7 +6,7 @@ use re_log_types::{EntityPath, ViewCoordinates}; /// A logged camera that connects spaces. #[derive(Clone)] pub struct SpaceCamera3D { - /// Path to the entity which has the projection (pinhole, ortho or otherwise) transforms. + /// Path to the instance which has the projection (pinhole, ortho or otherwise) transforms. 
/// /// We expect the camera transform to apply to this instance and every path below it. pub ent_path: EntityPath, diff --git a/crates/re_viewer/src/ui/view_spatial/ui.rs b/crates/re_viewer/src/ui/view_spatial/ui.rs index 49209bb1b32e..bd2daf058b1e 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui.rs @@ -5,7 +5,7 @@ use re_format::format_f32; use egui::{NumExt, WidgetText}; use macaw::BoundingBox; use re_log_types::component_types::{Tensor, TensorDataMeaning}; -use re_renderer::OutlineConfig; +use re_renderer::{Colormap, OutlineConfig}; use crate::{ misc::{ @@ -30,6 +30,7 @@ use super::{ }; /// Describes how the scene is navigated, determining if it is a 2D or 3D experience. + #[derive(Debug, Clone, Copy, Default, PartialEq, Eq, serde::Deserialize, serde::Serialize)] pub enum SpatialNavigationMode { #[default] @@ -233,6 +234,40 @@ impl ViewSpatialState { properties.backproject_radius_scale = EditableAutoValue::Auto(1.0); } + let colormap = match *properties.color_mapper.get() { + re_data_store::ColorMapper::Colormap(colormap) => match colormap { + re_data_store::Colormap::Grayscale => Colormap::Grayscale, + re_data_store::Colormap::Turbo => Colormap::Turbo, + re_data_store::Colormap::Viridis => Colormap::Viridis, + re_data_store::Colormap::Plasma => Colormap::Plasma, + re_data_store::Colormap::Magma => Colormap::Magma, + re_data_store::Colormap::Inferno => Colormap::Inferno, + }, + re_data_store::ColorMapper::AlbedoTexture => Colormap::AlbedoTexture, + }; + // Set albedo texture if it is not set yet + if colormap == Colormap::AlbedoTexture && properties.albedo_texture.is_none() { + let mut tex_ep = None; + if let Some(tree) = entity_path + .parent() + .and_then(|path| ctx.log_db.entity_db.tree.subtree(&path)) + { + tree.visit_children_recursively(&mut |ent_path| { + if tex_ep.is_some() { + return; + } + let Some(tensor) = + query_latest_single::(&ctx.log_db.entity_db, ent_path, &ctx.current_query()) else { + 
return; + }; + if tensor.is_shaped_like_an_image() { + tex_ep = Some(ent_path.clone()); + } + }); + properties.albedo_texture = tex_ep; + } + } + data_blueprint .data_blueprints_individual() .set(entity_path.clone(), properties); @@ -421,6 +456,7 @@ impl ViewSpatialState { ); } SpatialNavigationMode::TwoD => { + self.scene_bbox_accum = self.scene_bbox; let scene_rect_accum = egui::Rect::from_min_max( self.scene_bbox_accum.min.truncate().to_array().into(), self.scene_bbox_accum.max.truncate().to_array().into(), @@ -754,28 +790,29 @@ pub fn picking( let picked_image_with_coords = if hit.hit_type == PickingHitType::TexturedRect || *ent_properties.backproject_depth.get() { - scene - .ui - .images - .iter() - .find(|image| image.ent_path == instance_path.entity_path) - .and_then(|image| { - // If we're here because of back-projection, but this wasn't actually a depth image, drop out. - // (the back-projection property may be true despite this not being a depth image!) - if hit.hit_type != PickingHitType::TexturedRect - && *ent_properties.backproject_depth.get() - && image.tensor.meaning != TensorDataMeaning::Depth - { - return None; - } - image.tensor.image_height_width_channels().map(|[_, w, _]| { + query_latest_single::( + &ctx.log_db.entity_db, + &instance_path.entity_path, + &ctx.current_query(), + ) + .and_then(|tensor| { + // If we're here because of back-projection, but this wasn't actually a depth image, drop out. + // (the back-projection property may be true despite this not being a depth image!) 
+ if hit.hit_type != PickingHitType::TexturedRect + && *ent_properties.backproject_depth.get() + && tensor.meaning != TensorDataMeaning::Depth + { + None + } else { + tensor.image_height_width_channels().map(|[_, w, _]| { let coordinates = hit .instance_path_hash .instance_key .to_2d_image_coordinate(w); - (image, coordinates) + (tensor, coordinates) }) - }) + } + }) } else { None }; @@ -789,9 +826,9 @@ pub fn picking( instance_path.clone(), )); - response = if let Some((image, coords)) = picked_image_with_coords { - if let Some(meter) = image.meter { - if let Some(raw_value) = image.tensor.get(&[ + response = if let Some((tensor, coords)) = picked_image_with_coords { + if let Some(meter) = tensor.meter { + if let Some(raw_value) = tensor.get(&[ picking_context.pointer_in_space2d.y.round() as _, picking_context.pointer_in_space2d.x.round() as _, ]) { @@ -815,10 +852,10 @@ pub fn picking( &ctx.current_query(), ); - if let [h, w, ..] = image.tensor.shape() { + if let Some([h, w, ..]) = tensor.image_height_width_channels() { ui.separator(); ui.horizontal(|ui| { - let (w, h) = (w.size as f32, h.size as f32); + let (w, h) = (w as f32, h as f32); if *state.nav_mode.get() == SpatialNavigationMode::TwoD { let rect = egui::Rect::from_min_size( egui::Pos2::ZERO, @@ -826,24 +863,30 @@ pub fn picking( ); data_ui::image::show_zoomed_image_region_area_outline( ui, - &image.tensor, + &tensor, [coords[0] as _, coords[1] as _], space_from_ui.inverse().transform_rect(rect), ); } - let tensor_stats = *ctx.cache.tensor_stats(&image.tensor); - let debug_name = image.ent_path.to_string(); - data_ui::image::show_zoomed_image_region( - ctx.render_ctx, - ui, - &image.tensor, - &tensor_stats, - &image.annotations, - image.meter, - &debug_name, - [coords[0] as _, coords[1] as _], - ); + let tensor_name = instance_path.to_string(); + match ctx.cache.decode.try_decode_tensor_if_necessary(tensor) { + Ok(decoded_tensor) => + data_ui::image::show_zoomed_image_region( + ctx.render_ctx, + ui, + 
&decoded_tensor, + ctx.cache.tensor_stats(&decoded_tensor), + &scene.annotation_map.find(&instance_path.entity_path), + decoded_tensor.meter, + &tensor_name, + [coords[0] as _, coords[1] as _], + ), + Err(err) => + re_log::warn_once!( + "Encountered problem decoding tensor at path {tensor_name}: {err}" + ), + } }); } }); diff --git a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs index 295a1e05dec3..dfcbeb6b6d40 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs @@ -289,7 +289,7 @@ pub fn view_3d( state .state_3d .update_eye(&response, &state.scene_bbox_accum, &scene.space_cameras); - let did_interact_with_eye = orbit_eye.interact(&response, orbit_eye_drag_threshold); + let did_interact_with_eye = orbit_eye.update(&response, orbit_eye_drag_threshold); let orbit_eye = *orbit_eye; let eye = orbit_eye.to_eye(); @@ -453,6 +453,26 @@ pub fn view_3d( } } + if state.state_3d.show_bbox { + let bbox = scene.primitives.bounding_box(); + if bbox.is_something() && bbox.is_finite() { + let scale = bbox.size(); + let translation = bbox.center(); + let bbox_from_unit_cube = glam::Affine3A::from_scale_rotation_translation( + scale, + Default::default(), + translation, + ); + scene + .primitives + .line_strips + .batch("scene_bbox") + .add_box_outline(bbox_from_unit_cube) + .radius(Size::AUTO) + .color(egui::Color32::WHITE); + } + } + { let orbit_center_alpha = egui::remap_clamp( ui.input(|i| i.time) - state.state_3d.last_eye_interact_time, @@ -633,14 +653,10 @@ fn default_eye(scene_bbox: &macaw::BoundingBox, space_specs: &SpaceSpecs) -> Orb let eye_pos = center - radius * look_dir; - OrbitEye { - orbit_center: center, - orbit_radius: radius, - world_from_view_rot: Quat::from_affine3( - &Affine3A::look_at_rh(eye_pos, center, look_up).inverse(), - ), - fov_y: Eye::DEFAULT_FOV_Y, - up: space_specs.up.unwrap_or(Vec3::ZERO), - velocity: Vec3::ZERO, - } + OrbitEye::new( + center, + 
radius, + Quat::from_affine3(&Affine3A::look_at_rh(eye_pos, center, look_up).inverse()), + space_specs.up.unwrap_or(Vec3::ZERO), + ) } diff --git a/crates/re_viewer/src/ui/view_spatial/ui_renderer_bridge.rs b/crates/re_viewer/src/ui/view_spatial/ui_renderer_bridge.rs index 513abdf82430..e02885290f73 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_renderer_bridge.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_renderer_bridge.rs @@ -32,7 +32,11 @@ pub fn fill_view_builder( .queue_draw(&primitives.points.to_draw_data(render_ctx)?) .queue_draw(&RectangleDrawData::new( render_ctx, - &primitives.textured_rectangles, + &primitives + .images + .iter() + .map(|image| image.textured_rect.clone()) + .collect::>(), )?); if matches!(background, ScreenBackground::GenericSkybox) { diff --git a/crates/re_viewer/src/ui/view_tensor/tensor_slice_to_gpu.rs b/crates/re_viewer/src/ui/view_tensor/tensor_slice_to_gpu.rs index 761c46d5da77..887471a9e519 100644 --- a/crates/re_viewer/src/ui/view_tensor/tensor_slice_to_gpu.rs +++ b/crates/re_viewer/src/ui/view_tensor/tensor_slice_to_gpu.rs @@ -1,5 +1,8 @@ use re_log_types::{component_types::TensorCastError, DecodedTensor, TensorDataType}; -use re_renderer::{renderer::ColormappedTexture, resource_managers::Texture2DCreationDesc}; +use re_renderer::{ + renderer::ColormappedTexture, + resource_managers::{GpuTexture2D, Texture2DCreationDesc, TextureManager2DError}, +}; use crate::{ gpu_bridge::{range, RangeError}, @@ -28,10 +31,11 @@ pub fn colormapped_texture( tensor: &DecodedTensor, tensor_stats: &TensorStats, state: &ViewTensorState, -) -> Result { +) -> Result> { crate::profile_function!(); - let range = range(tensor_stats)?; + let range = + range(tensor_stats).map_err(|err| TextureManager2DError::DataCreation(err.into()))?; let texture = upload_texture_slice_to_gpu(render_ctx, tensor, state.slice())?; let color_mapping = state.color_mapping(); @@ -50,7 +54,7 @@ fn upload_texture_slice_to_gpu( render_ctx: &mut 
re_renderer::RenderContext, tensor: &DecodedTensor, slice_selection: &SliceSelection, -) -> Result { +) -> Result> { let id = egui::util::hash((tensor.id(), slice_selection)); crate::gpu_bridge::try_get_or_create_texture(render_ctx, id, || { diff --git a/crates/re_viewer/src/ui/view_tensor/ui.rs b/crates/re_viewer/src/ui/view_tensor/ui.rs index b26e2ba2f03b..b1c6066df1fa 100644 --- a/crates/re_viewer/src/ui/view_tensor/ui.rs +++ b/crates/re_viewer/src/ui/view_tensor/ui.rs @@ -27,7 +27,7 @@ pub struct SliceSelection { pub selector_values: BTreeMap, } -#[derive(Clone, serde::Deserialize, serde::Serialize)] +#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)] pub struct ViewTensorState { /// What slice are we vieiwing? slice: SliceSelection, @@ -321,7 +321,8 @@ fn paint_colormap_gradient( width, height, } - }); + }) + .map_err(|err| anyhow::anyhow!("Failed to create horizontal gradient texture: {err}"))?; let colormapped_texture = re_renderer::renderer::ColormappedTexture { texture: horizontal_gradient, diff --git a/crates/re_viewer/src/ui/view_time_series/ui.rs b/crates/re_viewer/src/ui/view_time_series/ui.rs index c2d0d52861d3..68553847af5b 100644 --- a/crates/re_viewer/src/ui/view_time_series/ui.rs +++ b/crates/re_viewer/src/ui/view_time_series/ui.rs @@ -82,9 +82,10 @@ pub(crate) fn view_time_series( plot = plot.x_grid_spacer(move |spacer| ns_grid_spacer(canvas_size, &spacer)); } - let egui::InnerResponse { + let egui::plot::PlotResponse { inner: time_x, response, + transform: _, } = plot.show(ui, |plot_ui| { if plot_ui.plot_secondary_clicked() { let timeline = ctx.rec_cfg.time_ctrl.timeline(); diff --git a/crates/re_viewer/src/ui/viewport.rs b/crates/re_viewer/src/ui/viewport.rs index 5f4a0073b992..579732a6f7a6 100644 --- a/crates/re_viewer/src/ui/viewport.rs +++ b/crates/re_viewer/src/ui/viewport.rs @@ -116,8 +116,38 @@ impl Viewport { .copied() .collect_vec(); + let visible_space_views = &self.visible; + let all_entities = 
visible_space_views.iter().map(|space_view_id| { + self.space_views + .get(space_view_id) + .unwrap() + .data_blueprint + .entity_paths() + }); + if !self.visible.is_empty() { + ctx.depthai_state.set_subscriptions_from_space_views( + self.space_views + .values() + .filter(|space_view| self.visible.contains(&space_view.id)) + .collect_vec(), + ); + } + for space_view_id in &space_view_ids { self.space_view_entry_ui(ctx, ui, space_view_id); + if let Some(space_view) = self.space_views.get_mut(space_view_id) { + if let Some(group) = space_view + .data_blueprint + .group(space_view.data_blueprint.root_handle()) + { + ctx.depthai_state + .entities_to_remove(&group.entities.clone()) + .iter() + .for_each(|ep| { + space_view.data_blueprint.remove_entity(ep); + }); + } + } } }); } @@ -138,6 +168,10 @@ impl Viewport { re_log::warn_once!("Bug: asked to show a ui for a Space View that doesn't exist"); return; }; + if space_view.data_blueprint.entity_paths().is_empty() { + self.remove(space_view_id); + return; + } debug_assert_eq!(space_view.id, *space_view_id); let mut visibility_changed = false; @@ -362,6 +396,7 @@ impl Viewport { spaces_info: &SpaceInfoCollection, ) { crate::profile_function!(); + // TODO(filip): Add back entities that were removed from the space view if they are available again for space_view in self.space_views.values_mut() { space_view.on_frame_start(ctx, spaces_info); @@ -374,6 +409,28 @@ impl Viewport { } } } + + let possible_space_views = default_created_space_views(ctx, spaces_info); + let mut entity_paths_added = Vec::new(); + for entity_path in ctx.depthai_state.new_auto_add_entity_paths.iter() { + for space_view in possible_space_views.iter().filter_map(|(space_view)| { + if space_view.data_blueprint.contains_entity(entity_path) { + entity_paths_added.push(entity_path.clone()); + Some(space_view.clone()) + } else { + None + } + }) { + self.add_space_view(space_view); + } + } + ctx.depthai_state.new_auto_add_entity_paths = ctx + .depthai_state + 
.new_auto_add_entity_paths + .iter() + .filter(|ep| !entity_paths_added.contains(ep)) + .cloned() + .collect_vec(); } fn should_auto_add_space_view(&self, space_view_candidate: &SpaceView) -> bool { @@ -458,6 +515,7 @@ impl Viewport { ui.spacing_mut().item_spacing.x = re_ui::ReUi::view_padding(); egui_dock::DockArea::new(tree) + .id(egui::Id::new("space_view_dock")) .style(re_ui::egui_dock_style(ui.style())) .show_inside(ui, &mut tab_viewer); }); @@ -706,6 +764,7 @@ fn help_text_ui(ui: &mut egui::Ui, space_view: &SpaceView) { ViewCategory::BarChart => Some(crate::ui::view_bar_chart::HELP_TEXT), ViewCategory::Spatial => Some(space_view.view_state.state_spatial.help_text()), ViewCategory::Text | ViewCategory::Tensor => None, + ViewCategory::NodeGraph => None, }; if let Some(help_text) = help_text { diff --git a/crates/re_viewer/src/viewer_analytics.rs b/crates/re_viewer/src/viewer_analytics.rs index 8269f3c19cc4..02727b590127 100644 --- a/crates/re_viewer/src/viewer_analytics.rs +++ b/crates/re_viewer/src/viewer_analytics.rs @@ -1,4 +1,4 @@ -//! All telemetry analytics collected by the Rerun Viewer are defined in this file for easy auditing. +//! All telemetry analytics collected by the Depthai Viewer are defined in this file for easy auditing. //! //! There are two exceptions: //! * `crates/rerun/src/crash_handler.rs` sends anonymized callstacks on crashes @@ -67,7 +67,7 @@ impl ViewerAnalytics { // ---------------------------------------------------------------------------- -/// Here follows all the analytics collected by the Rerun Viewer. +/// Here follows all the analytics collected by the Depthai Viewer. 
#[cfg(all(not(target_arch = "wasm32"), feature = "analytics"))] impl ViewerAnalytics { /// When the viewer is first started diff --git a/crates/re_viewer/src/web.rs b/crates/re_viewer/src/web.rs index 58dcec080ce2..65cf3312ce8c 100644 --- a/crates/re_viewer/src/web.rs +++ b/crates/re_viewer/src/web.rs @@ -24,8 +24,9 @@ pub async fn start( let web_options = eframe::WebOptions { follow_system_theme: false, - default_theme: eframe::Theme::Dark, + default_theme: eframe::Theme::Light, wgpu_options: crate::wgpu_options(), + depth_buffer: 0, }; eframe::start_web( @@ -38,7 +39,7 @@ pub async fn start( let startup_options = crate::StartupOptions { memory_limit: re_memory::MemoryLimit { // On wasm32 we only have 4GB of memory to play around with. - limit: Some(3_500_000_000), + limit: Some(2_500_000_000), }, persist_state, }; diff --git a/crates/re_web_viewer_server/build.rs b/crates/re_web_viewer_server/build.rs index b072925e41e5..79b8fe048136 100644 --- a/crates/re_web_viewer_server/build.rs +++ b/crates/re_web_viewer_server/build.rs @@ -89,19 +89,29 @@ impl<'a> Packages<'a> { } } +fn get_and_track_env_var(env_var_name: &str) -> Result { + println!("cargo:rerun-if-env-changed={env_var_name}"); + std::env::var(env_var_name) +} + +fn is_tracked_env_var_set(env_var_name: &str) -> bool { + let var = get_and_track_env_var(env_var_name).map(|v| v.to_lowercase()); + var == Ok("1".to_owned()) || var == Ok("yes".to_owned()) || var == Ok("true".to_owned()) +} + fn main() { - if std::env::var("IS_IN_RERUN_WORKSPACE") != Ok("yes".to_owned()) { + if !is_tracked_env_var_set("IS_IN_RERUN_WORKSPACE") { // Only run if we are in the rerun workspace, not on users machines. return; } - if std::env::var("RERUN_IS_PUBLISHING") == Ok("yes".to_owned()) { + if is_tracked_env_var_set("RERUN_IS_PUBLISHING") { // We don't need to rebuild - we should have done so beforehand! 
// See `RELEASES.md` return; } // Rebuild the web-viewer Wasm, - // because the web_server library bundles it with `include_bytes!` + // because the web_server library bundles it with `include_bytes!`. let metadata = MetadataCommand::new() .features(CargoOpt::AllFeatures) @@ -118,12 +128,12 @@ fn main() { // or patched!). pkgs.track_implicit_dep("re_viewer"); - if std::env::var("CARGO_FEATURE___CI").is_ok() { + if get_and_track_env_var("CARGO_FEATURE___CI").is_ok() { // If the `__ci` feature is set we skip building the web viewer wasm, saving a lot of time. // This feature is set on CI (hence the name), but also with `--all-features`, which is set by rust analyzer, bacon, etc. eprintln!("__ci feature detected: Skipping building of web viewer wasm."); } else { let release = std::env::var("PROFILE").unwrap() == "release"; - re_build_web_viewer::build(release); + re_build_web_viewer::build(release, is_tracked_env_var_set("RERUN_BUILD_WEBGPU")); } } diff --git a/crates/re_web_viewer_server/src/lib.rs b/crates/re_web_viewer_server/src/lib.rs index 4d8f29172588..e6964931c490 100644 --- a/crates/re_web_viewer_server/src/lib.rs +++ b/crates/re_web_viewer_server/src/lib.rs @@ -211,7 +211,7 @@ impl WebViewerServer { pub fn new(port: WebViewerServerPort) -> Result { let bind_addr = format!("0.0.0.0:{port}").parse()?; let server = hyper::Server::try_bind(&bind_addr) - .map_err(|e| WebViewerServerError::BindFailed(port, e))? + .map_err(|err| WebViewerServerError::BindFailed(port, err))? 
.serve(MakeSvc); Ok(Self { server }) } diff --git a/crates/re_ws_comms/src/server.rs b/crates/re_ws_comms/src/server.rs index d6fde09d2851..d8b3e4acc900 100644 --- a/crates/re_ws_comms/src/server.rs +++ b/crates/re_ws_comms/src/server.rs @@ -34,7 +34,7 @@ impl RerunServer { let listener = TcpListener::bind(&bind_addr) .await - .map_err(|e| RerunServerError::BindFailed(port, e))?; + .map_err(|err| RerunServerError::BindFailed(port, err))?; let port = RerunServerPort(listener.local_addr()?.port()); diff --git a/crates/rerun/Cargo.toml b/crates/rerun/Cargo.toml index 15e31264d47f..644912219ab4 100644 --- a/crates/rerun/Cargo.toml +++ b/crates/rerun/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "rerun" authors.workspace = true -default-run = "rerun" +default-run = "depthai-viewer" description = "Log images, point clouds, etc, and visualize them effortlessly" edition.workspace = true homepage.workspace = true @@ -17,6 +17,9 @@ version.workspace = true all-features = true targets = ["x86_64-unknown-linux-gnu", "wasm32-unknown-unknown"] +[[bin]] +name = "depthai-viewer" +path = "src/main.rs" [features] default = ["analytics", "glam", "image", "native_viewer", "server", "sdk"] diff --git a/crates/rerun/src/lib.rs b/crates/rerun/src/lib.rs index ae017c1c4fd9..f9193b9448a7 100644 --- a/crates/rerun/src/lib.rs +++ b/crates/rerun/src/lib.rs @@ -13,7 +13,7 @@ //! - [Rust API docs](https://docs.rs/rerun/) //! - [Troubleshooting](https://www.rerun.io/docs/getting-started/troubleshooting) //! -//! There are many different ways of sending data to the Rerun Viewer depending on what you're +//! There are many different ways of sending data to the Depthai Viewer depending on what you're //! trying to achieve and whether the viewer is running in the same process as your code, in //! another process, or even as a separate web application. //! 
diff --git a/crates/rerun/src/native_viewer.rs b/crates/rerun/src/native_viewer.rs index e1d5fc1de4e0..478548501ef7 100644 --- a/crates/rerun/src/native_viewer.rs +++ b/crates/rerun/src/native_viewer.rs @@ -31,6 +31,7 @@ where .spawn(move || run(session)) .expect("Failed to spawn thread"); + // NOTE: Some platforms still mandate that the UI must run on the main thread, so make sure // to spawn the viewer in place and migrate the user callback to a new thread. re_viewer::run_native_app(Box::new(move |cc, re_ui| { diff --git a/crates/rerun/src/run.rs b/crates/rerun/src/run.rs index 979c05705ff7..f2d9daee9fe3 100644 --- a/crates/rerun/src/run.rs +++ b/crates/rerun/src/run.rs @@ -15,7 +15,7 @@ use crate::web_viewer::host_web_viewer; // Note the extra blank lines between the point-lists below: it is required by `clap`. -/// The Rerun Viewer and Server +/// The Depthai Viewer and Server /// /// Features: /// @@ -441,7 +441,7 @@ async fn run_impl( app.set_profiler(profiler); Box::new(app) })) - .map_err(|e| e.into()); + .map_err(|err| err.into()); #[cfg(not(feature = "native_viewer"))] { diff --git a/examples/.gitignore b/examples/.gitignore new file mode 100644 index 000000000000..76e49aa15754 --- /dev/null +++ b/examples/.gitignore @@ -0,0 +1 @@ +out.rrd diff --git a/examples/python/README.md b/examples/python/README.md index 7cfc35518eea..4f41a9caf564 100644 --- a/examples/python/README.md +++ b/examples/python/README.md @@ -1,16 +1,19 @@ # Rerun Python Examples + The simplest example is [`minimal`](minimal/main.py). You may want to start there! Read more about our examples at . ## Setup -First install the Rerun Python SDK with `pip install rerun-sdk` + +First install the Rerun Python SDK with `pip install depthai-viewer` > Note: Make sure your SDK version matches the code in the examples. -For example, if your SDK version is `0.4.0`, check out the matching tag -for this repository by running `git checkout v0.4.0`. 
+> For example, if your SDK version is `0.4.0`, check out the matching tag +> for this repository by running `git checkout v0.4.0`. ## Dependencies + Each example comes with its own set of dependencies listed in a `requirements.txt` file. For example, to install dependencies and run the toy `car` example (which doesn't need to download any data) run: ```sh @@ -25,6 +28,7 @@ pip install -r examples/python/requirements.txt ``` ## Running the examples + By default, the examples spawn a Rerun Viewer and stream log data to it. You can instead save the log data to an `.rrd` file using `examples/python/car/main.py --save data.rrd`. You can then open that `.rrd` file with `rerun data.rrd`. @@ -34,9 +38,11 @@ You can instead save the log data to an `.rrd` file using `examples/python/car/m NOTE: `.rrd` files do not yet guarantee any backwards or forwards compatibility. One version of Rerun will likely not be able to open an `.rrd` file generated by another Rerun version. ## Datasets + Some examples will download a small datasets before they run. They will do so the first time you run the example. The datasets will be added to a subdir called `dataset`, which is in the repo-wide `.gitignore`. ## Contributions welcome + Feel free to open a PR to add a new example! See [`CONTRIBUTING.md`](../CONTRIBUTING.md) for details on how to contribute. 
diff --git a/examples/python/api_demo/main.py b/examples/python/api_demo/main.py index aff55a99bab8..eb79c9928d14 100755 --- a/examples/python/api_demo/main.py +++ b/examples/python/api_demo/main.py @@ -333,7 +333,8 @@ def main() -> None: ) rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "api_demo") diff --git a/examples/python/api_demo/requirements.txt b/examples/python/api_demo/requirements.txt index 49dd44f9f951..1d84a2910cbd 100644 --- a/examples/python/api_demo/requirements.txt +++ b/examples/python/api_demo/requirements.txt @@ -1,4 +1,4 @@ numpy opencv-python -rerun-sdk +depthai-viewer scipy diff --git a/examples/python/arkitscenes/download_dataset.py b/examples/python/arkitscenes/download_dataset.py index d2f5e2cbe62a..de0e07634e31 100644 --- a/examples/python/arkitscenes/download_dataset.py +++ b/examples/python/arkitscenes/download_dataset.py @@ -3,6 +3,7 @@ import math import os import subprocess +import zipfile from pathlib import Path from typing import Final, List, Optional @@ -121,9 +122,9 @@ def download_file(url: str, file_name: str, dst: Path) -> bool: def unzip_file(file_name: str, dst: Path, keep_zip: bool = True) -> bool: filepath = os.path.join(dst, file_name) print(f"Unzipping zip file {filepath}") - command = f"unzip -oq {filepath} -d {dst}" try: - subprocess.check_call(command, shell=True) + with zipfile.ZipFile(filepath, "r") as zip_ref: + zip_ref.extractall(dst) except Exception as error: print(f"Error unzipping {filepath}, error: {error}") return False @@ -306,6 +307,7 @@ def ensure_recording_available(video_id: str, include_highres: bool) -> Path: Returns the path to the recording for a given video_id. Args: + ---- video_id (str): Identifier for the recording. 
Returns diff --git a/examples/python/arkitscenes/main.py b/examples/python/arkitscenes/main.py index e05277eff918..28b2f1a0c02b 100755 --- a/examples/python/arkitscenes/main.py +++ b/examples/python/arkitscenes/main.py @@ -2,7 +2,7 @@ import argparse import json import os -from pathlib import Path +from pathlib import Path, PosixPath from typing import Any, Dict, List, Tuple import cv2 @@ -251,7 +251,7 @@ def log_camera( rr.log_rigid3( # pathlib makes it easy to get the parent, but log_rigid requires a string - str(Path(entity_id).parent), + str(PosixPath(entity_id).parent), child_from_parent=camera_from_world, xyz="RDF", # X=Right, Y=Down, Z=Forward ) @@ -263,6 +263,7 @@ def read_camera_from_world(traj_string: str) -> Tuple[str, Tuple[npt.NDArray[np. Reads out camera_from_world transform from trajectory string. Args: + ---- traj_string: A space-delimited file where each line represents a camera position at a particular timestamp. The file has seven columns: * Column 1: timestamp @@ -309,6 +310,7 @@ def log_arkit(recording_path: Path, include_highres: bool) -> None: Logs ARKit recording data using Rerun. Args: + ---- recording_path (Path): The path to the ARKit recording. 
Returns @@ -431,7 +433,8 @@ def main() -> None: help="Include the high resolution camera and depth images", ) rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "arkitscenes") recording_path = ensure_recording_available(args.video_id, args.include_highres) diff --git a/examples/python/arkitscenes/requirements.txt b/examples/python/arkitscenes/requirements.txt index 0629c41c76b3..bf77809429fe 100644 --- a/examples/python/arkitscenes/requirements.txt +++ b/examples/python/arkitscenes/requirements.txt @@ -1,7 +1,7 @@ numpy opencv-python pandas -rerun-sdk +depthai-viewer scipy tqdm trimesh diff --git a/examples/python/car/main.py b/examples/python/car/main.py index 8745f064a026..a86bf23c70ec 100755 --- a/examples/python/car/main.py +++ b/examples/python/car/main.py @@ -255,7 +255,8 @@ def generate_car_data(num_frames: int) -> Iterator[SampleFrame]: def main() -> None: parser = argparse.ArgumentParser(description="Logs rich data using the Rerun SDK.") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "car") log_car_data() diff --git a/examples/python/car/requirements.txt b/examples/python/car/requirements.txt index 13a25792d4ee..554e9b09870d 100644 --- a/examples/python/car/requirements.txt +++ b/examples/python/car/requirements.txt @@ -1,3 +1,3 @@ numpy opencv-python<4.6 # Avoid opencv-4.6 since it rotates images incorrectly (https://github.com/opencv/opencv/issues/22088) -rerun-sdk +depthai-viewer diff --git a/examples/python/clock/main.py b/examples/python/clock/main.py index 1eff8821373c..4501a0ad23db 100755 --- a/examples/python/clock/main.py +++ b/examples/python/clock/main.py @@ -70,7 +70,8 @@ def rotate(angle: float, len: float) -> Tuple[float, float, float]: ) 
parser.add_argument("--steps", type=int, default=10_000, help="The number of time steps to log") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "clock") log_clock(args.steps) diff --git a/examples/python/clock/requirements.txt b/examples/python/clock/requirements.txt index fa4ff5da669a..24975fd48cc3 100644 --- a/examples/python/clock/requirements.txt +++ b/examples/python/clock/requirements.txt @@ -1,2 +1,2 @@ numpy -rerun-sdk +depthai-viewer diff --git a/examples/python/colmap/main.py b/examples/python/colmap/main.py index cb83a0e800ea..832d68f89346 100755 --- a/examples/python/colmap/main.py +++ b/examples/python/colmap/main.py @@ -182,7 +182,8 @@ def main() -> None: ) parser.add_argument("--resize", action="store", help="Target resolution to resize images") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] if args.resize: args.resize = tuple(int(x) for x in args.resize.split("x")) diff --git a/examples/python/colmap/requirements.txt b/examples/python/colmap/requirements.txt index 4f65e12c4d02..f7d4d3ff824f 100644 --- a/examples/python/colmap/requirements.txt +++ b/examples/python/colmap/requirements.txt @@ -1,5 +1,5 @@ opencv-python<4.6 # Avoid opencv-4.6 since it rotates images incorrectly (https://github.com/opencv/opencv/issues/22088) numpy requests==2.28.1 -rerun-sdk +depthai-viewer tqdm diff --git a/examples/python/deep_sdf/main.py b/examples/python/deep_sdf/main.py index 7bc91a827242..94a3fea77595 100755 --- a/examples/python/deep_sdf/main.py +++ b/examples/python/deep_sdf/main.py @@ -204,7 +204,8 @@ def main() -> None: help="Path to a mesh to analyze. 
If set, overrides the `--mesh` argument.", ) rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "deep_sdf") diff --git a/examples/python/deep_sdf/requirements.txt b/examples/python/deep_sdf/requirements.txt index bc5299798aa4..c13df7f10912 100644 --- a/examples/python/deep_sdf/requirements.txt +++ b/examples/python/deep_sdf/requirements.txt @@ -1,6 +1,6 @@ mesh_to_sdf===0.0.14 numpy requests==2.28.1 -rerun-sdk +depthai-viewer scikit-learn==1.1.3 trimesh==3.15.2 diff --git a/examples/python/dicom/main.py b/examples/python/dicom/main.py index 7b35e94baf85..892b43b39434 100755 --- a/examples/python/dicom/main.py +++ b/examples/python/dicom/main.py @@ -74,7 +74,8 @@ def ensure_dataset_downloaded() -> Iterable[Path]: if __name__ == "__main__": parser = argparse.ArgumentParser(description="Logs rich data using the Rerun SDK.") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "dicom") dicom_files = ensure_dataset_downloaded() read_and_log_dicom_dataset(dicom_files) diff --git a/examples/python/dicom/requirements.txt b/examples/python/dicom/requirements.txt index e6a8f0befd54..84e50f295110 100644 --- a/examples/python/dicom/requirements.txt +++ b/examples/python/dicom/requirements.txt @@ -2,5 +2,5 @@ dicom_numpy==0.6.2 numpy pydicom==2.3.0 requests==2.28.1 -rerun-sdk +depthai-viewer types-requests==2.28.11 diff --git a/examples/python/dna/main.py b/examples/python/dna/main.py index 3294ad7980b9..a2d6603fc7bf 100755 --- a/examples/python/dna/main.py +++ b/examples/python/dna/main.py @@ -6,7 +6,6 @@ `examples/python/dna/main.py` """ -import sys from math import tau import numpy as np @@ -15,8 +14,8 @@ from rerun_demo.util import bounce_lerp, interleave from scipy.spatial.transform 
import Rotation -# sanity-check since all other example scripts take arguments: -assert len(sys.argv) == 1, f"{sys.argv[0]} does not take any arguments" +_, unknown = __import__("argparse").ArgumentParser().parse_known_args() +[__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.init("DNA Abacus") diff --git a/examples/python/dna/requirements.txt b/examples/python/dna/requirements.txt index 8349c316dcbb..02605412d748 100644 --- a/examples/python/dna/requirements.txt +++ b/examples/python/dna/requirements.txt @@ -1,3 +1,3 @@ numpy -rerun-sdk +depthai-viewer scipy diff --git a/examples/python/minimal/main.py b/examples/python/minimal/main.py index 2beff431ed9f..92e45a01b000 100755 --- a/examples/python/minimal/main.py +++ b/examples/python/minimal/main.py @@ -2,18 +2,16 @@ """Demonstrates the most barebone usage of the Rerun SDK.""" -import sys import numpy as np import rerun as rr +_, unknown = __import__("argparse").ArgumentParser().parse_known_args() +[__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] + rr.spawn() positions = np.vstack([xyz.ravel() for xyz in np.mgrid[3 * [slice(-5, 5, 10j)]]]).T colors = np.vstack([rgb.ravel() for rgb in np.mgrid[3 * [slice(0, 255, 10j)]]]).astype(np.uint8).T rr.log_points("my_points", positions=positions, colors=colors) - - -# sanity-check since all other example scripts take arguments: -assert len(sys.argv) == 1, f"{sys.argv[0]} does not take any arguments" diff --git a/examples/python/minimal/requirements.txt b/examples/python/minimal/requirements.txt index fa4ff5da669a..24975fd48cc3 100644 --- a/examples/python/minimal/requirements.txt +++ b/examples/python/minimal/requirements.txt @@ -1,2 +1,2 @@ numpy -rerun-sdk +depthai-viewer diff --git a/examples/python/mp_pose/main.py b/examples/python/mp_pose/main.py index c2d91117c631..89de0eb97f59 100755 --- a/examples/python/mp_pose/main.py +++ b/examples/python/mp_pose/main.py @@ -150,7 +150,8 @@ def main() -> None: 
parser.add_argument("--no-segment", action="store_true", help="Don't run person segmentation.") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "mp_pose") video_path = args.video_path # type: str diff --git a/examples/python/mp_pose/requirements.txt b/examples/python/mp_pose/requirements.txt index f13236af560e..26207c819626 100644 --- a/examples/python/mp_pose/requirements.txt +++ b/examples/python/mp_pose/requirements.txt @@ -3,4 +3,4 @@ mediapipe>=0.8.11; platform_system != "Darwin" and platform.machine != 'arm64' numpy opencv-python<4.6 # Avoid opencv-4.6 since it rotates images incorrectly (https://github.com/opencv/opencv/issues/22088) requests -rerun-sdk +depthai-viewer diff --git a/examples/python/multiprocessing/main.py b/examples/python/multiprocessing/main.py index 7261981cbd25..4c849bdbf8b0 100755 --- a/examples/python/multiprocessing/main.py +++ b/examples/python/multiprocessing/main.py @@ -25,13 +25,19 @@ def task(title: str) -> None: def main() -> None: parser = argparse.ArgumentParser(description="Test multi-process logging to the same Rerun server") - parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.init("multiprocessing") rr.spawn(connect=False) # this is the viewer that each process will connect to task("main_task") + # Using multiprocessing with "fork" results in a hang on shutdown so + # always use "spawn" + # TODO(https://github.com/rerun-io/rerun/issues/1921) + multiprocessing.set_start_method("spawn") + p = multiprocessing.Process(target=task, args=("child_task",)) p.start() p.join() diff --git a/examples/python/multiprocessing/requirements.txt b/examples/python/multiprocessing/requirements.txt index ebb847ff0d2d..a6bfff6cfe23 100644 --- a/examples/python/multiprocessing/requirements.txt +++ 
b/examples/python/multiprocessing/requirements.txt @@ -1 +1 @@ -rerun-sdk +depthai-viewer diff --git a/examples/python/multithreading/main.py b/examples/python/multithreading/main.py index eeb6f2ed9f49..61244e1722b3 100755 --- a/examples/python/multithreading/main.py +++ b/examples/python/multithreading/main.py @@ -22,15 +22,21 @@ def rect_logger(path: str, color: npt.NDArray[np.float32]) -> None: def main() -> None: parser = argparse.ArgumentParser(description="Logs rich data using the Rerun SDK.") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "multithreading") + threads = [] for i in range(10): t = threading.Thread( target=rect_logger, args=("thread/{}".format(i), [random.randrange(255) for _ in range(3)]) ) t.start() + threads.append(t) + + for t in threads: + t.join() rr.script_teardown(args) diff --git a/examples/python/multithreading/requirements.txt b/examples/python/multithreading/requirements.txt index fa4ff5da669a..24975fd48cc3 100644 --- a/examples/python/multithreading/requirements.txt +++ b/examples/python/multithreading/requirements.txt @@ -1,2 +1,2 @@ numpy -rerun-sdk +depthai-viewer diff --git a/examples/python/notebook/cube.ipynb b/examples/python/notebook/cube.ipynb index d04f1be19908..ba5f4d9585b7 100644 --- a/examples/python/notebook/cube.ipynb +++ b/examples/python/notebook/cube.ipynb @@ -177,7 +177,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "STEPS = 100\n", "twists = math.pi * np.sin(np.linspace(0, math.tau, STEPS)) / 4\n", "for t in range(STEPS):\n", diff --git a/examples/python/notebook/requirements.txt b/examples/python/notebook/requirements.txt index 1e5f56badec1..c6823954fce9 100644 --- a/examples/python/notebook/requirements.txt +++ b/examples/python/notebook/requirements.txt @@ -1,2 +1,2 @@ jupyter -rerun-sdk +depthai-viewer diff --git a/examples/python/nyud/main.py 
b/examples/python/nyud/main.py index da577c687d07..f35cbab3c523 100755 --- a/examples/python/nyud/main.py +++ b/examples/python/nyud/main.py @@ -160,7 +160,8 @@ def download_progress(url: str, dst: Path) -> None: ) parser.add_argument("--subset-idx", type=int, default=0, help="The index of the subset of the recording to use.") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "nyud") recording_path = ensure_recording_downloaded(args.recording) diff --git a/examples/python/nyud/requirements.txt b/examples/python/nyud/requirements.txt index 1732988759cd..e9c0ebefe4c2 100644 --- a/examples/python/nyud/requirements.txt +++ b/examples/python/nyud/requirements.txt @@ -1,5 +1,5 @@ numpy opencv-python<4.6 # Avoid opencv-4.6 since it rotates images incorrectly (https://github.com/opencv/opencv/issues/22088) requests -rerun-sdk +depthai-viewer tqdm diff --git a/examples/python/objectron/main.py b/examples/python/objectron/main.py index f4a8efc60a45..a3b2f1e57684 100755 --- a/examples/python/objectron/main.py +++ b/examples/python/objectron/main.py @@ -283,7 +283,8 @@ def main() -> None: ) rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "objectron") diff --git a/examples/python/objectron/requirements.txt b/examples/python/objectron/requirements.txt index a8d14bd1af00..154defde3963 100644 --- a/examples/python/objectron/requirements.txt +++ b/examples/python/objectron/requirements.txt @@ -2,5 +2,5 @@ betterproto[compiler] numpy opencv-python<4.6 # Avoid opencv-4.6 since it rotates images incorrectly (https://github.com/opencv/opencv/issues/22088) requests -rerun-sdk +depthai-viewer scipy diff --git a/examples/python/opencv_canny/main.py b/examples/python/opencv_canny/main.py index 
6220d7ef1c9f..0a6acc59851c 100755 --- a/examples/python/opencv_canny/main.py +++ b/examples/python/opencv_canny/main.py @@ -22,18 +22,22 @@ """ import argparse +from typing import Optional import cv2 import rerun as rr -def run_canny() -> None: +def run_canny(num_frames: Optional[int]) -> None: # Create a new video capture cap = cv2.VideoCapture(0) frame_nr = 0 while cap.isOpened(): + if num_frames and frame_nr >= num_frames: + break + # Read the frame ret, img = cap.read() if not ret: @@ -66,9 +70,11 @@ def main() -> None: parser.add_argument( "--device", type=int, default=0, help="Which camera device to use. (Passed to `cv2.VideoCapture()`)" ) + parser.add_argument("--num-frames", type=int, default=None, help="The number of frames to log") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "opencv_canny") @@ -93,7 +99,7 @@ def main() -> None: """ ) - run_canny() + run_canny(args.num_frames) rr.script_teardown(args) diff --git a/examples/python/opencv_canny/requirements.txt b/examples/python/opencv_canny/requirements.txt index 51d3a8cf0abf..2cabd331384f 100644 --- a/examples/python/opencv_canny/requirements.txt +++ b/examples/python/opencv_canny/requirements.txt @@ -1,2 +1,2 @@ opencv-python -rerun-sdk +depthai-viewer diff --git a/examples/python/plots/main.py b/examples/python/plots/main.py index 5f4d327fd746..065603126952 100755 --- a/examples/python/plots/main.py +++ b/examples/python/plots/main.py @@ -85,7 +85,8 @@ def main() -> None: description="demonstrates how to integrate python's native `logging` with the Rerun SDK" ) rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "plot") diff --git a/examples/python/plots/requirements.txt 
b/examples/python/plots/requirements.txt index fa4ff5da669a..24975fd48cc3 100644 --- a/examples/python/plots/requirements.txt +++ b/examples/python/plots/requirements.txt @@ -1,2 +1,2 @@ numpy -rerun-sdk +depthai-viewer diff --git a/examples/python/raw_mesh/main.py b/examples/python/raw_mesh/main.py index 5978060e0902..8ca94e08501f 100755 --- a/examples/python/raw_mesh/main.py +++ b/examples/python/raw_mesh/main.py @@ -84,7 +84,8 @@ def main() -> None: help="Path to a scene to analyze. If set, overrides the `--scene` argument.", ) rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "raw_mesh") diff --git a/examples/python/raw_mesh/requirements.txt b/examples/python/raw_mesh/requirements.txt index 746082ef66e9..d8b9987e3364 100644 --- a/examples/python/raw_mesh/requirements.txt +++ b/examples/python/raw_mesh/requirements.txt @@ -1,5 +1,5 @@ numpy requests==2.28.1 -rerun-sdk +depthai-viewer trimesh==3.15.2 pillow diff --git a/examples/python/ros/requirements.txt b/examples/python/ros/requirements.txt index 5c08038be119..c3abe45c4b0d 100644 --- a/examples/python/ros/requirements.txt +++ b/examples/python/ros/requirements.txt @@ -1,4 +1,4 @@ numpy opencv-python -rerun-sdk +depthai-viewer yourdfpy diff --git a/examples/python/segment_anything/main.py b/examples/python/segment_anything/main.py index ad2069840f54..e55b6611855c 100755 --- a/examples/python/segment_anything/main.py +++ b/examples/python/segment_anything/main.py @@ -181,7 +181,8 @@ def main() -> None: parser.add_argument("images", metavar="N", type=str, nargs="*", help="A list of images to process.") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "segment_anything") 
logging.getLogger().addHandler(rr.LoggingHandler("logs")) diff --git a/examples/python/segment_anything/requirements.txt b/examples/python/segment_anything/requirements.txt index 9c0dfad84fa8..23bf59b9dec8 100644 --- a/examples/python/segment_anything/requirements.txt +++ b/examples/python/segment_anything/requirements.txt @@ -2,7 +2,7 @@ numpy opencv-python requests -rerun-sdk +depthai-viewer torch torchvision tqdm diff --git a/examples/python/stable_diffusion/main.py b/examples/python/stable_diffusion/main.py index 9452ae3bd837..7e7e1d50c2cd 100755 --- a/examples/python/stable_diffusion/main.py +++ b/examples/python/stable_diffusion/main.py @@ -108,7 +108,8 @@ def main() -> None: ) rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "Depth Guided Stable Diffusion") diff --git a/examples/python/stable_diffusion/requirements.txt b/examples/python/stable_diffusion/requirements.txt index abc65b445d76..4582e9d86375 100644 --- a/examples/python/stable_diffusion/requirements.txt +++ b/examples/python/stable_diffusion/requirements.txt @@ -5,7 +5,7 @@ numpy packaging pillow requests==2.28.1 -rerun-sdk +depthai-viewer scipy torch>=1.13.0 transformers>=4.26.0 diff --git a/examples/python/text_logging/main.py b/examples/python/text_logging/main.py index c02a7ae3d6df..7e30ea3e5266 100755 --- a/examples/python/text_logging/main.py +++ b/examples/python/text_logging/main.py @@ -74,7 +74,8 @@ def main() -> None: ) parser.add_argument("--repeat", type=int, default=1, help="How many times do we want to run the log function?") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "text_logging") diff --git a/examples/python/text_logging/requirements.txt 
b/examples/python/text_logging/requirements.txt index ebb847ff0d2d..a6bfff6cfe23 100644 --- a/examples/python/text_logging/requirements.txt +++ b/examples/python/text_logging/requirements.txt @@ -1 +1 @@ -rerun-sdk +depthai-viewer diff --git a/examples/python/tracking_hf_opencv/main.py b/examples/python/tracking_hf_opencv/main.py index 2c783d0ba341..667af4034f67 100755 --- a/examples/python/tracking_hf_opencv/main.py +++ b/examples/python/tracking_hf_opencv/main.py @@ -393,7 +393,8 @@ def main() -> None: parser.add_argument("--dataset_dir", type=Path, default=DATASET_DIR, help="Directory to save example videos to.") parser.add_argument("--video_path", type=str, default="", help="Full path to video to run on. Overrides `--video`.") rr.script_add_args(parser) - args = parser.parse_args() + args, unknown = parser.parse_known_args() + [__import__("logging").warning(f"unknown arg: {arg}") for arg in unknown] rr.script_setup(args, "tracking_hf_opencv") diff --git a/examples/python/tracking_hf_opencv/requirements.txt b/examples/python/tracking_hf_opencv/requirements.txt index 6a9d1bb22a33..c4dfeec99773 100644 --- a/examples/python/tracking_hf_opencv/requirements.txt +++ b/examples/python/tracking_hf_opencv/requirements.txt @@ -3,7 +3,7 @@ opencv-contrib-python<4.6 # Avoid opencv-4.6 since it rotates images incorrectly opencv-python<4.6 # Avoid opencv-4.6 since it rotates images incorrectly (https://github.com/opencv/opencv/issues/22088) pillow requests==2.28.1 -rerun-sdk +depthai-viewer timm==0.6.11 torch>=1.13.0 transformers diff --git a/justfile b/justfile index 90a6b0deecff..c7301a1f1db7 100644 --- a/justfile +++ b/justfile @@ -31,11 +31,35 @@ py-dev-env: venv/bin/pip install -r rerun_py/requirements-lint.txt echo "Do 'source venv/bin/activate' to use the virtual environment!" 
-# Run all examples -py-run-all: py-build +# Run all examples with the specified args +py-run-all *ARGS: py-build #!/usr/bin/env bash set -euo pipefail - fd main.py | xargs -I _ sh -c "echo _ && python3 _" + find examples/python/ -name main.py | xargs -I _ sh -c 'cd $(dirname _) && echo $(pwd) && python3 main.py {{ARGS}} --num-frames=30 --steps=200' + +# Run all examples in the native viewer +py-run-all-native: py-run-all + +# Run all examples in the web viewer +py-run-all-web: + #!/usr/bin/env bash + set -euo pipefail + + function cleanup { + kill $(jobs -p) + } + trap cleanup EXIT + + cargo r -p rerun --all-features -- --web-viewer & + just py-run-all --connect + +# Run all examples, save them to disk as rrd, then view them natively +py-run-all-rrd *ARGS: + #!/usr/bin/env bash + set -euo pipefail + just py-run-all --save out.rrd + cargo r -p rerun --all-features -- + find examples/python/ -name main.py | xargs -I _ sh -c 'cd $(dirname _) && echo $(pwd) && cargo r -p rerun --all-features -- out.rrd' # Build and install the package into the venv py-build *ARGS: @@ -100,6 +124,12 @@ rs-lint: cargo doc --quiet --document-private-items --no-deps --all-features cargo test --quiet --doc --all-features # runs all doc-tests +# Run all examples with the specified args +rs-run-all *ARGS: + #!/usr/bin/env bash + set -euo pipefail + find examples/rust/ -name main.rs | xargs -I _ sh -c 'cd $(dirname _) && echo $(pwd) && cargo r' + ### TOML diff --git a/rerun_py/Cargo.toml b/rerun_py/Cargo.toml index a538ad7fba83..7ade8b14bb32 100644 --- a/rerun_py/Cargo.toml +++ b/rerun_py/Cargo.toml @@ -14,8 +14,8 @@ name = "rerun_bindings" # name of the .so library that the Python module will im [features] default = ["extension-module", "native_viewer"] -## The features we turn on when building the `rerun-sdk` PyPi package -## for . +## The features we turn on when building the `depthai-viewer` PyPi package +## for . 
pypi = ["extension-module", "native_viewer", "web_viewer"] ## We need to enable the `pyo3/extension-module` when building the SDK, diff --git a/rerun_py/README.md b/rerun_py/README.md index 9286cfa58153..8e628b549a1f 100644 --- a/rerun_py/README.md +++ b/rerun_py/README.md @@ -3,7 +3,6 @@ Rerun is an SDK for logging computer vision and robotics data paired with a visualizer for exploring that data over time. It lets you debug and understand the internal state and data of your systems with minimal code. -

Rerun Viewer

@@ -11,13 +10,14 @@ It lets you debug and understand the internal state and data of your systems wit ## Install ```sh -pip3 install rerun-sdk +pip3 install depthai-viewer ``` ℹ️ Note: -The Python module is called `rerun`, while the package published on PyPI is `rerun-sdk`. +The Python module is called `rerun`, while the package published on PyPI is `depthai-viewer`. ## Example + ```py import rerun as rr import numpy as np @@ -31,40 +31,44 @@ rr.log_points("my_points", positions=positions, colors=colors) ``` ## Resources -* [Quick start](https://www.rerun.io/docs/getting-started/python) -* [Python API docs](https://ref.rerun.io/docs/python) -* [Tutorial](https://www.rerun.io/docs/getting-started/logging-python) -* [Examples on GitHub](https://github.com/rerun-io/rerun/tree/latest/examples/python) -* [Troubleshooting](https://www.rerun.io/docs/getting-started/troubleshooting) -* [Discord Server](https://discord.com/invite/Gcm8BbTaAj) + +- [Quick start](https://www.rerun.io/docs/getting-started/python) +- [Python API docs](https://ref.rerun.io/docs/python) +- [Tutorial](https://www.rerun.io/docs/getting-started/logging-python) +- [Examples on GitHub](https://github.com/rerun-io/rerun/tree/latest/examples/python) +- [Troubleshooting](https://www.rerun.io/docs/getting-started/troubleshooting) +- [Discord Server](https://discord.com/invite/Gcm8BbTaAj) ## Logging and viewing in different processes You can run the viewer and logger in different processes. In one terminal, start up a viewer with a server that the SDK can connect to: + ```sh python3 -m rerun ``` In a second terminal, run the example with the `--connect` option: + ```sh python3 examples/python/car/main.py --connect ``` -------------------------- +--- # From Source Setup: -* Install the Rust toolchain: -* `git clone git@github.com:rerun-io/rerun.git && cd rerun` -* Run `./scripts/setup_dev.sh`. 
-* Make sure `cargo --version` prints `1.67.1` once you are done +- Install the Rust toolchain: +- `git clone git@github.com:rerun-io/rerun.git && cd rerun` +- Run `./scripts/setup_dev.sh`. +- Make sure `cargo --version` prints `1.67.1` once you are done ## Building -To build from source and install Rerun into your *current* Python environment run: + +To build from source and install Rerun into your _current_ Python environment run: ```sh python3 -m pip install --upgrade pip @@ -98,6 +102,7 @@ just py-build ``` ### Test + ```sh # Run the unit tests just py-test @@ -110,9 +115,11 @@ python examples/python/car/main.py ``` ## Building an installable Python Wheel + The Python bindings to the core Rust library are built using https://github.com/PyO3/pyo3. To build an installable Python wheel run: + ``` pip install -r rerun_py/requirements-build.txt maturin build -m rerun_py/Cargo.toml --release @@ -127,35 +134,41 @@ pip3 install target/wheels/*.whl ``` ## Viewing the docs locally + The rerun python docs are generated using `mkdocs` Install the doc requirements: + ``` pip install -r rerun_py/requirements-doc.txt ``` Serve the docs: + ```sh mkdocs serve -f rerun_py/mkdocs.yml -w rerun_py ``` + or + ```sh just py-docs-serve ``` For information on how the docs system works, see: [docs/docs.md](docs/docs.md) - ## Troubleshooting + You can run with `RUST_LOG=debug` to get more output out of the rerun SDK. If you are using an Apple-silicon Mac, make sure `rustc -vV` outputs `host: aarch64-apple-darwin`. 
If not, this should fix it: -``` sh +```sh rustup set default-host aarch64-apple-darwin && rustup install 1.67 ``` If you want to switch back, this is how: -``` sh + +```sh rustup set default-host x86_64-apple-darwin && rustup install 1.67 ``` diff --git a/rerun_py/docs/gen_common_index.py b/rerun_py/docs/gen_common_index.py index b159a49858bd..4742d2bbe4f0 100644 --- a/rerun_py/docs/gen_common_index.py +++ b/rerun_py/docs/gen_common_index.py @@ -8,8 +8,8 @@ -------- | ----------- [rerun.init()](initialization/#rerun.init) | Initialize the Rerun SDK ... [rerun.set_recording_id()](initialization/#rerun.set_recording_id) | Set the recording ID ... -[rerun.connect()](initialization/#rerun.connect) | Connect to a remote Rerun Viewer on the ... -[rerun.spawn()](initialization/#rerun.spawn) | Spawn a Rerun Viewer ... +[rerun.connect()](initialization/#rerun.connect) | Connect to a remote Depthai Viewer on the ... +[rerun.spawn()](initialization/#rerun.spawn) | Spawn a Depthai Viewer ... ... The Summary should look like: @@ -162,7 +162,7 @@ def make_slug(s: str) -> str: * [Examples on GitHub](https://github.com/rerun-io/rerun/tree/latest/examples/python) * [Troubleshooting](https://www.rerun.io/docs/getting-started/troubleshooting) -There are many different ways of sending data to the Rerun Viewer depending on what you're trying +There are many different ways of sending data to the Depthai Viewer depending on what you're trying to achieve and whether the viewer is running in the same process as your code, in another process, or even as a separate web application. 
diff --git a/rerun_py/pyproject.toml b/rerun_py/pyproject.toml index d60ea1b9f109..dc9d06414144 100644 --- a/rerun_py/pyproject.toml +++ b/rerun_py/pyproject.toml @@ -3,6 +3,14 @@ build-backend = "maturin" requires = ["maturin>=0.14.0,<0.15"] [project] +dependencies = [ + "deprecated", + "numpy>=1.23", + "pyarrow==10.0.1", + "ahrs", + "depthai", + "depthai-sdk", +] classifiers = [ "Programming Language :: Rust", "Programming Language :: Python :: Implementation :: CPython", @@ -11,15 +19,14 @@ classifiers = [ "Topic :: Scientific/Engineering :: Artificial Intelligence", "Topic :: Scientific/Engineering :: Visualization", ] -dependencies = ["deprecated", "numpy>=1.23", "pyarrow==10.0.1"] description = "The Rerun Logging SDK" keywords = ["computer-vision", "logging", "rerun"] -name = "rerun-sdk" +name = "depthai-viewer" requires-python = ">=3.8" [[project.authors]] -email = "opensource@rerun.io" -name = "Rerun.io" +email = "filip.jeretina@luxonis.com" +name = "Luxonis" [project.license] text = "MIT OR Apache-2.0" @@ -30,7 +37,7 @@ tests = ["pytest==7.1.2"] [project.urls] documentation = "https://www.rerun.io/docs" homepage = "https://www.rerun.io" -repository = "https://github.com/rerun-io/rerun" +repository = "https://github.com/zrezke/rerun" [project.scripts] rerun = "rerun.__main__:main" @@ -93,4 +100,8 @@ ban-relative-imports = "all" # See https://github.com/rerun-io/rerun/pull/1085 for more details include = ["rerun_sdk.pth", "rerun_sdk/rerun_demo/colmap_fiat.rrd"] locked = true -python-packages = ["rerun_sdk/rerun", "rerun_sdk/rerun_demo"] +python-packages = [ + "rerun_sdk/rerun", + "rerun_sdk/rerun_demo", + "rerun_sdk/depthai_viewer_backend", +] diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/.gitignore b/rerun_py/rerun_sdk/depthai_viewer_backend/.gitignore new file mode 100644 index 000000000000..32b40b2abc94 --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/.gitignore @@ -0,0 +1,5 @@ +.venv +.vscode +__pycache__ +rs_try +dev_experiments 
diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/README.md b/rerun_py/rerun_sdk/depthai_viewer_backend/README.md new file mode 100644 index 000000000000..50b0c2f6ca03 --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/README.md @@ -0,0 +1,31 @@ +# Depthai Viewer backend + +Features: +- [x] color camera video + left/right camera video +- [x] color stereo depth stream +- [x] 3D point cloud +- [x] IMU values + charts so that users can check if it works properly +- [x] Discovery of available OAK cameras + switching between cameras (only 1 can be used at a time) +- [ ] Settings for cameras - various filters, IR, Laser projector - so that user can quickly check the performance of stereo + cameras +- [x] Dropdown list of few most interesting neural models - yolo, hand detection, ... + - [x] YOLO + - [x] Face detection + - [x] Age gender detection + - [ ] Human Pose +- [ ] Bandwidth statistics +Extra: +- [ ] recording/replay +- [ ] camera calibration - similar to Lukasz' calibration app +- [ ] detailed information about cameras and firmware upgrade +- [ ] visualization for VIO/SLAM when available - similar to Zedfu app + + +## Develop + +```sh +python3 install_requirements.py +``` + +```sh +source .venv/bin/activate +``` diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/__init__.py b/rerun_py/rerun_sdk/depthai_viewer_backend/__init__.py new file mode 100644 index 000000000000..ce743d4cd9af --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/__init__.py @@ -0,0 +1,3 @@ +from depthai_viewer_backend.back import DepthaiViewerBack + +DepthaiViewerBack() diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/back.py b/rerun_py/rerun_sdk/depthai_viewer_backend/back.py new file mode 100644 index 000000000000..58df996111df --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/back.py @@ -0,0 +1,294 @@ +import json +import threading +from queue import Empty as QueueEmptyException +from queue import Queue +from typing import Dict, Tuple + 
+import depthai as dai +import numpy as np +from depthai_sdk import OakCamera +from depthai_sdk.components import NNComponent +from depthai_sdk.components.pointcloud_component import PointcloudComponent +from depthai_sdk.oak_camera import CameraComponent + +from depthai_viewer_backend.config_api import start_api +from depthai_viewer_backend.device_configuration import PipelineConfiguration +from depthai_viewer_backend.sdk_callbacks import SdkCallbacks +from depthai_viewer_backend.store import Store + +color_wh_to_enum = { + (1280, 720): dai.ColorCameraProperties.SensorResolution.THE_720_P, + (1280, 800): dai.ColorCameraProperties.SensorResolution.THE_800_P, + (1920, 1080): dai.ColorCameraProperties.SensorResolution.THE_1080_P, + (3840, 2160): dai.ColorCameraProperties.SensorResolution.THE_4_K, + (4056, 3040): dai.ColorCameraProperties.SensorResolution.THE_12_MP, + (1440, 1080): dai.ColorCameraProperties.SensorResolution.THE_1440X1080, + (5312, 6000): dai.ColorCameraProperties.SensorResolution.THE_5312X6000, + # TODO(filip): Add other resolutions +} + +mono_wh_to_enum = { + (640, 400): dai.MonoCameraProperties.SensorResolution.THE_400_P, + (640, 480): dai.MonoCameraProperties.SensorResolution.THE_480_P, + (1280, 720): dai.MonoCameraProperties.SensorResolution.THE_720_P, + (1280, 800): dai.MonoCameraProperties.SensorResolution.THE_800_P, + (1920, 1200): dai.MonoCameraProperties.SensorResolution.THE_1200_P, +} + + +class SelectedDevice: + id: str + intrinsic_matrix: Dict[Tuple[int, int], np.ndarray] = {} + calibration_data: dai.CalibrationHandler = None + + _color: CameraComponent = None + _left: CameraComponent = None + _right: CameraComponent = None + _stereo: CameraComponent = None + _nnet: NNComponent = None + _pc: PointcloudComponent = None + + oak_cam: OakCamera = None + + def __init__(self, device_id: str): + self.id = device_id + self.oak_cam = OakCamera(self.id) + print("Oak cam: ", self.oak_cam) + + def get_intrinsic_matrix(self, width: int, height: int) -> 
np.ndarray: + if self.intrinsic_matrix.get((width, height)) is np.ndarray: + return self.intrinsic_matrix.get((width, height)) + M_right = self.calibration_data.getCameraIntrinsics(dai.CameraBoardSocket.RIGHT, dai.Size2f(width, height)) + self.intrinsic_matrix[(width, height)] = np.array(M_right).reshape(3, 3) + return self.intrinsic_matrix[(width, height)] + + def get_device_properties(self) -> Dict: + dai_props = self.oak_cam.device.getConnectedCameraFeatures() + device_properties = { + "id": self.id, + "supported_color_resolutions": [], + "supported_left_mono_resolutions": [], + "supported_right_mono_resolutions": [], + } + for cam in dai_props: + resolutions_key = "supported_left_mono_resolutions" + if cam.socket == dai.CameraBoardSocket.RGB: + resolutions_key = "supported_color_resolutions" + elif cam.socket == dai.CameraBoardSocket.RIGHT: + resolutions_key = "supported_right_mono_resolutions" + for config in cam.configs: + wh = (config.width, config.height) + if wh not in device_properties[resolutions_key]: + device_properties[resolutions_key].append((config.width, config.height)) + device_properties["supported_color_resolutions"] = list( + map( + lambda x: color_wh_to_enum[x].name, + sorted(device_properties["supported_color_resolutions"], key=lambda x: x[0] * x[1]), + ) + ) + device_properties["supported_left_mono_resolutions"] = list( + map( + lambda x: color_wh_to_enum[x].name, + sorted(device_properties["supported_left_mono_resolutions"], key=lambda x: x[0] * x[1]), + ) + ) + device_properties["supported_right_mono_resolutions"] = list( + map( + lambda x: color_wh_to_enum[x].name, + sorted(device_properties["supported_right_mono_resolutions"], key=lambda x: x[0] * x[1]), + ) + ) + return device_properties + + def update_pipeline(self, config: PipelineConfiguration, callbacks: "SdkCallbacks") -> Tuple[bool, str]: + if self.oak_cam.running(): + print("Cam running, closing...") + self.oak_cam.device.close() + self.oak_cam = None + try: + self.oak_cam = 
OakCamera(self.id) + except RuntimeError: + print("Failed to create oak camera") + self.oak_cam = None + return False, {"message": "Failed to create oak camera"} + if config.color_camera: + print("Creating color camera") + self._color = self.oak_cam.create_camera( + "color", config.color_camera.resolution, config.color_camera.fps, name="color", encode=True + ) + if config.color_camera.xout_video: + self.oak_cam.callback(self._color.out.camera, callbacks.on_color_frame) + if config.left_camera: + print("Creating left camera") + self._left = self.oak_cam.create_camera( + "left", config.left_camera.resolution, config.left_camera.fps, name="left" + ) + if config.left_camera.xout: + self.oak_cam.callback(self._left.out.camera, callbacks.on_left_frame) + if config.right_camera: + print("Creating right camera") + self._right = self.oak_cam.create_camera( + "right", config.right_camera.resolution, config.right_camera.fps, name="right" + ) + if config.right_camera.xout: + self.oak_cam.callback(self._right, callbacks.on_right_frame) + if config.depth: + print("Creating depth") + self._stereo = self.oak_cam.create_stereo(left=self._left, right=self._right, name="depth") + self._stereo.config_stereo( + lr_check=config.depth.lr_check, + subpixel=config.depth.subpixel_disparity, + subpixel_bits=5, + confidence=config.depth.confidence, + align=config.depth.align, + lr_check_threshold=config.depth.lrc_threshold, + median=config.depth.median, + ) + self.oak_cam.callback(self._stereo, callbacks.on_stereo_frame) + # if config.depth.pointcloud and config.depth.pointcloud.enabled: + # self._pc = self.oak_cam.create_pointcloud(stereo=self._stereo, colorize=self._color) + # self.oak_cam.callback(self._pc, callbacks.on_pointcloud) + + if config.imu: + print("Creating IMU") + imu = self.oak_cam.create_imu() + # TODO(filip): Sdk will handle sensors list on it's own + sensors = [ + dai.IMUSensor.ACCELEROMETER, + dai.IMUSensor.GYROSCOPE_CALIBRATED, + dai.IMUSensor.MAGNETOMETER_CALIBRATED, + 
dai.IMUSensor.ROTATION_VECTOR, + ] + imu.config_imu( + sensors, report_rate=config.imu.report_rate, batch_report_threshold=config.imu.batch_report_threshold + ) + self.oak_cam.callback(imu, callbacks.on_imu) + + if config.ai_model and config.ai_model.path: + if config.ai_model.path == "age-gender-recognition-retail-0013": + face_detection = self.oak_cam.create_nn("face-detection-retail-0004", self._color) + self._nnet = self.oak_cam.create_nn("age-gender-recognition-retail-0013", input=face_detection) + self.oak_cam.callback(self._nnet, callbacks.on_age_gender_packet) + elif config.ai_model.path == "mobilenet-ssd": + self._nnet = self.oak_cam.create_nn( + config.ai_model.path, + self._color, + ) + self.oak_cam.callback(self._nnet, callbacks.on_mobilenet_ssd_packet) + else: + self._nnet = self.oak_cam.create_nn(config.ai_model.path, self._color) + callback = callbacks.on_detections + if config.ai_model.path == "yolo-v3-tiny-tf": + callback = callbacks.on_yolo_packet + self.oak_cam.callback(self._nnet, callback) + try: + self.oak_cam.start(blocking=False) + except RuntimeError: + return False, {"message": "Couldn't start pipeline"} + running = self.oak_cam.running() + if running: + self.oak_cam.poll() + self.calibration_data = self.oak_cam.device.readCalibration() + self.intrinsic_matrix = {} + return running, {"message": "Pipeline started" if running else "Couldn't start pipeline"} + + +class DepthaiViewerBack: + _device: SelectedDevice = None + + # Queues for communicating with the API process + action_queue: Queue + result_queue: Queue + + # Sdk callbacks for handling data from the device and sending it to the frontend + sdk_callbacks: SdkCallbacks + + def __init__(self, compression: bool = False) -> None: + self.action_queue = Queue() + self.result_queue = Queue() + self.send_message_queue = Queue() + + self.store = Store() + self.store.on_update_pipeline = self.update_pipeline + self.store.on_select_device = self.select_device + self.store.on_reset = 
self.on_reset + + self.api_process = threading.Thread( + target=start_api, args=(self.action_queue, self.result_queue, self.send_message_queue) + ) + self.api_process.start() + + self.sdk_callbacks = SdkCallbacks(self.store) + self.run() + + def set_device(self, device: SelectedDevice | None): + self._device = device + if device: + self.sdk_callbacks.set_camera_intrinsics_getter(device.get_intrinsic_matrix) + + def on_reset(self) -> Tuple[bool, str]: + print("Resetting...") + if self._device: + print("Closing device...") + self._device.oak_cam.device.close() + self._device.oak_cam.__exit__(None, None, None) + self._device.oak_cam = None + self.set_device(None) + print("Done") + return True, {"message": "Reset successful"} + + def select_device(self, device_id: str) -> Tuple[bool, str]: + print("Selecting device: ", device_id) + if self._device: + self.on_reset() + if device_id == "": + return True, {"message": "Successfully unselected device", "device_properties": {}} + try: + self.set_device(SelectedDevice(device_id)) + except RuntimeError as e: + print("Failed to select device:", e) + return False, {"message": "Failed to select device", "device_properties": {}} + try: + device_properties = self._device.get_device_properties() + return True, {"message:": "Device selected successfully", "device_properties": device_properties} + except RuntimeError as e: + print("Failed to get device properties:", e) + self.on_reset() + print("Restarting backend...") + # For now exit the backend, the frontend will restart it + # (TODO(filip): Why does "Device already closed or disconnected: Input/output error happen") + exit(-1) + # return False, {"message": "Failed to get device properties", "device_properties": {}} + + def update_pipeline(self) -> bool: + if not self._device: + print("No device selected, can't update pipeline!") + return False, {"message": "No device selected, can't update pipeline!"} + print("Updating pipeline...") + started, message = 
self._device.update_pipeline(self.store.pipeline_config, callbacks=self.sdk_callbacks) + if not started: + self.set_device(None) + return started, {"message": message} + + def run(self): + """Handles ws messages and poll OakCam.""" + while True: + try: + action, kwargs = self.action_queue.get(timeout=0.001) + print("Handling action: ", action) + self.result_queue.put(self.store.handle_action(action, **kwargs)) + except QueueEmptyException: + pass + + if self._device and self._device.oak_cam: + self._device.oak_cam.poll() + if self._device.oak_cam.device.isClosed(): + # TODO(filip): Typehint the messages properly + self.on_reset() + self.send_message_queue.put( + json.dumps({"type": "Error", "data": {"action": "FullReset", "message": "Device disconnected"}}) + ) + + +if __name__ == "__main__": + back = DepthaiViewerBack() diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/classification_labels.py b/rerun_py/rerun_sdk/depthai_viewer_backend/classification_labels.py new file mode 100644 index 000000000000..7d8f6b7e1958 --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/classification_labels.py @@ -0,0 +1,107 @@ +MOBILENET_LABELS = [ + "background", + "aeroplane", + "bicycle", + "bird", + "boat", + "bottle", + "bus", + "car", + "cat", + "chair", + "cow", + "diningtable", + "dog", + "horse", + "motorbike", + "person", + "pottedplant", + "sheep", + "sofa", + "train", + "tvmonitor", +] + + +YOLO_TINY_LABELS = [ + "person", + "bicycle", + "car", + "motorcycle", + "airplane", + "bus", + "train", + "truck", + "boat", + "trafficlight", + "firehydrant", + "stopsign", + "parkingmeter", + "bench", + "bird", + "cat", + "dog", + "horse", + "sheep", + "cow", + "elephant", + "bear", + "zebra", + "giraffe", + "backpack", + "umbrella", + "handbag", + "tie", + "suitcase", + "frisbee", + "skis", + "snowboard", + "sportsball", + "kite", + "baseballbat", + "baseballglove", + "skateboard", + "surfboard", + "tennisracket", + "bottle", + "wineglass", + "cup", + "fork", + 
"knife", + "spoon", + "bowl", + "banana", + "apple", + "sandwich", + "orange", + "broccoli", + "carrot", + "hotdog", + "pizza", + "donut", + "cake", + "chair", + "couch", + "pottedplant", + "bed", + "diningtable", + "toilet", + "tv", + "laptop", + "mouse", + "remote", + "keyboard", + "cellphone", + "microwave", + "oven", + "toaster", + "sink", + "refrigerator", + "book", + "clock", + "vase", + "scissors", + "teddybear", + "hairdrier", + "toothbrush", +] diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/config_api.py b/rerun_py/rerun_sdk/depthai_viewer_backend/config_api.py new file mode 100644 index 000000000000..446d865fb981 --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/config_api.py @@ -0,0 +1,164 @@ +import asyncio +import json +from multiprocessing import Queue +from queue import Empty as QueueEmptyException +from signal import SIGINT, signal +from typing import Dict, Tuple + +import depthai as dai +import websockets +from websockets.server import WebSocketServerProtocol + +from depthai_viewer_backend.device_configuration import PipelineConfiguration +from depthai_viewer_backend.store import Action +from depthai_viewer_backend.topic import Topic + +signal(SIGINT, lambda *args, **kwargs: exit(0)) + +# Definitions for linting +# send actions to back +dispatch_action_queue: Queue = None +# bool indicating action success +result_queue: Queue = None +send_message_queue: Queue = None + + +def dispatch_action(action: Action, **kwargs) -> Tuple[bool, Dict[str, any]]: + """ + Dispatches an action that will be executed by store.py. + + Returns: (success: bool, result: Dict[str, any]). 
+ """ + dispatch_action_queue.put((action, kwargs)) + return result_queue.get() + + +class MessageType: + SUBSCRIPTIONS = "Subscriptions" # Get or set subscriptions + PIPELINE = "Pipeline" # Get or Set pipeline + DEVICES = "Devices" # Get device list + DEVICE = "Device" # Get or set device + ERROR = "Error" # Error message + + +async def ws_api(websocket: WebSocketServerProtocol): + while True: + message = None + try: + message = await asyncio.wait_for(websocket.recv(), 1) + except asyncio.TimeoutError: + pass + except websockets.exceptions.ConnectionClosed: + success, message = dispatch_action(Action.RESET) + if success: + return + raise Exception("Couldn't reset backend after websocket disconnect!") + + if message: + try: + message = json.loads(message) + except json.JSONDecodeError: + print("Failed to parse message: ", message) + continue + message_type = message.get("type", None) + if not message_type: + print("Missing message type") + continue + print("Got message: ", message) + if message_type == MessageType.SUBSCRIPTIONS: + data = message.get("data", {}) + subscriptions = [Topic.create(topic_name) for topic_name in data.get(MessageType.SUBSCRIPTIONS, [])] + dispatch_action(Action.SET_SUBSCRIPTIONS, subscriptions=subscriptions) + print("Subscriptions: ", subscriptions) + active_subscriptions = [topic.name for topic in dispatch_action(Action.GET_SUBSCRIPTIONS) if topic] + await websocket.send(json.dumps({"type": MessageType.SUBSCRIPTIONS, "data": active_subscriptions})) + elif message_type == MessageType.PIPELINE: + data = message.get("data", {}) + pipeline_config = PipelineConfiguration(**data.get("Pipeline", {})) + print("Pipeline config: ", pipeline_config) + + success, result = dispatch_action(Action.UPDATE_PIPELINE, pipeline_config=pipeline_config) + if success: + active_config: PipelineConfiguration = dispatch_action(Action.GET_PIPELINE) + print("Active config: ", active_config) + await websocket.send( + json.dumps( + {"type": MessageType.PIPELINE, 
"data": active_config.to_json() if active_config else None} + ) + ) + else: + await websocket.send( + json.dumps( + { + "type": MessageType.ERROR, + "data": {"action": "FullReset", "message": result.get("message", "Unknown error")}, + } + ) + ) + elif message_type == MessageType.DEVICES: + await websocket.send( + json.dumps( + { + "type": MessageType.DEVICES, + "data": [d.getMxId() for d in dai.Device.getAllAvailableDevices()], + } + ) + ) + + elif message_type == MessageType.DEVICE: + data = message.get("data", {}) + device_repr = data.get("Device", {}) + device_id = device_repr.get("id", None) + if device_id is None: + print("Missing device id") + continue + success, result = dispatch_action(Action.SELECT_DEVICE, device_id=device_id) + if success: + print("Selected device properties: ", result.get("device_properties", None)) + await websocket.send( + json.dumps({"type": MessageType.DEVICE, "data": result.get("device_properties", {})}) + ) + else: + await websocket.send( + json.dumps( + { + "type": MessageType.ERROR, + "data": {"action": "FullReset", "message": result.get("message", "Unknown error")}, + } + ) + ) + + else: + print("Unknown message type: ", message_type) + continue + send_message = None + try: + send_message = send_message_queue.get(timeout=0.01) + except QueueEmptyException: + pass + if send_message: + print("Sending message: ", send_message) + await websocket.send(send_message) + + +async def main(): + async with websockets.serve(ws_api, "localhost", 9001): + await asyncio.Future() # run forever + + +def start_api(_dispatch_action_queue: Queue, _result_queue: Queue, _send_message_queue: Queue): + """ + Starts the websocket API. + + _dispatch_action_queue: Queue to send actions to store.py + _result_queue: Queue to get results from store.py + _send_message_queue: Queue to send messages to frontend. 
+ """ + global dispatch_action_queue + dispatch_action_queue = _dispatch_action_queue + global result_queue + result_queue = _result_queue + global send_message_queue + send_message_queue = _send_message_queue + + asyncio.run(main()) diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/device_configuration.py b/rerun_py/rerun_sdk/depthai_viewer_backend/device_configuration.py new file mode 100644 index 000000000000..4fb0447dece9 --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/device_configuration.py @@ -0,0 +1,183 @@ +from typing import Optional + +import depthai as dai +from depthai_sdk.previews import Previews as QueueNames +from pydantic import BaseModel + + +class ColorCameraConfiguration(BaseModel): + fps: Optional[int] = 30 + resolution: Optional[ + dai.ColorCameraProperties.SensorResolution + ] = dai.ColorCameraProperties.SensorResolution.THE_1080_P + board_socket: Optional[dai.CameraBoardSocket] = dai.CameraBoardSocket.RGB + out_preview: bool = False + xout_still: bool = False + xout_video: bool = True + input_control: bool = False + + class Config: + arbitrary_types_allowed = True + # Doesnt work atm + json_encoders = { + Optional[dai.MonoCameraProperties.SensorResolution]: lambda v: v.name, + dai.CameraBoardSocket: lambda v: v.name, + } + + def __init__(self, **v): + if v.get("resolution"): + v["resolution"] = getattr(dai.ColorCameraProperties.SensorResolution, v["resolution"]) + if v.get("board_socket"): + v["board_socket"] = getattr(dai.CameraBoardSocket, v["board_socket"]) + return super().__init__(**v) + + @property + # Make this select the queue based on ui, also probably not just one queue + def out_queue_name(self) -> str | None: + prefix = QueueNames.color.name + if self.out_preview: + return prefix + "_preview" + if self.xout_still: + return prefix + "_still" + if self.xout_video: + return prefix + "_video" + + +class MonoCameraConfiguration(BaseModel): + fps: Optional[int] = 30 + resolution: Optional[ + 
dai.MonoCameraProperties.SensorResolution + ] = dai.MonoCameraProperties.SensorResolution.THE_400_P + board_socket: Optional[dai.CameraBoardSocket] = dai.CameraBoardSocket.LEFT + xout: bool = False # Depth queue fails if I create this queue! + input_control: bool = False + + class Config: + arbitrary_types_allowed = True + # Doesnt work atm + json_encoders = { + Optional[dai.MonoCameraProperties.SensorResolution]: lambda v: v.name, + dai.CameraBoardSocket: lambda v: v.name, + } + + def __init__(self, **v): + if v.get("resolution"): + v["resolution"] = getattr(dai.MonoCameraProperties.SensorResolution, v["resolution"]) + if v.get("board_socket"): + v["board_socket"] = getattr(dai.CameraBoardSocket, v["board_socket"]) + return super().__init__(**v) + + @property + def out_queue_name(self) -> str: + return "left" if self.board_socket == dai.CameraBoardSocket.LEFT else "right" + + @classmethod + def create_left(cls, **kwargs): + return cls(board_socket="LEFT", **kwargs) + + @classmethod + def create_right(cls, **kwargs): + return cls(board_socket="RIGHT", **kwargs) + + +# class PointcloudConfiguration(BaseModel): +# enabled: bool = True + + +class DepthConfiguration(BaseModel): + median: Optional[dai.StereoDepthProperties.MedianFilter] = dai.StereoDepthProperties.MedianFilter.KERNEL_7x7 + lr_check: Optional[bool] = True + lrc_threshold: int = 5 # 0..10 + extended_disparity: Optional[bool] = False + subpixel_disparity: Optional[bool] = True + align: Optional[dai.CameraBoardSocket] = dai.CameraBoardSocket.RGB + sigma: int = 0 # 0..65535 + # pointcloud: PointcloudConfiguration | None = None + confidence: int = 230 + + class Config: + arbitrary_types_allowed = True + + def __init__(self, **v): + if v.get("median"): + v["median"] = getattr(dai.StereoDepthProperties.MedianFilter, v["median"]) + if v.get("align"): + v["align"] = getattr(dai.CameraBoardSocket, v["align"]) + return super().__init__(**v) + + def requires_rebuild(self, other: "DepthConfiguration") -> bool: + 
dont_require_rebuild = {"lrc_threshold", "sigma", "dct", "median"} + return len(dont_require_rebuild - self._get_modified_fields(other)) != 0 + + def _get_modified_fields(self, other: "DepthConfiguration") -> set[str]: + fields = set() + if self.dct != other.dct: + fields.add("dct") + if self.median != other.median: + fields.add("median") + if self.lr_check != other.lr_check: + fields.add("lr_check") + if self.lrc_threshold != other.lrc_threshold: + fields.add("lrc_threshold") + if self.extended_disparity != other.extended_disparity: + fields.add("extended_disparity") + if self.subpixel_disparity != other.subpixel_disparity: + fields.add("subpixel_disparity") + if self.align != other.align: + fields.add("align") + if self.sigma != other.sigma: + fields.add("sigma") + return fields + + @property + def out_queue_name(self) -> str: + return QueueNames.depthRaw.name + + +class AiModelConfiguration(BaseModel): + display_name: str + path: str + + +class ImuConfiguration(BaseModel): + report_rate: int = 100 + batch_report_threshold: int = 5 + + +class PipelineConfiguration(BaseModel): + color_camera: ColorCameraConfiguration = ColorCameraConfiguration() + left_camera: MonoCameraConfiguration = MonoCameraConfiguration.create_left() + right_camera: MonoCameraConfiguration = MonoCameraConfiguration.create_right() + depth: DepthConfiguration | None + ai_model: AiModelConfiguration | None + imu: ImuConfiguration = ImuConfiguration() + + def to_json(self): + as_dict = self.dict() + return self._fix_depthai_types(as_dict) + + def _fix_depthai_types(self, as_dict: dict): + """ATM Config.json_encoders doesn't work, so we manually fix convert the depthai types to strings here.""" + if as_dict.get("color_camera"): + as_dict["color_camera"] = self._fix_camera(as_dict["color_camera"]) + if as_dict.get("left_camera"): + as_dict["left_camera"] = self._fix_camera(as_dict["left_camera"]) + if as_dict.get("right_camera"): + as_dict["right_camera"] = 
self._fix_camera(as_dict["right_camera"]) + if as_dict.get("depth"): + as_dict["depth"] = self._fix_depth(as_dict["depth"]) + return as_dict + + def _fix_depth(self, as_dict: dict): + if as_dict.get("align"): + as_dict["align"] = as_dict["align"].name + if as_dict.get("median"): + as_dict["median"] = as_dict["median"].name + return as_dict + + def _fix_camera(self, as_dict: dict): + if as_dict.get("resolution"): + as_dict["resolution"] = as_dict["resolution"].name + if as_dict.get("board_socket"): + as_dict["board_socket"] = as_dict["board_socket"].name + return as_dict diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/pyproject.toml b/rerun_py/rerun_sdk/depthai_viewer_backend/pyproject.toml new file mode 100644 index 000000000000..87b1058c89e5 --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/pyproject.toml @@ -0,0 +1,12 @@ +[project] +name = "depthai_viewer_backend" +version = "0.0.1" +authors = [{ name = "Filip Jeretina", email = "filip.jeretina@luxonis.com" }] +description = "DepthAi Viewer Backend" + +[build-system] +requires = ["flit"] +build-backend = "flit.buildapi" + + +dependencies = ["depthai-sdk", "depthai", "numpy==1.24.*", "rerun_sdk", "ahrs"] diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/sdk_callbacks.py b/rerun_py/rerun_sdk/depthai_viewer_backend/sdk_callbacks.py new file mode 100644 index 000000000000..2602367a4b57 --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/sdk_callbacks.py @@ -0,0 +1,190 @@ +from typing import Callable, Dict, List, Tuple, Union + +import cv2 +import depthai as dai +import numpy as np +import rerun as rr +from ahrs.filters import Mahony +from depthai_sdk.classes.packets import ( + DepthPacket, + DetectionPacket, + FramePacket, + IMUPacket, + PointcloudPacket, + TwoStagePacket, +) +from rerun.components.rect2d import RectFormat + +from depthai_viewer_backend import classification_labels +from depthai_viewer_backend.store import Store +from depthai_viewer_backend.topic import Topic + + 
+class EntityPath: + LEFT_PINHOLE_CAMERA = "mono/camera/left_mono" + LEFT_CAMERA_IMAGE = "mono/camera/left_mono/Left mono" + RIGHT_PINHOLE_CAMERA = "mono/camera/right_mono" + RIGHT_CAMERA_IMAGE = "mono/camera/right_mono/Right mono" + RGB_PINHOLE_CAMERA = "color/camera/rgb" + RGB_CAMERA_IMAGE = "color/camera/rgb/Color camera" + + DETECTIONS = "color/camera/rgb/Detections" + DETECTION = "color/camera/rgb/Detection" + + RGB_CAMERA_TRANSFORM = "color/camera" + MONO_CAMERA_TRANSFORM = "mono/camera" + + +class SdkCallbacks: + store: Store + ahrs: Mahony + _get_camera_intrinsics: Callable[[int, int], np.ndarray] + + def __init__(self, store: Store): + rr.init("Depthai Viewer") + rr.connect() + self.store = store + self.ahrs = Mahony(frequency=100) + self.ahrs.Q = np.array([1, 0, 0, 0], dtype=np.float64) + + def set_camera_intrinsics_getter(self, camera_intrinsics_getter: Callable[[int, int], np.ndarray]): + self._get_camera_intrinsics = camera_intrinsics_getter + + def on_imu(self, packet: IMUPacket): + for data in packet.data: + gyro: dai.IMUReportGyroscope = data.gyroscope + accel: dai.IMUReportAccelerometer = data.acceleroMeter + mag: dai.IMUReportMagneticField = data.magneticField + # TODO(filip): Move coordinate mapping to sdk + self.ahrs.Q = self.ahrs.updateIMU( + self.ahrs.Q, np.array([gyro.z, gyro.x, gyro.y]), np.array([accel.z, accel.x, accel.y]) + ) + if Topic.ImuData not in self.store.subscriptions: + return + rr.log_imu([accel.z, accel.x, accel.y], [gyro.z, gyro.x, gyro.y], self.ahrs.Q, [mag.x, mag.y, mag.z]) + + def on_pointcloud(self, packet: PointcloudPacket): + # if Topic.PointCloud not in self.store.subscriptions: + # return + colors = cv2.cvtColor(packet.color_frame.getCvFrame(), cv2.COLOR_BGR2RGB).reshape(-1, 3) + points = packet.points.reshape(-1, 3) + + path = EntityPath.RGB_CAMERA_TRANSFORM + "/Point cloud" + depth = self.store.pipeline_config.depth + if not depth: + # Essentially impossible to get here + return + if depth.align == 
dai.CameraBoardSocket.LEFT or depth.align == dai.CameraBoardSocket.RIGHT: + path = EntityPath.MONO_CAMERA_TRANSFORM + "/Point cloud" + rr.log_points(path, points, colors=colors) + + def on_color_frame(self, frame: FramePacket): + # Always log pinhole cam and pose (TODO(filip): move somewhere else or not) + if Topic.ColorImage not in self.store.subscriptions: + return + rr.log_rigid3(EntityPath.RGB_CAMERA_TRANSFORM, child_from_parent=([0, 0, 0], self.ahrs.Q), xyz="RDF") + w, h = frame.imgFrame.getWidth(), frame.imgFrame.getHeight() + rr.log_pinhole( + EntityPath.RGB_PINHOLE_CAMERA, child_from_parent=self._get_camera_intrinsics(w, h), width=w, height=h + ) + rr.log_image(EntityPath.RGB_CAMERA_IMAGE, cv2.cvtColor(frame.frame, cv2.COLOR_BGR2RGB)) + + def on_left_frame(self, frame: FramePacket): + if Topic.LeftMono not in self.store.subscriptions: + return + w, h = frame.imgFrame.getWidth(), frame.imgFrame.getHeight() + rr.log_rigid3(EntityPath.MONO_CAMERA_TRANSFORM, child_from_parent=([0, 0, 0], self.ahrs.Q), xyz="RDF") + rr.log_pinhole( + EntityPath.LEFT_PINHOLE_CAMERA, child_from_parent=self._get_camera_intrinsics(w, h), width=w, height=h + ) + rr.log_image(EntityPath.LEFT_CAMERA_IMAGE, frame.frame) + + def on_right_frame(self, frame: FramePacket): + if Topic.RightMono not in self.store.subscriptions: + return + w, h = frame.imgFrame.getWidth(), frame.imgFrame.getHeight() + rr.log_rigid3(EntityPath.MONO_CAMERA_TRANSFORM, child_from_parent=([0, 0, 0], self.ahrs.Q), xyz="RDF") + rr.log_pinhole( + EntityPath.RIGHT_PINHOLE_CAMERA, child_from_parent=self._get_camera_intrinsics(w, h), width=w, height=h + ) + rr.log_image(EntityPath.RIGHT_CAMERA_IMAGE, frame.frame) + + def on_stereo_frame(self, frame: DepthPacket): + if Topic.DepthImage not in self.store.subscriptions: + return + depth_frame = frame.frame + # Maybe move to rerun depth cloud in the future + # depth_frame_color = depth_frame + # pinhole_camera = 
PinholeCamera(self._device.get_intrinsic_matrix(frame.imgFrame.getWidth( + # ), frame.imgFrame.getHeight()), frame.imgFrame.getWidth(), frame.imgFrame.getHeight()) + # depth_frame_color = cv2.normalize(depth_frame, None, 255, 0, cv2.NORM_INF, cv2.CV_8UC1) + # depth_frame_color = cv2.equalizeHist(depth_frame_color) + # depth_frame_color = cv2.applyColorMap(depth_frame_color, cv2.COLORMAP_HOT) + path = EntityPath.RGB_PINHOLE_CAMERA + "/depth" + depth = self.store.pipeline_config.depth + if not depth: + # Essentially impossible to get here + return + if depth.align == dai.CameraBoardSocket.LEFT: + path = EntityPath.LEFT_PINHOLE_CAMERA + "/depth" + elif depth.align == dai.CameraBoardSocket.RIGHT: + path = EntityPath.RIGHT_PINHOLE_CAMERA + "/depth" + rr.log_depth_image(path, depth_frame, meter=1e3) + + def on_detections(self, packet: DetectionPacket): + rects, colors, labels = self._detections_to_rects_colors_labels(packet) + rr.log_rects(EntityPath.DETECTIONS, rects, rect_format=RectFormat.XYXY, colors=colors, labels=labels) + + def _detections_to_rects_colors_labels( + self, packet: DetectionPacket, labels_dict: Union[Dict, None] = None + ) -> Tuple[List, List, List]: + h, w, _ = packet.frame.shape + rects = [] + colors = [] + labels = [] + for detection in packet.img_detections.detections: + rects.append( + [ + max(detection.xmin, 0) * w, + max(detection.ymin, 0) * h, + min(detection.xmax, 1) * w, + min(detection.ymax, 1) * h, + ] + ) + colors.append([0, 255, 0]) + label = "" + if labels_dict is not None: + label += labels_dict[detection.label] + ", " + label += str(int(detection.confidence * 100)) + "%" + labels.append(label) + return rects, colors, labels + + def on_yolo_packet(self, packet: DetectionPacket): + rects, colors, labels = self._detections_to_rects_colors_labels(packet, classification_labels.YOLO_TINY_LABELS) + rr.log_rects(EntityPath.DETECTIONS, rects=rects, colors=colors, labels=labels, rect_format=RectFormat.XYXY) + + def on_age_gender_packet(self, 
packet: TwoStagePacket): + for det, rec in zip(packet.detections, packet.nnData): + age = int(float(np.squeeze(np.array(rec.getLayerFp16("age_conv3")))) * 100) + gender = np.squeeze(np.array(rec.getLayerFp16("prob"))) + gender_str = "Woman" if gender[0] > gender[1] else "Man" + label = f"{gender_str}, {age}" + color = [255, 0, 0] if gender[0] > gender[1] else [0, 0, 255] + x0, y0, x1, y1 = det.get_bbox() + # TODO(filip): maybe use rr.log_annotation_context to log class colors for detections + rr.log_rect( + EntityPath.DETECTION, + [ + x0 * packet.frame.shape[1], + y0 * packet.frame.shape[0], + x1 * packet.frame.shape[1], + y1 * packet.frame.shape[0], + ], + rect_format=RectFormat.XYXY, + color=color, + label=label, + ) + + def on_mobilenet_ssd_packet(self, packet: DetectionPacket): + rects, colors, labels = self._detections_to_rects_colors_labels(packet, classification_labels.MOBILENET_LABELS) + rr.log_rects(EntityPath.DETECTIONS, rects=rects, colors=colors, labels=labels, rect_format=RectFormat.XYXY) diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/store.py b/rerun_py/rerun_sdk/depthai_viewer_backend/store.py new file mode 100644 index 000000000000..13ff2781de1e --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/store.py @@ -0,0 +1,53 @@ +from enum import Enum +from typing import Callable, List, Tuple + +from depthai_viewer_backend.device_configuration import PipelineConfiguration +from depthai_viewer_backend.topic import Topic + + +class Action(Enum): + UPDATE_PIPELINE = 0 + SELECT_DEVICE = 1 + GET_SUBSCRIPTIONS = 2 + SET_SUBSCRIPTIONS = 3 + GET_PIPELINE = 4 + RESET = 5 # When anything bad happens, a reset occurs (like closing ws connection) + GET_AVAILABLE_DEVICES = 6 + + +class Store: + pipeline_config: PipelineConfiguration = PipelineConfiguration() + subscriptions: List[Topic] = [] + on_update_pipeline: Callable[[], Tuple[bool, str]] = None + on_select_device: Callable[[str], Tuple[bool, str]] = None + on_reset: Callable[[], Tuple[bool, str]] 
= None + + def handle_action(self, action: Action, **kwargs) -> Tuple[bool, str]: + if action == Action.UPDATE_PIPELINE: + if kwargs.get("pipeline_config", None): + if self.on_update_pipeline: + old_pipeline_config = self.pipeline_config + self.pipeline_config = kwargs.get("pipeline_config") + success, message = self.on_update_pipeline() + if success: + return success, message + self.pipeline_config = old_pipeline_config + return success, message + elif action == Action.SELECT_DEVICE: + device_id = kwargs.get("device_id", None) + if device_id is not None: + self.device_id = device_id + if self.on_select_device: + return self.on_select_device(device_id) + elif action == Action.GET_SUBSCRIPTIONS: + return self.subscriptions + elif action == Action.SET_SUBSCRIPTIONS: + self.subscriptions = kwargs.get("subscriptions", []) + elif action == Action.GET_PIPELINE: + return self.pipeline_config + elif action == Action.RESET: + if self.on_reset: + self.pipeline_config = None + self.subscriptions = [] + return self.on_reset() + return False, f"Action: {action} didn't succeed!" 
diff --git a/rerun_py/rerun_sdk/depthai_viewer_backend/topic.py b/rerun_py/rerun_sdk/depthai_viewer_backend/topic.py new file mode 100644 index 000000000000..3d7562ce6db2 --- /dev/null +++ b/rerun_py/rerun_sdk/depthai_viewer_backend/topic.py @@ -0,0 +1,24 @@ +from enum import Enum +from typing import Union + + +class Topic(Enum): + """All topics that can be subscribed to.""" + + ColorImage = 0 + LeftMono = 1 + RightMono = 2 + DepthImage = 3 + PinholeCamera = 4 + Rectangle = 5 + Rectangles = 6 + ImuData = 7 + + @classmethod + def create(cls, name_or_id: Union[str, int]) -> "Topic": + if type(name_or_id) == str: + return Topic[name_or_id] + elif type(name_or_id) == int: + return Topic(name_or_id) + else: + raise ValueError("Invalid topic name or id: ", name_or_id) diff --git a/rerun_py/rerun_sdk/rerun/__init__.py b/rerun_py/rerun_sdk/rerun/__init__.py index 799ec2ccfd50..1f8961ff5964 100644 --- a/rerun_py/rerun_sdk/rerun/__init__.py +++ b/rerun_py/rerun_sdk/rerun/__init__.py @@ -13,8 +13,10 @@ from rerun.log.extension_components import log_extension_components from rerun.log.file import ImageFormat, MeshFormat, log_image_file, log_mesh_file from rerun.log.image import log_depth_image, log_image, log_segmentation_image +from rerun.log.imu import log_imu from rerun.log.lines import log_line_segments, log_line_strip, log_path from rerun.log.mesh import log_mesh, log_meshes +from rerun.log.pipeline_graph import log_pipeline_graph from rerun.log.points import log_point, log_points from rerun.log.rects import RectFormat, log_rect, log_rects from rerun.log.scalar import log_scalar @@ -39,6 +41,7 @@ "log_extension_components", "log_image_file", "log_image", + "log_pipeline_graph", "log_line_segments", "log_line_strip", "log_mesh_file", @@ -65,6 +68,7 @@ "script_add_args", "script_setup", "script_teardown", + "log_imu", ] @@ -145,7 +149,7 @@ def init(application_id: str, spawn: bool = False, default_enabled: bool = True, and another doing camera calibration, you could have 
`rerun.init("object_detector")` and `rerun.init("calibrator")`. spawn : bool - Spawn a Rerun Viewer and stream logging data to it. + Spawn a Depthai Viewer and stream logging data to it. Short for calling `spawn` separately. If you don't call this, log events will be buffered indefinitely until you call either `connect`, `show`, or `save` @@ -181,7 +185,8 @@ def init(application_id: str, spawn: bool = False, default_enabled: bool = True, stack = inspect.stack() for frame in stack[:MAX_FRAMES]: filename = frame[FRAME_FILENAME_INDEX] - path = pathlib.Path(str(filename)).resolve() # normalize before comparison! + # normalize before comparison! + path = pathlib.Path(str(filename)).resolve() if "rerun/examples" in str(path): application_path = path except Exception: @@ -264,9 +269,9 @@ def set_strict_mode(strict_mode: bool) -> None: def connect(addr: Optional[str] = None) -> None: """ - Connect to a remote Rerun Viewer on the given ip:port. + Connect to a remote Depthai Viewer on the given ip:port. - Requires that you first start a Rerun Viewer, e.g. with 'python -m rerun' + Requires that you first start a Depthai Viewer, e.g. with 'python -m rerun' This function returns immediately. @@ -289,7 +294,7 @@ def connect(addr: Optional[str] = None) -> None: def spawn(port: int = 9876, connect: bool = True) -> None: """ - Spawn a Rerun Viewer, listening on the given port. + Spawn a Depthai Viewer, listening on the given port. This is often the easiest and best way to use Rerun. Just call this once at the start of your program. 
diff --git a/rerun_py/rerun_sdk/rerun/components/imu.py b/rerun_py/rerun_sdk/rerun/components/imu.py new file mode 100644 index 000000000000..272b4d666ea6 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/components/imu.py @@ -0,0 +1,38 @@ +from typing import Union + +import numpy as np +import numpy.typing as npt +import pyarrow as pa + +from rerun.components import REGISTERED_COMPONENT_NAMES, ComponentTypeFactory +from rerun.components.point import Point3DArray, Point3DType +from rerun.components.quaternion import QuaternionArray + +__all__ = ["ImuType", "Imu"] + + +class Imu(pa.ExtensionArray): # type: ignore[misc] + def create( + accel: npt.NDArray[np.float32], + gyro: npt.NDArray[np.float32], + orientation: npt.NDArray[np.float32], + mag: Union[npt.NDArray[np.float32], None] = None, + ) -> "Imu": + """Build Imu data from acceleration and gyroscope data.""" + assert accel.shape[0] == 3 + assert gyro.shape[0] == 3 + accel_point = Point3DArray.from_numpy(accel.reshape(1, 3)) + gyro_point = Point3DArray.from_numpy(gyro.reshape(1, 3)) + quat = QuaternionArray.from_numpy(np.array(orientation, dtype=np.float32).reshape(1, 4)) + mag_point = pa.nulls(1, type=Point3DType.storage_type) + if mag is not None: + mag_point = Point3DArray.from_numpy(np.array(mag, dtype=np.float32).reshape(1, 3)) + return pa.StructArray.from_arrays( # type: ignore[no-any-return] + fields=ImuType.storage_type, + arrays=[accel_point, gyro_point, mag_point, quat], + mask=pa.array([False, False, mag is None, False], type=pa.bool_()), + ) + + +ImuType = ComponentTypeFactory("ImuType", Imu, REGISTERED_COMPONENT_NAMES["rerun.imu"]) +pa.register_extension_type(ImuType()) diff --git a/rerun_py/rerun_sdk/rerun/log/__init__.py b/rerun_py/rerun_sdk/rerun/log/__init__.py index fd7a74d1cf76..350a2875b9a2 100644 --- a/rerun_py/rerun_sdk/rerun/log/__init__.py +++ b/rerun_py/rerun_sdk/rerun/log/__init__.py @@ -23,6 +23,7 @@ "text_internal", "transform", "ext", + "imu", ] diff --git 
a/rerun_py/rerun_sdk/rerun/log/imu.py b/rerun_py/rerun_sdk/rerun/log/imu.py new file mode 100644 index 000000000000..bcc0c2451bdc --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/log/imu.py @@ -0,0 +1,56 @@ +from typing import Any, Dict, Union + +import numpy as np +import numpy.typing as npt + +from rerun import bindings +from rerun.components.imu import Imu +from rerun.log.log_decorator import log_decorator + + +@log_decorator +def log_imu( + accel: npt.ArrayLike, gyro: npt.ArrayLike, orientation: npt.ArrayLike, mag: Union[npt.ArrayLike, None] = None +) -> None: + """ + Log an IMU sensor reading. + + Parameters + ---------- + entity_path: + Path to the IMU sensor in the space hierarchy. + accel: + Acceleration vector in m/s^2. + gyro: + Angular velocity vector in rad/s. + orientation: + Orientation quaternion. + mag: + Magnetometer vector in uT. + """ + + if accel is not None: + accel = np.require(accel, dtype=np.float32) + else: + raise ValueError("Acceleration vector cannot be None") + if gyro is not None: + gyro = np.require(gyro, dtype=np.float32) + else: + raise ValueError("angular velocity vector cannot be None") + if orientation is not None: + orientation = np.require(orientation, dtype=np.float32) + else: + raise ValueError("orientation vector cannot be None") + + instanced: Dict[str, Any] = {} + if accel.size != 3: + raise ValueError(f"Acceleration vector must have a length of 3, got: {accel.size}") + if gyro.size != 3: + raise ValueError(f"Angular velocity vector must have a length of 3, got: {gyro.size}") + + if orientation.size != 4: + raise ValueError(f"Orientation quaternion must have a length of 4, got: {orientation.size}") + + instanced["rerun.imu"] = Imu.create(accel, gyro, orientation, mag) # type: ignore[arg-type] + # Fixed imu entity path + bindings.log_arrow_msg("imu_data", components=instanced, timeless=False) diff --git a/rerun_py/rerun_sdk/rerun/log/pipeline_graph.py b/rerun_py/rerun_sdk/rerun/log/pipeline_graph.py new file mode 100644 index 
000000000000..2d0623550b37 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/log/pipeline_graph.py @@ -0,0 +1,60 @@ +# rerun.pipeline_graph + +from typing import Any, Dict, Optional, Sequence + +import numpy as np + +from rerun import bindings +from rerun.components.color import ColorRGBAArray +from rerun.components.instance import InstanceArray +from rerun.components.label import LabelArray +from rerun.components.radius import RadiusArray +from rerun.components.scalar import ScalarArray, ScalarPlotPropsArray +from rerun.log import _normalize_colors +from rerun.log.extension_components import _add_extension_components +from rerun.log.log_decorator import log_decorator + +__all__ = [ + "log_pipeline_graph", +] + + +@log_decorator +def log_pipeline_graph( + entity_path: str, + scalar: float, + label: Optional[str] = None, + color: Optional[Sequence[int]] = None, + radius: Optional[float] = None, + scattered: Optional[bool] = None, + ext: Optional[Dict[str, Any]] = None, +) -> None: + instanced: Dict[str, Any] = {} + splats: Dict[str, Any] = {} + + instanced["rerun.pipeline_graph"] = ScalarArray.from_numpy(np.array([scalar])) + + if label: + instanced["rerun.label"] = LabelArray.new([label]) + + if color: + colors = _normalize_colors(np.array([color])) + instanced["rerun.colorrgba"] = ColorRGBAArray.from_numpy(colors) + + if radius: + instanced["rerun.radius"] = RadiusArray.from_numpy(np.array([radius])) + + if scattered: + props = [{"scattered": scattered}] + instanced["rerun.scalar_plot_props"] = ScalarPlotPropsArray.from_props(props) + + if ext: + _add_extension_components(instanced, splats, ext, None) + + if splats: + splats["rerun.instance_key"] = InstanceArray.splat() + bindings.log_arrow_msg(entity_path, components=splats, timeless=False) + + # Always the primary component last so range-based queries will include the other data. 
See(#1215) + if instanced: + bindings.log_arrow_msg(entity_path, components=instanced, timeless=False) diff --git a/rerun_py/rerun_sdk/rerun/script_helpers.py b/rerun_py/rerun_sdk/rerun/script_helpers.py index 8b59133dd090..3504d9d74593 100644 --- a/rerun_py/rerun_sdk/rerun/script_helpers.py +++ b/rerun_py/rerun_sdk/rerun/script_helpers.py @@ -91,10 +91,10 @@ def script_teardown(args: Namespace) -> None: """ if args.serve: - import signal - from threading import Event + import time - exit = Event() - signal.signal(signal.SIGINT, lambda sig, frame: exit.set()) - print("Sleeping while serving the web viewer. Abort with Ctrl-C") - exit.wait() + try: + while True: + time.sleep(1) + except KeyboardInterrupt: + print("Ctrl-C received. Exiting.") diff --git a/rerun_py/rerun_sdk/rerun_demo/__init__.py b/rerun_py/rerun_sdk/rerun_demo/__init__.py index 9f07b733fc92..5e4d6827294a 100644 --- a/rerun_py/rerun_sdk/rerun_demo/__init__.py +++ b/rerun_py/rerun_sdk/rerun_demo/__init__.py @@ -17,7 +17,7 @@ rr.log_points("my_points", color_grid.positions, colors=color_grid.colors) ``` -Note that because this package is shipped with the rerun-sdk pypi package, it +Note that because this package is shipped with the depthai-viewer pypi package, it cannot carry any dependencies beyond those of rerun itself. This generally limits demos to only using the standard library and numpy for data generation. 
""" diff --git a/rerun_py/rerun_sdk/rerun_demo/__main__.py b/rerun_py/rerun_sdk/rerun_demo/__main__.py index ce5a5b34fdf4..dcb58f23d1ef 100644 --- a/rerun_py/rerun_sdk/rerun_demo/__main__.py +++ b/rerun_py/rerun_sdk/rerun_demo/__main__.py @@ -5,15 +5,16 @@ import sys -def run_cube(): +def run_cube(args: argparse.Namespace): import math import numpy as np import rerun as rr - rr.init("Cube", spawn=True, default_enabled=True) from rerun_demo.data import build_color_grid + rr.script_setup(args, "Cube") + STEPS = 100 twists = math.pi * np.sin(np.linspace(0, math.tau, STEPS)) / 4 for t in range(STEPS): @@ -21,10 +22,25 @@ def run_cube(): cube = build_color_grid(10, 10, 10, twist=twists[t]) rr.log_points("cube", positions=cube.positions, colors=cube.colors, radii=0.5) + rr.script_teardown(args) + -def run_colmap(): +def run_colmap(args): from rerun import bindings, unregister_shutdown # type: ignore[attr-defined] + serve_opts = [] + + # TODO(https://github.com/rerun-io/rerun/issues/1924): The need to special-case + # this flag conversion is a bit awkward. + if args.connect or args.addr: + print("Connecting to external viewer is only supported with the --cube demo.", file=sys.stderr) + exit(1) + if args.save: + print("Saving an RRD file is only supported from the --cube demo.", file=sys.stderr) + exit(1) + if args.serve: + serve_opts.append("--web-viewer") + # We don't need to call shutdown in this case. Rust should be handling everything unregister_shutdown() @@ -33,11 +49,13 @@ def run_colmap(): print("No demo file found at {}. 
Package was built without demo support".format(rrd_file), file=sys.stderr) exit(1) else: - exit(bindings.main([sys.argv[0], str(rrd_file)])) + exit(bindings.main([sys.argv[0], str(rrd_file)] + serve_opts)) def main() -> None: - parser = argparse.ArgumentParser(description="Run rerun example programs") + import rerun as rr + + parser = argparse.ArgumentParser(description="Run rerun example programs.") group = parser.add_mutually_exclusive_group() @@ -53,16 +71,18 @@ def main() -> None: help="Run the COLMAP data demo", ) + rr.script_add_args(parser) + args = parser.parse_args() if not any([args.cube, args.colmap]): args.cube = True if args.cube: - run_cube() + run_cube(args) elif args.colmap: - run_colmap() + run_colmap(args) if __name__ == "__main__": diff --git a/rerun_py/src/arrow.rs b/rerun_py/src/arrow.rs index b7e8c0220006..0a0d7e5663cd 100644 --- a/rerun_py/src/arrow.rs +++ b/rerun_py/src/arrow.rs @@ -35,7 +35,7 @@ fn array_to_rust(arrow_array: &PyAny, name: Option<&str>) -> PyResult<(Box) -> PyResult<(Box None: + # Initialize the GitHub and GCS clients + gh = Github(github_token) # NOLINT + gcs_client = storage.Client() + + # Get the list of commits associated with the PR + repo = gh.get_repo(github_repository) + pull = repo.get_pull(pr_number) + all_commits = [commit.sha for commit in pull.get_commits()] + all_commits.reverse() + + # Prepare the found_builds list + found_builds = [] + viewer_bucket = gcs_client.bucket("rerun-web-viewer") + builds_bucket = gcs_client.bucket("rerun-builds") + + for commit in all_commits: + commit_short = commit[:7] + print("Checking commit: {}...".format(commit_short)) + + found: Dict[str, Any] = {} + + # Check if there is a hosted app for the current commit + app_blob = viewer_bucket.blob(f"commit/{commit_short}/index.html") + if app_blob.exists(): + print("Found web assets commit: {}".format(commit_short)) + found["hosted_app"] = f"https://app.rerun.io/commit/{commit_short}" + + # Check if there are benchmark results + 
bench_blob = builds_bucket.blob(f"commit/{commit_short}/bench_results.txt") + if bench_blob.exists(): + print("Found benchmark results: {}".format(commit_short)) + found["bench_results"] = f"https://build.rerun.io/{bench_blob.name}" + + # Check if there are notebook results + notebook_blobs = list(builds_bucket.list_blobs(prefix=f"commit/{commit_short}/notebooks")) + notebooks = [f"https://build.rerun.io/{blob.name}" for blob in notebook_blobs if blob.name.endswith(".html")] + if notebooks: + print("Found notebooks for commit: {}".format(commit_short)) + found["notebooks"] = notebooks + + # Get the wheel files for the commit + wheel_blobs = list(builds_bucket.list_blobs(prefix=f"commit/{commit_short}/wheels")) + wheels = [f"https://build.rerun.io/{blob.name}" for blob in wheel_blobs if blob.name.endswith(".whl")] + if wheels: + print("Found wheels for commit: {}".format(commit_short)) + found["wheels"] = wheels + + if found: + found["commit"] = commit_short + found_builds.append(found) + + template_path = os.path.join(os.path.dirname(os.path.relpath(__file__)), "templates/pr_results_summary.html") + + # Render the Jinja template with the found_builds variable + with open(template_path) as f: + template = Template(f.read()) + + buffer = io.BytesIO(template.render(found_builds=found_builds, pr_number=pr_number).encode("utf-8")) + buffer.seek(0) + + if upload: + upload_blob = builds_bucket.blob(f"pull_request/{pr_number}/index.html") + print("Uploading results to {}".format(upload_blob.name)) + upload_blob.upload_from_file(buffer, content_type="text/html") + + # If there's a {{ pr-build-summary }} string in the PR description, replace it with a link to the summary page. 
+ pr_description = pull.body + new_description = pr_description.replace("{{ pr-build-summary }}", f"https://build.rerun.io/pr/{pr_number}") + pull.edit(body=new_description) + + +def main() -> None: + parser = argparse.ArgumentParser(description="Generate a PR summary page") + parser.add_argument("--github-token", required=True, help="GitHub token") + parser.add_argument("--github-repository", required=True, help="GitHub repository") + parser.add_argument("--pr-number", required=True, type=int, help="PR number") + parser.add_argument("--upload", action="store_true", help="Upload the summary page to GCS") + args = parser.parse_args() + + generate_pr_summary(args.github_token, args.github_repository, args.pr_number, args.upload) + + +if __name__ == "__main__": + main() diff --git a/scripts/generate_prerelease_pip_index.py b/scripts/generate_prerelease_pip_index.py new file mode 100644 index 000000000000..64dee36e358e --- /dev/null +++ b/scripts/generate_prerelease_pip_index.py @@ -0,0 +1,71 @@ +""" +Script to generate a minimal pip index. + +This script use the google cloud storage APIs to find and link to the builds +associated with a given commit. + +This is expected to be run by the `reusable_pip_index.yml` GitHub workflow. 
+ +Requires the following packages: + pip install google-cloud-storage Jinja2 PyGithub # NOLINT +""" + +import argparse +import io +import os +from typing import Any, Dict + +from google.cloud import storage +from jinja2 import Template + + +def generate_pip_index(commit: str, upload: bool) -> None: + # Initialize the GCS clients + gcs_client = storage.Client() + + # Prepare the found_builds list + found_builds = [] + wheels_bucket = gcs_client.bucket("rerun-builds") + + commit_short = commit[:7] + print("Checking commit: {}...".format(commit_short)) + + found: Dict[str, Any] = {} + + # Get the wheel files for the commit + wheel_blobs = list(wheels_bucket.list_blobs(prefix=f"commit/{commit_short}/wheels")) + wheels = [blob.name.split("/")[-1] for blob in wheel_blobs if blob.name.endswith(".whl")] + if wheels: + print("Found wheels for commit: {}: {}".format(commit_short, wheels)) + found["wheels"] = wheels + + if found: + found["commit"] = commit_short + found_builds.append(found) + + template_path = os.path.join(os.path.dirname(os.path.relpath(__file__)), "templates/pip_index.html") + + # Render the Jinja template with the found_builds variable + with open(template_path) as f: + template = Template(f.read()) + + buffer = io.BytesIO(template.render(found_builds=found_builds).encode("utf-8")) + buffer.seek(0) + + if upload: + upload_blob = wheels_bucket.blob(f"commit/{commit_short}/wheels/index.html") + print("Uploading results to {}".format(upload_blob.name)) + upload_blob.upload_from_file(buffer, content_type="text/html") + + +def main() -> None: + parser = argparse.ArgumentParser(description="Generate a minimal pip index") + parser.add_argument("--commit", required=True, help="Commit SHA") + parser.add_argument("--upload", action="store_true", help="Upload the index to GCS") + args = parser.parse_args() + + generate_pip_index(args.commit, args.upload) + + +if __name__ == "__main__": + main() diff --git a/scripts/lint.py b/scripts/lint.py index 
375e156ed263..b56653670877 100755 --- a/scripts/lint.py +++ b/scripts/lint.py @@ -17,6 +17,7 @@ todo_pattern = re.compile(r"TODO([^(]|$)") debug_format_of_err = re.compile(r"\{\:#?\?\}.*, err") error_match_name = re.compile(r"Err\((\w+)\)") +error_map_err_name = re.compile(r"map_err\(\|(\w+)\|") wasm_caps = re.compile(r"\bWASM\b") nb_prefix = re.compile(r"nb_") @@ -52,7 +53,8 @@ def lint_line(line: str) -> Optional[str]: if "{err:?}" in line or "{err:#?}" in line or debug_format_of_err.search(line): return "Format errors with re_error::format or using Display - NOT Debug formatting!" - if m := re.search(error_match_name, line): + m = re.search(error_map_err_name, line) or re.search(error_match_name, line) + if m: name = m.group(1) # if name not in ("err", "_err", "_"): if name in ("e", "error"): @@ -82,6 +84,9 @@ def test_lint_line() -> None: "if let Err(err) = foo", "if let Err(_err) = foo", "if let Err(_) = foo", + "map_err(|err| …)", + "map_err(|_err| …)", + "map_err(|_| …)", "WASM_FOO env var", "Wasm", "num_instances", @@ -100,6 +105,7 @@ def test_lint_line() -> None: 'eprintln!("{:?}", err)', 'eprintln!("{:#?}", err)', "if let Err(error) = foo", + "map_err(|e| …)", "We use WASM in Rerun", "nb_instances", "inner_nb_instances", diff --git a/scripts/publish_crates.sh b/scripts/publish_crates.sh index 1c52ca6cc140..7a52ccf5e012 100755 --- a/scripts/publish_crates.sh +++ b/scripts/publish_crates.sh @@ -95,11 +95,11 @@ cargo publish $FLAGS -p re_build_build_info cargo publish $FLAGS -p re_log cargo publish $FLAGS -p re_int_histogram cargo publish $FLAGS -p re_error +cargo publish $FLAGS -p re_tuid cargo publish $FLAGS -p re_format cargo publish $FLAGS -p re_string_interner cargo publish $FLAGS -p re_analytics cargo publish $FLAGS -p re_memory -cargo publish $FLAGS -p re_tuid cargo publish $FLAGS -p re_log_types cargo publish $FLAGS -p re_smart_channel cargo publish $FLAGS -p re_log_encoding diff --git a/scripts/run_python_e2e_test.py b/scripts/run_python_e2e_test.py 
index 977d1c7bd0a1..ee2961f604b0 100755 --- a/scripts/run_python_e2e_test.py +++ b/scripts/run_python_e2e_test.py @@ -21,22 +21,22 @@ def main() -> None: parser = argparse.ArgumentParser(description="Logs Objectron data using the Rerun SDK.") - parser.add_argument("--no-build", action="store_true", help="Skip building rerun-sdk") + parser.add_argument("--no-build", action="store_true", help="Skip building depthai-viewer") parser.add_argument("--no-pip-reqs", action="store_true", help="Skip installing pip requirements") if parser.parse_args().no_build: - print("Skipping building rerun-sdk - assuming it is already built and up-to-date!") + print("Skipping building depthai-viewer - assuming it is already built and up-to-date!") else: build_env = os.environ.copy() if "RUST_LOG" in build_env: del build_env["RUST_LOG"] # The user likely only meant it for the actual tests; not the setup print("----------------------------------------------------------") - print("Building rerun-sdk…") + print("Building depthai-viewer…") start_time = time.time() subprocess.Popen(["just", "py-build", "--quiet"], env=build_env).wait() elapsed = time.time() - start_time - print(f"rerun-sdk built in {elapsed:.1f} seconds") + print(f"depthai-viewer built in {elapsed:.1f} seconds") print("") if not parser.parse_args().no_pip_reqs: diff --git a/scripts/templates/pip_index.html b/scripts/templates/pip_index.html new file mode 100644 index 000000000000..624c924601ae --- /dev/null +++ b/scripts/templates/pip_index.html @@ -0,0 +1,42 @@ + + + + + Pip Index + + + + +

Minimal Pip Index

+ + {% for build in found_builds %} +
+

Commit: {{ build.commit }}

+ {% if build.wheels %} +
+

Wheels:

+
    + {% for wheel in build.wheels %} +
  • {{ wheel }}
  • + {% endfor %} +
+
+ {% endif %} +
+ {% endfor %} + + + diff --git a/scripts/templates/pr_results_summary.html b/scripts/templates/pr_results_summary.html new file mode 100644 index 000000000000..542171ad6635 --- /dev/null +++ b/scripts/templates/pr_results_summary.html @@ -0,0 +1,65 @@ + + + + + Build Summary + + + + +

Build Summary for #{{ pr_number }}

+ + {% for build in found_builds %} +
+

Commit: {{ build.commit }}

+ {% if build.hosted_app %} +
+

Hosted App:

+ {{ build.hosted_app }} +
+ {% endif %} + {% if build.bench_results %} +

Benchmark Results

+ {% endif %} + {% if build.notebooks %} +
+

Notebooks:

+ +
+ {% endif %} + {% if build.wheels %} +
+

Wheels:

+ +
+ {% endif %} +
+ {% endfor %} + + + diff --git a/scripts/version_util.py b/scripts/version_util.py index 52f555966f23..e694f24c0b80 100755 --- a/scripts/version_util.py +++ b/scripts/version_util.py @@ -7,12 +7,10 @@ --patch_prerelease: This will patch the version in rerun/Cargo.toml with the current git sha. This is intended to create a prerelease version for continuous releases. - --check_version: This will check that the version in rerun/Cargo.toml matches the version in the tag name from - `GITHUB_REF_NAME`. This is intended to be used to check that the version number in Cargo.toml is correct before - creating a release on PyPI. If the versions don't match, an exception will be raised. + --bare_cargo_version Outputs the bare cargo version. This is helpful for setting an environment variable, such as: + EXPECTED_VERSION=$(python3 scripts/version_util.py --bare_cargo_version) """ -import os import re import subprocess import sys @@ -24,9 +22,6 @@ # A regex to match the version number in Cargo.toml as SemVer, e.g., 1.2.3-alpha.0 CARGO_VERSION_REGEX: Final = r"^version\s*=\s*\"(.+)\"$" -# A regex to match the version number in the tag name, e.g. v1.2.3 -VERSION_TAG_REGEX: Final = r"^v(.+)$" - def get_cargo_version(cargo_toml: str) -> semver.VersionInfo: """Using regex, parse the version number from Cargo.toml.""" @@ -44,24 +39,6 @@ def get_git_sha() -> str: return subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode("utf-8").strip() -def get_ref_name_version() -> semver.VersionInfo: - """Return the parsed tag version from the GITHUB_REF_NAME environment variable.""" - - # This is the branch, or tag name that triggered the workflow. 
- ref_name = os.environ.get("GITHUB_REF_NAME") - - if ref_name is None: - raise Exception("GITHUB_REF_NAME environment variable not set") - - # Extract the version number from the tag name - match = re.search(VERSION_TAG_REGEX, ref_name) - - if match is None: - raise Exception("Could not find valid version number in GITHUB_REF_NAME") - - return semver.parse_version_info(match.groups()[0]) - - def patch_cargo_version(cargo_toml: str, new_version: str) -> str: """Patch the version number in Cargo.toml with `new_version`.""" @@ -99,13 +76,6 @@ def main() -> None: with open("Cargo.toml", "w") as f: f.write(new_cargo_toml) - elif sys.argv[1] == "--check_version": - ref_version = get_ref_name_version() - if cargo_version != ref_version: - raise Exception( - f"Version number in Cargo.toml ({cargo_version}) does not match tag version ({ref_version})" - ) - print(f"Version numbers match: {cargo_version} == {ref_version}") elif sys.argv[1] == "--bare_cargo_version": # Print the bare cargo version. NOTE: do not add additional formatting here. This output # is expected to be fed into an environment variable. diff --git a/web_viewer/manifest.json b/web_viewer/manifest.json index 5f31d7b55c51..1977521d77af 100644 --- a/web_viewer/manifest.json +++ b/web_viewer/manifest.json @@ -1,5 +1,5 @@ { - "name": "Rerun Viewer", + "name": "Depthai Viewer", "short_name": "rerun-viewer-pwa", "icons": [ {