diff --git a/.cargo/config_fast_builds.toml b/.cargo/config_fast_builds.toml index ea6cacee2a1cd7..057a23f05f93c0 100644 --- a/.cargo/config_fast_builds.toml +++ b/.cargo/config_fast_builds.toml @@ -21,7 +21,7 @@ # Mold is a newer linker written by one of the authors of LLD. It boasts even greater performance, specifically # through its high parallelism, though it only supports Linux. # -# Mold is disabled by default in this file. If you wish to enable it, follow the installation instructions for +# Mold is disabled by default in this file. If you wish to enable it, follow the installation instructions for # your corresponding target, disable LLD by commenting out its `-Clink-arg=...` line, and enable Mold by # *uncommenting* its `-Clink-arg=...` line. # @@ -32,7 +32,7 @@ # # # Nightly configuration # -# Be warned that the following features require nightly Rust, which is expiremental and may contain bugs. If you +# Be warned that the following features require nightly Rust, which is experimental and may contain bugs. If you # are having issues, skip this section and use stable Rust instead. # # There are a few unstable features that can improve performance. To use them, first install nightly Rust @@ -51,7 +51,7 @@ # crates to share monomorphized generic code, so they do not duplicate work. # # In other words, instead of crate 1 generating `Foo` and crate 2 generating `Foo` separately, -# only one crate generates `Foo` and the other adds on to the pre-exiting work. +# only one crate generates `Foo` and the other adds on to the pre-existing work. # # Note that you may have some issues with this flag on Windows. If compiling fails due to the 65k symbol limit, # you may have to disable this setting. For more information and possible solutions to this error, see @@ -150,7 +150,7 @@ rustflags = [ # "-Zthreads=0", ] -# Optional: Uncommenting the following improves compile times, but reduces the amount of debug info to 'line number tables only' +# Optional: Uncommenting the following improves compile times, but reduces the amount of debug info to 'line number tables only'. # In most cases the gains are negligible, but if you are on macOS and have slow compile times you should see significant gains. # [profile.dev] # debug = 1 diff --git a/.github/ISSUE_TEMPLATE/performance_regression.md b/.github/ISSUE_TEMPLATE/performance_regression.md index c1ff69775c7214..0f8cd00eeb4dd1 100644 --- a/.github/ISSUE_TEMPLATE/performance_regression.md +++ b/.github/ISSUE_TEMPLATE/performance_regression.md @@ -2,7 +2,7 @@ name: Performance Regression about: Bevy running slowly after upgrading? Report a performance regression. 
title: '' -labels: C-Bug, C-Performance, C-Regression, S-Needs-Triage +labels: C-Bug, C-Performance, P-Regression, S-Needs-Triage assignees: '' --- diff --git a/.github/example-run/alien_cake_addict.ron b/.github/example-run/alien_cake_addict.ron deleted file mode 100644 index a8113dad0434d7..00000000000000 --- a/.github/example-run/alien_cake_addict.ron +++ /dev/null @@ -1,5 +0,0 @@ -( - events: [ - (300, AppExit), - ] -) diff --git a/.github/example-run/breakout.ron b/.github/example-run/breakout.ron deleted file mode 100644 index c12f84f0a05090..00000000000000 --- a/.github/example-run/breakout.ron +++ /dev/null @@ -1,9 +0,0 @@ -( - setup: ( - fixed_frame_time: Some(0.03), - ), - events: [ - (200, Screenshot), - (900, AppExit), - ] -) diff --git a/.github/example-run/contributors.ron b/.github/example-run/contributors.ron deleted file mode 100644 index 2d50dc7fd03c42..00000000000000 --- a/.github/example-run/contributors.ron +++ /dev/null @@ -1,5 +0,0 @@ -( - events: [ - (900, AppExit), - ] -) diff --git a/.github/example-run/load_gltf.ron b/.github/example-run/load_gltf.ron deleted file mode 100644 index 1ab6c9705c988d..00000000000000 --- a/.github/example-run/load_gltf.ron +++ /dev/null @@ -1,9 +0,0 @@ -( - setup: ( - frame_time: Some(0.03), - ), - events: [ - (100, Screenshot), - (300, AppExit), - ] -) diff --git a/.github/example-run/testbed_2d.ron b/.github/example-run/testbed_2d.ron new file mode 100644 index 00000000000000..467e2fe98f99f5 --- /dev/null +++ b/.github/example-run/testbed_2d.ron @@ -0,0 +1,12 @@ +( + events: [ + (100, Screenshot), + (200, Custom("switch_scene")), + (300, Screenshot), + (400, Custom("switch_scene")), + (500, Screenshot), + (600, Custom("switch_scene")), + (700, Screenshot), + (800, AppExit), + ] +) diff --git a/.github/example-run/testbed_3d.ron b/.github/example-run/testbed_3d.ron new file mode 100644 index 00000000000000..467e2fe98f99f5 --- /dev/null +++ b/.github/example-run/testbed_3d.ron @@ -0,0 +1,12 @@ +( + events: [ + (100, Screenshot), + (200, Custom("switch_scene")), + (300, Screenshot), + (400, Custom("switch_scene")), + (500, Screenshot), + (600, Custom("switch_scene")), + (700, Screenshot), + (800, AppExit), + ] +) diff --git a/.github/example-run/testbed_ui.ron b/.github/example-run/testbed_ui.ron new file mode 100644 index 00000000000000..579f791d664009 --- /dev/null +++ b/.github/example-run/testbed_ui.ron @@ -0,0 +1,6 @@ +( + events: [ + (100, Screenshot), + (200, AppExit), + ] +) diff --git a/.github/workflows/action-on-PR-labeled.yml b/.github/workflows/action-on-PR-labeled.yml index 18b8cb7b7f73af..9887494a487d53 100644 --- a/.github/workflows/action-on-PR-labeled.yml +++ b/.github/workflows/action-on-PR-labeled.yml @@ -14,7 +14,7 @@ permissions: jobs: comment-on-breaking-change-label: runs-on: ubuntu-latest - if: github.event.label.name == 'C-Breaking-Change' && !contains(github.event.pull_request.body, '## Migration Guide') + if: github.event.label.name == 'M-Needs-Migration-Guide' && !contains(github.event.pull_request.body, '## Migration Guide') steps: - uses: actions/github-script@v7 with: diff --git a/.github/workflows/ci-comment-failures.yml b/.github/workflows/ci-comment-failures.yml index 6ec7f52fb90e1b..d926390993e280 100644 --- a/.github/workflows/ci-comment-failures.yml +++ b/.github/workflows/ci-comment-failures.yml @@ -30,7 +30,7 @@ jobs: var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ owner: context.repo.owner, repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, + run_id: ${{ 
github.event.workflow_run.id }}, }); var matchArtifacts = artifacts.data.artifacts.filter((artifact) => { return artifact.name == "missing-examples" @@ -88,7 +88,7 @@ jobs: var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ owner: context.repo.owner, repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, + run_id: ${{ github.event.workflow_run.id }}, }); var matchArtifacts = artifacts.data.artifacts.filter((artifact) => { return artifact.name == "missing-features" @@ -146,7 +146,7 @@ jobs: var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ owner: context.repo.owner, repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, + run_id: ${{ github.event.workflow_run.id }}, }); var matchArtifacts = artifacts.data.artifacts.filter((artifact) => { return artifact.name == "msrv" @@ -178,3 +178,64 @@ jobs: issue_number: issue_number, body: 'Your PR increases Bevy Minimum Supported Rust Version. Please update the `rust-version` field in the root Cargo.toml file.' }); + + make-macos-screenshots-available: + runs-on: ubuntu-latest + timeout-minutes: 30 + outputs: + branch-name: ${{ steps.branch-name.outputs.result }} + steps: + - name: 'Download artifact' + id: find-artifact + uses: actions/github-script@v7 + with: + result-encoding: string + script: | + var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifacts = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "screenshots-macos" + }); + if (matchArtifacts.length == 0) { return "false" } + var matchArtifact = matchArtifacts[0]; + var download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/screenshots-macos.zip', Buffer.from(download.data)); + return "true" + - name: prepare artifact folder + run: | + unzip screenshots-macos.zip + mkdir screenshots + mv screenshots-* screenshots/ + - name: save screenshots + uses: actions/upload-artifact@v4 + with: + name: screenshots-macos + path: screenshots + - name: branch name + id: branch-name + run: | + if [ -f PR ]; then + echo "result=PR-$(cat PR)-${{ github.event.workflow_run.head_branch }}" >> $GITHUB_OUTPUT + else + echo "result=${{ github.event.workflow_run.head_branch }}" >> $GITHUB_OUTPUT + fi + + compare-macos-screenshots: + name: Compare macOS screenshots + needs: [make-macos-screenshots-available] + uses: ./.github/workflows/send-screenshots-to-pixeleagle.yml + with: + commit: ${{ github.event.workflow_run.head_sha }} + branch: ${{ needs.make-macos-screenshots-available.outputs.branch-name }} + artifact: screenshots-macos + os: macos + secrets: inherit diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0303f1dc9ca594..74bf325d8aa443 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -72,8 +72,7 @@ jobs: run: cargo run -p ci -- lints miri: - # Explicity use macOS 14 to take advantage of M1 chip. 
- runs-on: macos-14 + runs-on: macos-latest timeout-minutes: 60 steps: - uses: actions/checkout@v4 @@ -128,6 +127,31 @@ jobs: # See tools/ci/src/main.rs for the commands this runs run: cargo run -p ci -- compile + check-compiles-no-std: + runs-on: ubuntu-latest + timeout-minutes: 30 + needs: ci + steps: + - uses: actions/checkout@v4 + - uses: actions/cache@v4 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + crates/bevy_ecs_compile_fail_tests/target/ + crates/bevy_reflect_compile_fail_tests/target/ + key: ${{ runner.os }}-cargo-check-compiles-no-std-${{ hashFiles('**/Cargo.toml') }} + - uses: dtolnay/rust-toolchain@stable + with: + targets: x86_64-unknown-none + - name: Install Linux dependencies + uses: ./.github/actions/install-linux-deps + - name: Check Compile + run: cargo run -p ci -- compile-check-no-std + build-wasm: runs-on: ubuntu-latest timeout-minutes: 30 @@ -206,10 +230,10 @@ jobs: - name: Taplo info if: failure() run: | - echo 'To fix toml fmt, please run `taplo fmt`' - echo 'To check for a diff, run `taplo fmt --check --diff' + echo 'To fix toml fmt, please run `taplo fmt`.' + echo 'To check for a diff, run `taplo fmt --check --diff`.' echo 'You can find taplo here: https://taplo.tamasfe.dev/' - echo 'Or if you use VSCode, use the `Even Better Toml` extension with 2 spaces' + echo 'Or if you use VSCode, use the `Even Better Toml` extension.' echo 'You can find the extension here: https://marketplace.visualstudio.com/items?itemName=tamasfe.even-better-toml' typos: @@ -218,7 +242,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Check for typos - uses: crate-ci/typos@v1.24.6 + uses: crate-ci/typos@v1.28.2 - name: Typos info if: failure() run: | @@ -228,10 +252,8 @@ jobs: echo 'if you use VSCode, you can also install `Typos Spell Checker' echo 'You can find the extension here: https://marketplace.visualstudio.com/items?itemName=tekumara.typos-vscode' - run-examples-macos-metal: - # Explicity use macOS 14 to take advantage of M1 chip. - runs-on: macos-14 + runs-on: macos-latest timeout-minutes: 30 steps: - uses: actions/checkout@v4 @@ -239,10 +261,6 @@ jobs: - name: Disable audio # Disable audio through a patch. 
on github m1 runners, audio timeouts after 15 minutes run: git apply --ignore-whitespace tools/example-showcase/disable-audio.patch - - name: Build bevy - # this uses the same command as when running the example to ensure build is reused - run: | - TRACE_CHROME=trace-alien_cake_addict.json CI_TESTING_CONFIG=.github/example-run/alien_cake_addict.ron cargo build --example alien_cake_addict --features "bevy_ci_testing,trace,trace_chrome" - name: Run examples run: | for example in .github/example-run/*.ron; do @@ -263,6 +281,10 @@ jobs: with: name: example-traces-macos path: traces + - name: Save PR number + if: ${{ github.event_name == 'pull_request' }} + run: | + echo ${{ github.event.number }} > ./screenshots/PR - name: save screenshots uses: actions/upload-artifact@v4 with: @@ -288,7 +310,9 @@ jobs: ~/.cargo/git/db/ target/ key: ${{ runner.os }}-check-doc-${{ hashFiles('**/Cargo.toml') }} - - uses: dtolnay/rust-toolchain@stable + - uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.NIGHTLY_TOOLCHAIN }} - name: Install Linux dependencies uses: ./.github/actions/install-linux-deps with: @@ -299,7 +323,7 @@ jobs: run: cargo run -p ci -- doc env: CARGO_INCREMENTAL: 0 - RUSTFLAGS: "-C debuginfo=0" + RUSTFLAGS: "-C debuginfo=0 --cfg docsrs_dep" # This currently report a lot of false positives # Enable it again once it's fixed - https://github.com/bevyengine/bevy/issues/1983 # - name: Installs cargo-deadlinks diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index b9e8959df08c8c..d187165a763e9c 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -47,7 +47,7 @@ jobs: - uses: dtolnay/rust-toolchain@stable - name: Set up JDK 17 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '17' distribution: 'temurin' diff --git a/.github/workflows/send-screenshots-to-pixeleagle.yml b/.github/workflows/send-screenshots-to-pixeleagle.yml new file mode 100644 index 00000000000000..4372d75ec865ad --- /dev/null +++ b/.github/workflows/send-screenshots-to-pixeleagle.yml @@ -0,0 +1,109 @@ +name: Send Screenshots to Pixel Eagle + +on: + workflow_call: + inputs: + artifact: + required: true + type: string + commit: + required: true + type: string + branch: + required: true + type: string + os: + required: true + type: string + +env: + # Unfortunately, we can't check secrets in `if:` conditionals. However, Github's own documentation + # suggests a workaround: Putting the secret in an environment variable, and checking that instead. + PIXELEAGLE_TOKEN_EXISTS: ${{ secrets.PIXELEAGLE_TOKEN != '' }} + +jobs: + send-to-pixel-eagle: + name: Send screenshots to Pixel Eagle + runs-on: ubuntu-24.04 + # Pixel Eagle is irrelevant for most forks, even of those that allow workflows to run. Thus, we + # disable this job for any forks. Any forks where Pixel Eagle is relevant can comment out the + # `if:` conditional below. + if: ${{ github.repository == 'bevyengine/bevy' }} + steps: + - name: Notify user on non-existent token + if: ${{ ! fromJSON(env.PIXELEAGLE_TOKEN_EXISTS) }} + run: | + echo "The PIXELEAGLE_TOKEN secret does not exist, so uploading screenshots to Pixel Eagle was skipped." 
>> $GITHUB_STEP_SUMMARY + + - name: Download artifact + if: ${{ fromJSON(env.PIXELEAGLE_TOKEN_EXISTS) }} + uses: actions/download-artifact@v4 + with: + pattern: ${{ inputs.artifact }} + + - name: Send to Pixel Eagle + if: ${{ fromJSON(env.PIXELEAGLE_TOKEN_EXISTS) }} + env: + project: B04F67C0-C054-4A6F-92EC-F599FEC2FD1D + run: | + # Create a new run with its associated metadata + metadata='{"os":"${{ inputs.os }}", "commit": "${{ inputs.commit }}", "branch": "${{ inputs.branch }}"}' + run=`curl https://pixel-eagle.vleue.com/$project/runs --json "$metadata" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} | jq '.id'` + + SAVEIFS=$IFS + + cd ${{ inputs.artifact }} + + # Read the hashes of the screenshot for fast comparison when they are equal + IFS=$'\n' + # Build a json array of screenshots and their hashes + hashes='['; + for screenshot in $(find . -type f -name "*.png"); + do + name=${screenshot:14} + echo $name + hash=`shasum -a 256 $screenshot | awk '{print $1}'` + hashes="$hashes [\"$name\",\"$hash\"]," + done + hashes=`echo $hashes | rev | cut -c 2- | rev` + hashes="$hashes]" + + IFS=$SAVEIFS + + # Upload screenshots with unknown hashes + curl https://pixel-eagle.vleue.com/$project/runs/$run/hashes --json "$hashes" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} | jq '.[]|[.name] | @tsv' | + while IFS=$'\t' read -r name; do + name=`echo $name | tr -d '"'` + echo "Uploading $name" + curl https://pixel-eagle.vleue.com/$project/runs/$run/screenshots -F "data=@./screenshots-$name" -F "screenshot=$name" --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} + echo + done + + IFS=$SAVEIFS + + cd .. + + # Trigger comparison with the main branch on the same os + curl https://pixel-eagle.vleue.com/$project/runs/$run/compare/auto --json '{"os":"", "branch": "main"}' --oauth2-bearer ${{ secrets.PIXELEAGLE_TOKEN }} > pixeleagle.json + + # Log results + compared_with=`cat pixeleagle.json | jq '.to'` + + status=0 + missing=`cat pixeleagle.json | jq '.missing | length'` + if [ ! $missing -eq 0 ]; then + echo "There are $missing missing screenshots" + echo "::warning title=$missing missing screenshots on ${{ inputs.os }}::https://pixel-eagle.vleue.com/$project/runs/$run/compare/$compared_with" + status=1 + fi + + diff=`cat pixeleagle.json | jq '.diff | length'` + if [ ! 
$diff -eq 0 ]; then + echo "There are $diff screenshots with a difference" + echo "::warning title=$diff different screenshots on ${{ inputs.os }}::https://pixel-eagle.vleue.com/$project/runs/$run/compare/$compared_with" + status=1 + fi + + echo "created run $run: https://pixel-eagle.vleue.com/$project/runs/$run/compare/$compared_with" + + exit $status diff --git a/.github/workflows/validation-jobs.yml b/.github/workflows/validation-jobs.yml index 3372fc1b0353c8..4c576ac1e176a5 100644 --- a/.github/workflows/validation-jobs.yml +++ b/.github/workflows/validation-jobs.yml @@ -31,7 +31,7 @@ jobs: with: path: | target - key: ${{ runner.os }}-ios-install-${{ matrix.toolchain }}-${{ hashFiles('**/Cargo.lock') }} + key: ${{ runner.os }}-ios-install-${{ hashFiles('**/Cargo.lock') }} # TODO: remove x86 target once it always run on arm GitHub runners - name: Add iOS targets @@ -50,7 +50,7 @@ jobs: - uses: dtolnay/rust-toolchain@stable - name: Set up JDK 17 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '17' distribution: 'temurin' @@ -78,8 +78,9 @@ jobs: run: cd examples/mobile/android_example && chmod +x gradlew && ./gradlew build run-examples-linux-vulkan: - if: ${{ github.event_name == 'merge_group' }} - runs-on: ubuntu-latest + # also run when pushed to main to update reference screenshots + if: ${{ github.event_name != 'pull_request' }} + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: - uses: actions/checkout@v4 @@ -100,10 +101,6 @@ jobs: target/ key: ${{ runner.os }}-cargo-run-examples-${{ hashFiles('**/Cargo.toml') }} - uses: dtolnay/rust-toolchain@stable - - name: Build bevy - # this uses the same command as when running the example to ensure build is reused - run: | - TRACE_CHROME=trace-alien_cake_addict.json CI_TESTING_CONFIG=.github/example-run/alien_cake_addict.ron cargo build --example alien_cake_addict --features "bevy_ci_testing,trace,trace_chrome" - name: Run examples run: | for example in .github/example-run/*.ron; do @@ -135,18 +132,25 @@ jobs: name: example-run-linux path: example-run/ + compare-linux-screenshots: + name: Compare Linux screenshots + needs: [run-examples-linux-vulkan] + uses: ./.github/workflows/send-screenshots-to-pixeleagle.yml + with: + commit: ${{ github.sha }} + branch: ${{ github.ref_name }} + artifact: screenshots-linux + os: linux + secrets: inherit + run-examples-on-windows-dx12: - if: ${{ github.event_name == 'merge_group' }} + # also run when pushed to main to update reference screenshots + if: ${{ github.event_name != 'pull_request' }} runs-on: windows-latest timeout-minutes: 30 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable - - name: Build bevy - shell: bash - # this uses the same command as when running the example to ensure build is reused - run: | - WGPU_BACKEND=dx12 TRACE_CHROME=trace-alien_cake_addict.json CI_TESTING_CONFIG=.github/example-run/alien_cake_addict.ron cargo build --example alien_cake_addict --features "bevy_ci_testing,trace,trace_chrome" - name: Run examples shell: bash run: | @@ -181,7 +185,7 @@ jobs: run-examples-on-wasm: if: ${{ github.event_name == 'merge_group' }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 60 steps: - uses: actions/checkout@v4 @@ -220,7 +224,7 @@ jobs: - name: First Wasm build run: | - cargo build --release --example ui --target wasm32-unknown-unknown + cargo build --release --example testbed_ui --target wasm32-unknown-unknown - name: Run examples shell: bash diff --git a/.github/workflows/weekly.yml 
b/.github/workflows/weekly.yml index 8c78f165feb493..f1d621cde9e16d 100644 --- a/.github/workflows/weekly.yml +++ b/.github/workflows/weekly.yml @@ -10,12 +10,29 @@ on: env: CARGO_TERM_COLOR: always +# The jobs listed here are intentionally skipped when running on forks, for a number of reasons: +# +# * Scheduled workflows run on the base/default branch, with no way (currently) to change this. On +# forks, the base/default branch is usually kept in sync with the main Bevy repository, meaning +# that running this workflow on forks would just be a waste of resources. +# +# * Even if there was a way to change the branch that a scheduled workflow runs on, forks default +# to not having an issue tracker. +# +# * Even in the event that a fork's issue tracker is enabled, most users probably don't want to +# receive automated issues in the event of a compilation failure. +# +# Because of these reasons, this workflow is irrelevant for 99% of forks. Thus, the jobs here will +# be skipped when running on any repository that isn't the main Bevy repository. + jobs: test: strategy: matrix: os: [windows-latest, ubuntu-latest, macos-latest] runs-on: ${{ matrix.os }} + # Disable this job when running on a fork. + if: github.repository == 'bevyengine/bevy' timeout-minutes: 30 steps: - uses: actions/checkout@v4 @@ -31,6 +48,8 @@ jobs: lint: runs-on: ubuntu-latest + # Disable this job when running on a fork. + if: github.repository == 'bevyengine/bevy' timeout-minutes: 30 steps: - uses: actions/checkout@v4 @@ -48,6 +67,8 @@ jobs: check-compiles: runs-on: ubuntu-latest + # Disable this job when running on a fork. + if: github.repository == 'bevyengine/bevy' timeout-minutes: 30 needs: test steps: @@ -59,32 +80,15 @@ jobs: # See tools/ci/src/main.rs for the commands this runs run: cargo run -p ci -- compile - check-doc: - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@beta - - name: Install Linux dependencies - uses: ./.github/actions/install-linux-deps - with: - wayland: true - xkb: true - - name: Build and check docs - # See tools/ci/src/main.rs for the commands this runs - run: cargo run -p ci -- doc - env: - CARGO_INCREMENTAL: 0 - RUSTFLAGS: "-C debuginfo=0" - open-issue: name: Warn that weekly CI fails runs-on: ubuntu-latest - needs: [test, lint, check-compiles, check-doc] + needs: [test, lint, check-compiles] permissions: issues: write - # Use always() so the job doesn't get canceled if any other jobs fail - if: ${{ always() && contains(needs.*.result, 'failure') }} + # We disable this job on forks, because + # Use always() so the job doesn't get canceled if any other jobs fail + if: ${{ github.repository == 'bevyengine/bevy' && always() && contains(needs.*.result, 'failure') }} steps: - name: Create issue run: | @@ -94,7 +98,7 @@ jobs: --jq '.[0].number') if [[ -n $previous_issue_number ]]; then gh issue comment $previous_issue_number \ - --body "Weekly pipeline still fails: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + --body "Weekly pipeline still fails: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" else gh issue create \ --title "$TITLE" \ @@ -106,6 +110,6 @@ jobs: GH_REPO: ${{ github.repository }} TITLE: Main branch fails to compile on Rust beta. LABELS: C-Bug,S-Needs-Triage - BODY: | + BODY: | ## Weekly CI run has failed. 
[The offending run.](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) diff --git a/.gitignore b/.gitignore index 10507748f59a9a..d3b84d9590bb82 100644 --- a/.gitignore +++ b/.gitignore @@ -1,26 +1,33 @@ -/target -crates/*/target +# Rust build artifacts +target +crates/**/target +benches/**/target +tools/**/target **/*.rs.bk + +# Cargo Cargo.lock .cargo/config .cargo/config.toml -/.idea -/.vscode + +# IDE files +.idea +.vscode .zed -/benches/target -/tools/compile_fail_utils/target dxcompiler.dll dxil.dll -# Generated by "examples/scene/scene.rs" -assets/scenes/load_scene_example-new.scn.ron - -# Generated by "examples/window/screenshot.rs" -**/screenshot-*.png - +# Bevy Assets assets/**/*.meta crates/bevy_asset/imported_assets imported_assets +# Bevy Examples example_showcase_config.ron example-showcase-reports/ + +# Generated by "examples/scene/scene.rs" +assets/scenes/load_scene_example-new.scn.ron + +# Generated by "examples/window/screenshot.rs" +**/screenshot-*.png diff --git a/Cargo.toml b/Cargo.toml index 1775b2dd7b7d13..fd61ba79853ba8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,7 @@ keywords = ["game", "engine", "gamedev", "graphics", "bevy"] license = "MIT OR Apache-2.0" repository = "https://github.com/bevyengine/bevy" documentation = "https://docs.rs/bevy" -rust-version = "1.81.0" +rust-version = "1.82.0" [workspace] exclude = [ @@ -40,6 +40,7 @@ semicolon_if_nothing_returned = "warn" type_complexity = "allow" undocumented_unsafe_blocks = "warn" unwrap_or_default = "warn" +needless_lifetimes = "allow" ptr_as_ptr = "warn" ptr_cast_constness = "warn" @@ -80,6 +81,7 @@ semicolon_if_nothing_returned = "warn" type_complexity = "allow" undocumented_unsafe_blocks = "warn" unwrap_or_default = "warn" +needless_lifetimes = "allow" ptr_as_ptr = "warn" ptr_cast_constness = "warn" @@ -100,45 +102,62 @@ unused_qualifications = "warn" [features] default = [ + "android-game-activity", + "android_shared_stdcxx", "animation", "bevy_asset", - "bevy_state", "bevy_audio", "bevy_color", - "bevy_gilrs", - "bevy_scene", - "bevy_winit", "bevy_core_pipeline", + "bevy_gilrs", + "bevy_gizmos", + "bevy_gltf", + "bevy_mesh_picking_backend", "bevy_pbr", "bevy_picking", - "bevy_sprite_picking_backend", - "bevy_ui_picking_backend", - "bevy_gltf", "bevy_render", + "bevy_scene", "bevy_sprite", + "bevy_sprite_picking_backend", + "bevy_state", "bevy_text", "bevy_ui", - "bevy_remote", + "bevy_ui_picking_backend", + "bevy_window", + "bevy_winit", + "custom_cursor", + "default_font", + "hdr", "multi_threaded", "png", - "hdr", - "vorbis", - "x11", - "bevy_gizmos", - "android_shared_stdcxx", - "tonemapping_luts", "smaa_luts", - "default_font", - "webgl2", "sysinfo_plugin", - "android-game-activity", + "tonemapping_luts", + "vorbis", + "webgl2", + "x11", +] + +# Provides an implementation for picking meshes +bevy_mesh_picking_backend = [ + "bevy_picking", + "bevy_internal/bevy_mesh_picking_backend", ] # Provides an implementation for picking sprites -bevy_sprite_picking_backend = ["bevy_picking"] +bevy_sprite_picking_backend = [ + "bevy_picking", + "bevy_internal/bevy_sprite_picking_backend", +] -# Provides an implementation for picking ui -bevy_ui_picking_backend = ["bevy_picking"] +# Provides an implementation for picking UI +bevy_ui_picking_backend = [ + "bevy_picking", + "bevy_internal/bevy_ui_picking_backend", +] + +# Provides a debug overlay for bevy UI +bevy_ui_debug = ["bevy_internal/bevy_ui_debug"] # Force dynamic linking, which improves iterative compile times 
dynamic_linking = ["dep:bevy_dylib", "bevy_internal/dynamic_linking"] @@ -210,6 +229,9 @@ bevy_ui = [ "bevy_ui_picking_backend", ] +# Windowing layer +bevy_window = ["bevy_internal/bevy_window"] + # winit window and input backend bevy_winit = ["bevy_internal/bevy_winit"] @@ -241,38 +263,53 @@ trace_tracy_memory = [ # Tracing support trace = ["bevy_internal/trace"] +# Basis Universal compressed texture support +basis-universal = ["bevy_internal/basis-universal"] + +# BMP image format support +bmp = ["bevy_internal/bmp"] + +# DDS compressed texture support +dds = ["bevy_internal/dds"] + # EXR image format support exr = ["bevy_internal/exr"] +# Farbfeld image format support +ff = ["bevy_internal/ff"] + +# GIF image format support +gif = ["bevy_internal/gif"] + # HDR image format support hdr = ["bevy_internal/hdr"] -# PNG image format support -png = ["bevy_internal/png"] +# KTX2 compressed texture support +ktx2 = ["bevy_internal/ktx2"] -# TGA image format support -tga = ["bevy_internal/tga"] +# ICO image format support +ico = ["bevy_internal/ico"] # JPEG image format support jpeg = ["bevy_internal/jpeg"] -# BMP image format support -bmp = ["bevy_internal/bmp"] +# PNG image format support +png = ["bevy_internal/png"] -# WebP image format support -webp = ["bevy_internal/webp"] +# PNM image format support, includes pam, pbm, pgm and ppm +pnm = ["bevy_internal/pnm"] -# Basis Universal compressed texture support -basis-universal = ["bevy_internal/basis-universal"] +# QOI image format support +qoi = ["bevy_internal/qoi"] -# DDS compressed texture support -dds = ["bevy_internal/dds"] +# TGA image format support +tga = ["bevy_internal/tga"] -# KTX2 compressed texture support -ktx2 = ["bevy_internal/ktx2"] +# TIFF image format support +tiff = ["bevy_internal/tiff"] -# PNM image format support, includes pam, pbm, pgm and ppm -pnm = ["bevy_internal/pnm"] +# WebP image format support +webp = ["bevy_internal/webp"] # For KTX2 supercompression zlib = ["bevy_internal/zlib"] @@ -346,7 +383,7 @@ android_shared_stdcxx = ["bevy_internal/android_shared_stdcxx"] # Enable detailed trace event logging. These trace events are expensive even when off, thus they require compile time opt-in detailed_trace = ["bevy_internal/detailed_trace"] -# Include tonemapping Look Up Tables KTX2 files. If everything is pink, you need to enable this feature or change the `Tonemapping` method on your `Camera2dBundle` or `Camera3dBundle`. +# Include tonemapping Look Up Tables KTX2 files. If everything is pink, you need to enable this feature or change the `Tonemapping` method for your `Camera2d` or `Camera3d`. tonemapping_luts = ["bevy_internal/tonemapping_luts", "ktx2", "zstd"] # Include SMAA Look Up Tables KTX2 Files @@ -381,6 +418,9 @@ pbr_multi_layer_material_textures = [ # Enable support for anisotropy texture in the `StandardMaterial`, at the risk of blowing past the global, per-shader texture limit on older/lower-end GPUs pbr_anisotropy_texture = ["bevy_internal/pbr_anisotropy_texture"] +# Enable support for PCSS, at the risk of blowing past the global, per-shader sampler limit on older/lower-end GPUs +experimental_pbr_pcss = ["bevy_internal/experimental_pbr_pcss"] + # Enable some limitations to be able to use WebGL2. Please refer to the [WebGL2 and WebGPU](https://github.com/bevyengine/bevy/tree/latest/examples#webgl2-and-webgpu) section of the examples README for more information on how to run Wasm builds with WebGPU. 
webgl2 = ["bevy_internal/webgl"] @@ -417,6 +457,12 @@ track_change_detection = ["bevy_internal/track_change_detection"] # Enable function reflection reflect_functions = ["bevy_internal/reflect_functions"] +# Enable winit custom cursor support +custom_cursor = ["bevy_internal/custom_cursor"] + +# Experimental support for nodes that are ignored for UI layouting +ghost_nodes = ["bevy_internal/ghost_nodes"] + [dependencies] bevy_internal = { path = "crates/bevy_internal", version = "0.15.0-dev", default-features = false } @@ -435,15 +481,16 @@ bytemuck = "1.7" bevy_render = { path = "crates/bevy_render", version = "0.15.0-dev", default-features = false } # Needed to poll Task examples futures-lite = "2.0.1" -async-std = "1.12" +async-std = "1.13" crossbeam-channel = "0.5.0" argh = "0.1.12" -thiserror = "1.0" +thiserror = "2.0" event-listener = "5.3.0" hyper = { version = "1", features = ["server", "http1"] } http-body-util = "0.1" anyhow = "1" macro_rules_attribute = "0.2" +accesskit = "0.17" [target.'cfg(not(target_family = "wasm"))'.dev-dependencies] smol = "2" @@ -574,6 +621,17 @@ description = "Renders a glTF mesh in 2D with a custom vertex attribute" category = "2D Rendering" wasm = true +[[example]] +name = "cpu_draw" +path = "examples/2d/cpu_draw.rs" +doc-scrape-examples = true + +[package.metadata.example.cpu_draw] +name = "CPU Drawing" +description = "Manually read/write the pixels of a texture" +category = "2D Rendering" +wasm = true + [[example]] name = "sprite" path = "examples/2d/sprite.rs" @@ -939,6 +997,17 @@ description = "Demonstrates per-pixel motion blur" category = "3D Rendering" wasm = false +[[example]] +name = "order_independent_transparency" +path = "examples/3d/order_independent_transparency.rs" +doc-scrape-examples = true + +[package.metadata.example.order_independent_transparency] +name = "Order Independent Transparency" +description = "Demonstrates how to use OIT" +category = "3D Rendering" +wasm = false + [[example]] name = "tonemapping" path = "examples/3d/tonemapping.rs" @@ -1164,10 +1233,21 @@ setup = [ "curl", "-o", "assets/models/bunny.meshlet_mesh", - "https://raw.githubusercontent.com/JMS55/bevy_meshlet_asset/854eb98353ad94aea1104f355fc24dbe4fda679d/bunny.meshlet_mesh", + "https://raw.githubusercontent.com/JMS55/bevy_meshlet_asset/defbd9b32072624d40d57de7d345c66a9edf5d0b/bunny.meshlet_mesh", ], ] +[[example]] +name = "mesh_ray_cast" +path = "examples/3d/mesh_ray_cast.rs" +doc-scrape-examples = true + +[package.metadata.example.mesh_ray_cast] +name = "Mesh Ray Cast" +description = "Demonstrates ray casting with the `MeshRayCast` system parameter" +category = "3D Rendering" +wasm = true + [[example]] name = "lightmaps" path = "examples/3d/lightmaps.rs" @@ -1188,6 +1268,17 @@ doc-scrape-examples = true hidden = true # Animation +[[example]] +name = "animation_events" +path = "examples/animation/animation_events.rs" +doc-scrape-examples = true + +[package.metadata.example.animation_events] +name = "Animation Events" +description = "Demonstrate how to use animation events" +category = "Animation" +wasm = true + [[example]] name = "animated_fox" path = "examples/animation/animated_fox.rs" @@ -1244,13 +1335,24 @@ category = "Animation" wasm = true [[example]] -name = "cubic_curve" -path = "examples/animation/cubic_curve.rs" +name = "eased_motion" +path = "examples/animation/eased_motion.rs" doc-scrape-examples = true -[package.metadata.example.cubic_curve] -name = "Cubic Curve" -description = "Bezier curve example showing a cube following a cubic curve" 
+[package.metadata.example.eased_motion] +name = "Eased Motion" +description = "Demonstrates the application of easing curves to animate an object" +category = "Animation" +wasm = true + +[[example]] +name = "easing_functions" +path = "examples/animation/easing_functions.rs" +doc-scrape-examples = true + +[package.metadata.example.easing_functions] +name = "Easing Functions" +description = "Showcases the built-in easing functions" category = "Animation" wasm = true @@ -1740,7 +1842,7 @@ wasm = false [package.metadata.example.apply_deferred] name = "Apply System Buffers" -description = "Show how to use `apply_deferred` system" +description = "Show how to use `ApplyDeferred` system" category = "ECS (Entity Component System)" wasm = false @@ -1855,6 +1957,17 @@ description = "Creates a hierarchy of parents and children entities" category = "ECS (Entity Component System)" wasm = false +[[example]] +name = "immutable_components" +path = "examples/ecs/immutable_components.rs" +doc-scrape-examples = true + +[package.metadata.example.immutable_components] +name = "Immutable Components" +description = "Demonstrates the creation and utility of immutable components" +category = "ECS (Entity Component System)" +wasm = false + [[example]] name = "iter_combinations" path = "examples/ecs/iter_combinations.rs" @@ -1921,6 +2034,17 @@ description = "Systems are skipped if their parameters cannot be acquired" category = "ECS (Entity Component System)" wasm = false +[[example]] +name = "fallible_systems" +path = "examples/ecs/fallible_systems.rs" +doc-scrape-examples = true + +[package.metadata.example.fallible_systems] +name = "Fallible Systems" +description = "Systems that return results to handle errors" +category = "ECS (Entity Component System)" +wasm = false + [[example]] name = "startup_system" path = "examples/ecs/startup_system.rs" @@ -2618,6 +2742,28 @@ description = "Test rendering of many UI elements" category = "Stress Tests" wasm = true +[[example]] +name = "many_cameras_lights" +path = "examples/stress_tests/many_cameras_lights.rs" +doc-scrape-examples = true + +[package.metadata.example.many_cameras_lights] +name = "Many Cameras & Lights" +description = "Test rendering of many cameras and lights" +category = "Stress Tests" +wasm = true + +[[example]] +name = "many_components" +path = "examples/stress_tests/many_components.rs" +doc-scrape-examples = true + +[package.metadata.example.many_components] +name = "Many Components (and Entities and Systems)" +description = "Test large ECS systems" +category = "Stress Tests" +wasm = false + [[example]] name = "many_cubes" path = "examples/stress_tests/many_cubes.rs" @@ -2829,6 +2975,17 @@ description = "Demonstrates how to create a node with a border" category = "UI (User Interface)" wasm = true +[[example]] +name = "box_shadow" +path = "examples/ui/box_shadow.rs" +doc-scrape-examples = true + +[package.metadata.example.box_shadow] +name = "Box Shadow" +description = "Demonstrates how to create a node with a shadow" +category = "UI (User Interface)" +wasm = true + [[example]] name = "button" path = "examples/ui/button.rs" @@ -2884,6 +3041,18 @@ description = "Simple example demonstrating overflow behavior" category = "UI (User Interface)" wasm = true +[[example]] +name = "overflow_clip_margin" +path = "examples/ui/overflow_clip_margin.rs" +doc-scrape-examples = true + +[package.metadata.example.overflow_clip_margin] +name = "Overflow Clip Margin" +description = "Simple example demonstrating the OverflowClipMargin style property" +category = "UI (User 
Interface)" +wasm = true + + [[example]] name = "overflow_debug" path = "examples/ui/overflow_debug.rs" @@ -2976,6 +3145,7 @@ wasm = true name = "ghost_nodes" path = "examples/ui/ghost_nodes.rs" doc-scrape-examples = true +required-features = ["ghost_nodes"] [package.metadata.example.ghost_nodes] name = "Ghost Nodes" @@ -3027,17 +3197,6 @@ description = "Demonstrates how to control the relative depth (z-position) of UI category = "UI (User Interface)" wasm = true -[[example]] -name = "ui" -path = "examples/ui/ui.rs" -doc-scrape-examples = true - -[package.metadata.example.ui] -name = "UI" -description = "Illustrates various features of Bevy UI" -category = "UI (User Interface)" -wasm = true - [[example]] name = "ui_scaling" path = "examples/ui/ui_scaling.rs" @@ -3193,6 +3352,17 @@ description = "Demonstrates customizing default window settings" category = "Window" wasm = true +[[example]] +name = "window_drag_move" +path = "examples/window/window_drag_move.rs" +doc-scrape-examples = true + +[package.metadata.example.window_drag_move] +name = "Window Drag Move" +description = "Demonstrates drag move and drag resize without window decoration" +category = "Window" +wasm = false + [[example]] name = "ambiguity_detection" path = "tests/ecs/ambiguity_detection.rs" @@ -3210,11 +3380,11 @@ doc-scrape-examples = true hidden = true [[example]] -name = "minimising" -path = "tests/window/minimising.rs" +name = "minimizing" +path = "tests/window/minimizing.rs" doc-scrape-examples = true -[package.metadata.example.minimising] +[package.metadata.example.minimizing] hidden = true [[example]] @@ -3420,6 +3590,18 @@ description = "Shows how to orbit a static scene using pitch, yaw, and roll." category = "Camera" wasm = true +[[example]] +name = "2d_screen_shake" +path = "examples/camera/2d_screen_shake.rs" +doc-scrape-examples = true + +[package.metadata.example.2d_screen_shake] +name = "Screen Shake" +description = "A simple 2D screen shake effect" +category = "Camera" +wasm = true + + [package.metadata.example.fps_overlay] name = "FPS overlay" description = "Demonstrates FPS overlay" @@ -3508,6 +3690,7 @@ wasm = true name = "client" path = "examples/remote/client.rs" doc-scrape-examples = true +required-features = ["bevy_remote"] [package.metadata.example.client] name = "client" @@ -3519,6 +3702,7 @@ wasm = false name = "server" path = "examples/remote/server.rs" doc-scrape-examples = true +required-features = ["bevy_remote"] [package.metadata.example.server] name = "server" @@ -3605,6 +3789,18 @@ description = "Demonstrates how to rotate the skybox and the environment map sim category = "3D Rendering" wasm = false +[[example]] +name = "mesh_picking" +path = "examples/picking/mesh_picking.rs" +doc-scrape-examples = true +required-features = ["bevy_mesh_picking_backend"] + +[package.metadata.example.mesh_picking] +name = "Mesh Picking" +description = "Demonstrates picking meshes" +category = "Picking" +wasm = true + [[example]] name = "simple_picking" path = "examples/picking/simple_picking.rs" @@ -3645,6 +3841,7 @@ wasm = true name = "pcss" path = "examples/3d/pcss.rs" doc-scrape-examples = true +required-features = ["experimental_pbr_pcss"] [package.metadata.example.pcss] name = "Percentage-closer soft shadows" @@ -3663,6 +3860,17 @@ description = "Shows how to use animation clips to animate UI properties" category = "Animation" wasm = true +[[example]] +name = "shader_material_bindless" +path = "examples/shader/shader_material_bindless.rs" +doc-scrape-examples = true + 
+[package.metadata.example.shader_material_bindless] +name = "Material - Bindless" +description = "Demonstrates how to make materials that use bindless textures" +category = "Shaders" +wasm = true + [profile.wasm-release] inherits = "release" opt-level = "z" @@ -3694,3 +3902,36 @@ name = "Monitor info" description = "Displays information about available monitors (displays)." category = "Window" wasm = false + +# Testbed +[[example]] +name = "testbed_2d" +path = "examples/testbed/2d.rs" +doc-scrape-examples = true + +[package.metadata.example.testbed_2d] +hidden = true + +[[example]] +name = "testbed_3d" +path = "examples/testbed/3d.rs" +doc-scrape-examples = true + +[package.metadata.example.testbed_3d] +hidden = true + +[[example]] +name = "testbed_ui" +path = "examples/testbed/ui.rs" +doc-scrape-examples = true + +[package.metadata.example.testbed_ui] +hidden = true + +[[example]] +name = "testbed_ui_layout_rounding" +path = "examples/testbed/ui_layout_rounding.rs" +doc-scrape-examples = true + +[package.metadata.example.testbed_ui_layout_rounding] +hidden = true diff --git a/README.md b/README.md index a8018aa0daad2c..be1bcf6bfec92b 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,7 @@ Before contributing or participating in discussions with the community, you shou ### Contributing -If you'd like to help build Bevy, check out the **[Contributor's Guide](https://github.com/bevyengine/bevy/blob/main/CONTRIBUTING.md)**. +If you'd like to help build Bevy, check out the **[Contributor's Guide](https://bevyengine.org/learn/contribute/introduction)**. For simple problems, feel free to [open an issue](https://github.com/bevyengine/bevy/issues) or [PR](https://github.com/bevyengine/bevy/pulls) and tackle it yourself! diff --git a/assets/animation_graphs/Fox.animgraph.ron b/assets/animation_graphs/Fox.animgraph.ron index cf87b1400e3b21..e9d6f4f9cf19c0 100644 --- a/assets/animation_graphs/Fox.animgraph.ron +++ b/assets/animation_graphs/Fox.animgraph.ron @@ -2,27 +2,27 @@ graph: ( nodes: [ ( - clip: None, + node_type: Blend, mask: 0, weight: 1.0, ), ( - clip: None, + node_type: Blend, mask: 0, - weight: 0.5, + weight: 1.0, ), ( - clip: Some(AssetPath("models/animated/Fox.glb#Animation0")), + node_type: Clip(AssetPath("models/animated/Fox.glb#Animation0")), mask: 0, weight: 1.0, ), ( - clip: Some(AssetPath("models/animated/Fox.glb#Animation1")), + node_type: Clip(AssetPath("models/animated/Fox.glb#Animation1")), mask: 0, weight: 1.0, ), ( - clip: Some(AssetPath("models/animated/Fox.glb#Animation2")), + node_type: Clip(AssetPath("models/animated/Fox.glb#Animation2")), mask: 0, weight: 1.0, ), diff --git a/assets/models/GltfPrimitives/gltf_primitives.glb b/assets/models/GltfPrimitives/gltf_primitives.glb index 9a53f99cfea0dc..81a13005475b1a 100644 Binary files a/assets/models/GltfPrimitives/gltf_primitives.glb and b/assets/models/GltfPrimitives/gltf_primitives.glb differ diff --git a/assets/scenes/load_scene_example.scn.ron b/assets/scenes/load_scene_example.scn.ron index 6527acf87a1cfc..28af9753943f80 100644 --- a/assets/scenes/load_scene_example.scn.ron +++ b/assets/scenes/load_scene_example.scn.ron @@ -1,37 +1,29 @@ ( resources: { "scene::ResourceA": ( - score: 2, + score: 1, ), }, entities: { 4294967296: ( components: { - "bevy_transform::components::transform::Transform": ( - translation: ( - x: 0.0, - y: 0.0, - z: 0.0 - ), - rotation: ( - x: 0.0, - y: 0.0, - z: 0.0, - w: 1.0, - ), - scale: ( - x: 1.0, - y: 1.0, - z: 1.0 - ), + "bevy_core::name::Name": ( + hash: 17588334858059901562, + name: 
"joe", ), - "scene::ComponentB": ( - value: "hello", + "bevy_transform::components::global_transform::GlobalTransform": ((1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0)), + "bevy_transform::components::transform::Transform": ( + translation: (0.0, 0.0, 0.0), + rotation: (0.0, 0.0, 0.0, 1.0), + scale: (1.0, 1.0, 1.0), ), "scene::ComponentA": ( x: 1.0, y: 2.0, ), + "scene::ComponentB": ( + value: "hello", + ), }, ), 4294967297: ( @@ -42,5 +34,5 @@ ), }, ), - } -) + }, +) \ No newline at end of file diff --git a/assets/shaders/bindless_material.wgsl b/assets/shaders/bindless_material.wgsl new file mode 100644 index 00000000000000..a8d42de19658ad --- /dev/null +++ b/assets/shaders/bindless_material.wgsl @@ -0,0 +1,38 @@ +#import bevy_pbr::forward_io::VertexOutput +#import bevy_pbr::mesh_bindings::mesh + +struct Color { + base_color: vec4, +} + +#ifdef BINDLESS +@group(2) @binding(0) var material_color: binding_array; +@group(2) @binding(1) var material_color_texture: binding_array, 4>; +@group(2) @binding(2) var material_color_sampler: binding_array; +#else // BINDLESS +@group(2) @binding(0) var material_color: Color; +@group(2) @binding(1) var material_color_texture: texture_2d; +@group(2) @binding(2) var material_color_sampler: sampler; +#endif // BINDLESS + +@fragment +fn fragment(in: VertexOutput) -> @location(0) vec4 { +#ifdef BINDLESS + let slot = mesh[in.instance_index].material_bind_group_slot; + let base_color = material_color[slot].base_color; +#else // BINDLESS + let base_color = material_color.base_color; +#endif // BINDLESS + + return base_color * textureSampleLevel( +#ifdef BINDLESS + material_color_texture[slot], + material_color_sampler[slot], +#else // BINDLESS + material_color_texture, + material_color_sampler, +#endif // BINDLESS + in.uv, + 0.0 + ); +} diff --git a/assets/shaders/custom_material.vert b/assets/shaders/custom_material.vert index 91d660ec3194cf..86ca3629e261f3 100644 --- a/assets/shaders/custom_material.vert +++ b/assets/shaders/custom_material.vert @@ -7,14 +7,16 @@ layout(location = 2) in vec2 Vertex_Uv; layout(location = 0) out vec2 v_Uv; layout(set = 0, binding = 0) uniform CameraViewProj { - mat4 ViewProj; - mat4 View; - mat4 InverseView; - mat4 Projection; - vec3 WorldPosition; - float width; - float height; -}; + mat4 clip_from_world; + // Other attributes exist that can be described here. + // See full definition in: crates/bevy_render/src/view/view.wgsl + // Attributes added here must be in the same order as they are defined + // in view.wgsl, and they must be contiguous starting from the top to + // ensure they have the same layout. + // + // Needing to maintain this mapping yourself is one of the harder parts of using + // GLSL with Bevy. WGSL provides a much better user experience! 
+} camera_view; struct Mesh { mat3x4 Model; @@ -41,7 +43,7 @@ mat4 affine_to_square(mat3x4 affine) { void main() { v_Uv = Vertex_Uv; - gl_Position = ViewProj + gl_Position = camera_view.clip_from_world * affine_to_square(Meshes[gl_InstanceIndex].Model) * vec4(Vertex_Position, 1.0); } diff --git a/assets/shaders/irradiance_volume_voxel_visualization.wgsl b/assets/shaders/irradiance_volume_voxel_visualization.wgsl index 26a8deb87eb350..f34e6f8453dd7b 100644 --- a/assets/shaders/irradiance_volume_voxel_visualization.wgsl +++ b/assets/shaders/irradiance_volume_voxel_visualization.wgsl @@ -1,6 +1,7 @@ #import bevy_pbr::forward_io::VertexOutput #import bevy_pbr::irradiance_volume #import bevy_pbr::mesh_view_bindings +#import bevy_pbr::clustered_forward struct VoxelVisualizationIrradianceVolumeInfo { world_from_voxel: mat4x4, @@ -25,11 +26,24 @@ fn fragment(mesh: VertexOutput) -> @location(0) vec4 { let stp_rounded = round(stp - 0.5f) + 0.5f; let rounded_world_pos = (irradiance_volume_info.world_from_voxel * vec4(stp_rounded, 1.0f)).xyz; + // Look up the irradiance volume range in the cluster list. + let view_z = dot(vec4( + mesh_view_bindings::view.view_from_world[0].z, + mesh_view_bindings::view.view_from_world[1].z, + mesh_view_bindings::view.view_from_world[2].z, + mesh_view_bindings::view.view_from_world[3].z + ), mesh.world_position); + let cluster_index = clustered_forward::fragment_cluster_index(mesh.position.xy, view_z, false); + var clusterable_object_index_ranges = + clustered_forward::unpack_clusterable_object_index_ranges(cluster_index); + // `irradiance_volume_light()` multiplies by intensity, so cancel it out. // If we take intensity into account, the cubes will be way too bright. let rgb = irradiance_volume::irradiance_volume_light( mesh.world_position.xyz, - mesh.world_normal) / irradiance_volume_info.intensity; + mesh.world_normal, + &clusterable_object_index_ranges, + ) / irradiance_volume_info.intensity; return vec4(rgb, 1.0f); } diff --git a/assets/textures/uv_checker_bw.png b/assets/textures/uv_checker_bw.png new file mode 100644 index 00000000000000..bef39cd66b9d14 Binary files /dev/null and b/assets/textures/uv_checker_bw.png differ diff --git a/benches/Cargo.toml b/benches/Cargo.toml index cc586a3e662416..4841570022b0b9 100644 --- a/benches/Cargo.toml +++ b/benches/Cargo.toml @@ -4,22 +4,31 @@ edition = "2021" description = "Benchmarks that test Bevy's performance" publish = false license = "MIT OR Apache-2.0" +# Do not automatically discover benchmarks, we specify them manually instead. +autobenches = false [dev-dependencies] -glam = "0.29" -rand = "0.8" -rand_chacha = "0.3" -criterion = { version = "0.3", features = ["html_reports"] } +# Bevy crates bevy_app = { path = "../crates/bevy_app" } bevy_ecs = { path = "../crates/bevy_ecs", features = ["multi_threaded"] } bevy_hierarchy = { path = "../crates/bevy_hierarchy" } bevy_math = { path = "../crates/bevy_math" } +bevy_picking = { path = "../crates/bevy_picking", features = [ + "bevy_mesh_picking_backend", +] } bevy_reflect = { path = "../crates/bevy_reflect", features = ["functions"] } bevy_render = { path = "../crates/bevy_render" } bevy_tasks = { path = "../crates/bevy_tasks" } bevy_utils = { path = "../crates/bevy_utils" } -# make bevy_render compile on linux. 
x11 vs wayland does not matter here as the benches do not actually use a window +# Other crates +criterion = { version = "0.5.1", features = ["html_reports"] } +glam = "0.29" +rand = "0.8" +rand_chacha = "0.3" + +# Make `bevy_render` compile on Linux with x11 windowing. x11 vs. Wayland does not matter here +# because the benches do not actually open any windows. [target.'cfg(target_os = "linux")'.dev-dependencies] bevy_winit = { path = "../crates/bevy_winit", features = ["x11"] } @@ -27,57 +36,32 @@ bevy_winit = { path = "../crates/bevy_winit", features = ["x11"] } opt-level = 3 lto = true -[[bench]] -name = "change_detection" -path = "benches/bevy_ecs/change_detection.rs" -harness = false - [[bench]] name = "ecs" -path = "benches/bevy_ecs/benches.rs" -harness = false - -[[bench]] -name = "reflect_function" -path = "benches/bevy_reflect/function.rs" -harness = false - -[[bench]] -name = "reflect_list" -path = "benches/bevy_reflect/list.rs" -harness = false - -[[bench]] -name = "reflect_map" -path = "benches/bevy_reflect/map.rs" -harness = false - -[[bench]] -name = "reflect_struct" -path = "benches/bevy_reflect/struct.rs" +path = "benches/bevy_ecs/main.rs" harness = false [[bench]] -name = "parse_reflect_path" -path = "benches/bevy_reflect/path.rs" +name = "math" +path = "benches/bevy_math/main.rs" harness = false [[bench]] -name = "iter" -path = "benches/bevy_tasks/iter.rs" +name = "picking" +path = "benches/bevy_picking/main.rs" harness = false [[bench]] -name = "bezier" -path = "benches/bevy_math/bezier.rs" +name = "reflect" +path = "benches/bevy_reflect/main.rs" harness = false [[bench]] -name = "torus" -path = "benches/bevy_render/torus.rs" +name = "render" +path = "benches/bevy_render/main.rs" harness = false [[bench]] -name = "entity_hash" -path = "benches/bevy_ecs/world/entity_hash.rs" +name = "tasks" +path = "benches/bevy_tasks/main.rs" harness = false diff --git a/benches/benches/bevy_ecs/benches.rs b/benches/benches/bevy_ecs/benches.rs deleted file mode 100644 index 1392536a7da0bf..00000000000000 --- a/benches/benches/bevy_ecs/benches.rs +++ /dev/null @@ -1,19 +0,0 @@ -use criterion::criterion_main; - -mod components; -mod events; -mod fragmentation; -mod iteration; -mod observers; -mod scheduling; -mod world; - -criterion_main!( - components::components_benches, - events::event_benches, - iteration::iterations_benches, - fragmentation::fragmentation_benches, - observers::observer_benches, - scheduling::scheduling_benches, - world::world_benches, -); diff --git a/benches/benches/bevy_ecs/change_detection.rs b/benches/benches/bevy_ecs/change_detection.rs index 6c4428efed8bbd..a07320cf1809b0 100644 --- a/benches/benches/bevy_ecs/change_detection.rs +++ b/benches/benches/bevy_ecs/change_detection.rs @@ -1,11 +1,11 @@ use bevy_ecs::{ - component::Component, + component::{Component, Mutable}, entity::Entity, prelude::{Added, Changed, EntityWorldMut, QueryState}, query::QueryFilter, world::World, }; -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{black_box, criterion_group, Criterion}; use rand::{prelude::SliceRandom, SeedableRng}; use rand_chacha::ChaCha8Rng; @@ -17,7 +17,6 @@ criterion_group!( none_changed_detection, multiple_archetype_none_changed_detection ); -criterion_main!(benches); macro_rules! 
modify { ($components:ident;$($index:tt),*) => { @@ -96,7 +95,7 @@ fn all_added_detection_generic(group: &mut BenchGroup, e }, |(ref mut world, ref mut query)| { let mut count = 0; - for entity in query.iter(&world) { + for entity in query.iter(world) { black_box(entity); count += 1; } @@ -124,7 +123,7 @@ fn all_added_detection(criterion: &mut Criterion) { } } -fn all_changed_detection_generic( +fn all_changed_detection_generic + Default + BenchModify>( group: &mut BenchGroup, entity_count: u32, ) { @@ -144,7 +143,7 @@ fn all_changed_detection_generic( }, |(ref mut world, ref mut query)| { let mut count = 0; - for entity in query.iter(&world) { + for entity in query.iter(world) { black_box(entity); count += 1; } @@ -172,7 +171,7 @@ fn all_changed_detection(criterion: &mut Criterion) { } } -fn few_changed_detection_generic( +fn few_changed_detection_generic + Default + BenchModify>( group: &mut BenchGroup, entity_count: u32, ) { @@ -196,7 +195,7 @@ fn few_changed_detection_generic( (world, query) }, |(ref mut world, ref mut query)| { - for entity in query.iter(&world) { + for entity in query.iter(world) { black_box(entity); } }, @@ -222,7 +221,7 @@ fn few_changed_detection(criterion: &mut Criterion) { } } -fn none_changed_detection_generic( +fn none_changed_detection_generic + Default>( group: &mut BenchGroup, entity_count: u32, ) { @@ -238,7 +237,7 @@ fn none_changed_detection_generic( }, |(ref mut world, ref mut query)| { let mut count = 0; - for entity in query.iter(&world) { + for entity in query.iter(world) { black_box(entity); count += 1; } @@ -271,7 +270,7 @@ fn insert_if_bit_enabled(entity: &mut EntityWorldMut, i: u16) { } } -fn add_archetypes_entities( +fn add_archetypes_entities + Default>( world: &mut World, archetype_count: u16, entity_count: u32, @@ -298,7 +297,9 @@ fn add_archetypes_entities( } } } -fn multiple_archetype_none_changed_detection_generic( +fn multiple_archetype_none_changed_detection_generic< + T: Component + Default + BenchModify, +>( group: &mut BenchGroup, archetype_count: u16, entity_count: u32, @@ -342,7 +343,7 @@ fn multiple_archetype_none_changed_detection_generic>(); - + .spawn_batch(core::iter::repeat(A(0.)).take(10000)) + .collect(); Self(world, entities) } pub fn run(&mut self) { for entity in &self.1 { - self.0.insert_one(*entity, B(0.0)).unwrap(); + self.0.entity_mut(*entity).insert(B(0.)); } for entity in &self.1 { - self.0.remove_one::(*entity).unwrap(); + self.0.entity_mut(*entity).remove::(); } } } diff --git a/benches/benches/bevy_ecs/components/mod.rs b/benches/benches/bevy_ecs/components/mod.rs index 592f40fba7d077..aec44ed27c9d09 100644 --- a/benches/benches/bevy_ecs/components/mod.rs +++ b/benches/benches/bevy_ecs/components/mod.rs @@ -1,5 +1,4 @@ -use criterion::*; - +mod add_remove; mod add_remove_big_sparse_set; mod add_remove_big_table; mod add_remove_sparse_set; @@ -10,9 +9,10 @@ mod insert_simple; mod insert_simple_unbatched; use archetype_updates::*; +use criterion::{criterion_group, Criterion}; criterion_group!( - components_benches, + benches, add_remove, add_remove_big, add_remove_very_big, diff --git a/benches/benches/bevy_ecs/empty_archetypes.rs b/benches/benches/bevy_ecs/empty_archetypes.rs index d6521303f6fc0b..139f52ce843d2d 100644 --- a/benches/benches/bevy_ecs/empty_archetypes.rs +++ b/benches/benches/bevy_ecs/empty_archetypes.rs @@ -1,9 +1,7 @@ -use bevy_ecs::{component::Component, prelude::*, world::World}; -use bevy_tasks::{ComputeTaskPool, TaskPool}; -use criterion::{black_box, criterion_group, criterion_main, 
BenchmarkId, Criterion}; +use bevy_ecs::{component::Component, prelude::*, schedule::ExecutorKind, world::World}; +use criterion::{black_box, criterion_group, BenchmarkId, Criterion}; criterion_group!(benches, empty_archetypes); -criterion_main!(benches); #[derive(Component)] struct A(f32); @@ -47,13 +45,12 @@ fn for_each( &A<12>, )>, ) { - query.for_each(|comp| { + query.iter().for_each(|comp| { black_box(comp); }); } fn par_for_each( - task_pool: Res, query: Query<( &A<0>, &A<1>, @@ -70,25 +67,29 @@ fn par_for_each( &A<12>, )>, ) { - query.par_for_each(&*task_pool, 64, |comp| { + query.par_iter().for_each(|comp| { black_box(comp); }); } fn setup(parallel: bool, setup: impl FnOnce(&mut Schedule)) -> (World, Schedule) { - let mut world = World::new(); + let world = World::new(); let mut schedule = Schedule::default(); - if parallel { - world.insert_resource(ComputeTaskPool(TaskPool::default())); - } + + schedule.set_executor_kind(match parallel { + true => ExecutorKind::MultiThreaded, + false => ExecutorKind::SingleThreaded, + }); + setup(&mut schedule); + (world, schedule) } /// create `count` entities with distinct archetypes fn add_archetypes(world: &mut World, count: u16) { for i in 0..count { - let mut e = world.spawn(); + let mut e = world.spawn_empty(); e.insert(A::<0>(1.0)); e.insert(A::<1>(1.0)); e.insert(A::<2>(1.0)); @@ -158,7 +159,7 @@ fn empty_archetypes(criterion: &mut Criterion) { }); add_archetypes(&mut world, archetype_count); world.clear_entities(); - let mut e = world.spawn(); + let mut e = world.spawn_empty(); e.insert(A::<0>(1.0)); e.insert(A::<1>(1.0)); e.insert(A::<2>(1.0)); @@ -189,7 +190,7 @@ fn empty_archetypes(criterion: &mut Criterion) { }); add_archetypes(&mut world, archetype_count); world.clear_entities(); - let mut e = world.spawn(); + let mut e = world.spawn_empty(); e.insert(A::<0>(1.0)); e.insert(A::<1>(1.0)); e.insert(A::<2>(1.0)); @@ -220,7 +221,7 @@ fn empty_archetypes(criterion: &mut Criterion) { }); add_archetypes(&mut world, archetype_count); world.clear_entities(); - let mut e = world.spawn(); + let mut e = world.spawn_empty(); e.insert(A::<0>(1.0)); e.insert(A::<1>(1.0)); e.insert(A::<2>(1.0)); diff --git a/benches/benches/bevy_ecs/events/mod.rs b/benches/benches/bevy_ecs/events/mod.rs index 5d20ef25cd018d..4367c45c3e28d4 100644 --- a/benches/benches/bevy_ecs/events/mod.rs +++ b/benches/benches/bevy_ecs/events/mod.rs @@ -1,9 +1,9 @@ -use criterion::*; - mod iter; mod send; -criterion_group!(event_benches, send, iter); +use criterion::{criterion_group, Criterion}; + +criterion_group!(benches, send, iter); fn send(c: &mut Criterion) { let mut group = c.benchmark_group("events_send"); diff --git a/benches/benches/bevy_ecs/fragmentation/mod.rs b/benches/benches/bevy_ecs/fragmentation.rs similarity index 98% rename from benches/benches/bevy_ecs/fragmentation/mod.rs rename to benches/benches/bevy_ecs/fragmentation.rs index ae44aae4a48c5b..97864990a37e55 100644 --- a/benches/benches/bevy_ecs/fragmentation/mod.rs +++ b/benches/benches/bevy_ecs/fragmentation.rs @@ -1,10 +1,10 @@ use bevy_ecs::prelude::*; use bevy_ecs::system::SystemState; +use core::hint::black_box; use criterion::*; use glam::*; -use core::hint::black_box; -criterion_group!(fragmentation_benches, iter_frag_empty); +criterion_group!(benches, iter_frag_empty); #[derive(Component, Default)] struct Table(usize); diff --git a/benches/benches/bevy_ecs/iteration/mod.rs b/benches/benches/bevy_ecs/iteration/mod.rs index 62d8b166040dcf..0fa7aced2894a9 100644 --- 
a/benches/benches/bevy_ecs/iteration/mod.rs +++ b/benches/benches/bevy_ecs/iteration/mod.rs @@ -1,5 +1,3 @@ -use criterion::*; - mod heavy_compute; mod iter_frag; mod iter_frag_foreach; @@ -22,10 +20,11 @@ mod iter_simple_wide_sparse_set; mod par_iter_simple; mod par_iter_simple_foreach_hybrid; +use criterion::{criterion_group, Criterion}; use heavy_compute::*; criterion_group!( - iterations_benches, + benches, iter_frag, iter_frag_sparse, iter_simple, @@ -136,7 +135,7 @@ fn par_iter_simple(c: &mut Criterion) { b.iter(move || bench.run()); }); } - group.bench_function(format!("hybrid"), |b| { + group.bench_function("hybrid".to_string(), |b| { let mut bench = par_iter_simple_foreach_hybrid::Benchmark::new(); b.iter(move || bench.run()); }); diff --git a/benches/benches/bevy_ecs/main.rs b/benches/benches/bevy_ecs/main.rs new file mode 100644 index 00000000000000..83f0cde0286d61 --- /dev/null +++ b/benches/benches/bevy_ecs/main.rs @@ -0,0 +1,31 @@ +#![expect( + dead_code, + reason = "Many fields are unused/unread as they are just for benchmarking purposes." +)] +#![expect(clippy::type_complexity)] + +use criterion::criterion_main; + +mod change_detection; +mod components; +mod empty_archetypes; +mod events; +mod fragmentation; +mod iteration; +mod observers; +mod param; +mod scheduling; +mod world; + +criterion_main!( + change_detection::benches, + components::benches, + empty_archetypes::benches, + events::benches, + iteration::benches, + fragmentation::benches, + observers::benches, + scheduling::benches, + world::benches, + param::benches, +); diff --git a/benches/benches/bevy_ecs/observers/mod.rs b/benches/benches/bevy_ecs/observers/mod.rs index 0b8c3f24869ce0..16008def7e4610 100644 --- a/benches/benches/bevy_ecs/observers/mod.rs +++ b/benches/benches/bevy_ecs/observers/mod.rs @@ -1,8 +1,8 @@ -use criterion::criterion_group; - mod propagation; mod simple; + +use criterion::criterion_group; use propagation::*; use simple::*; -criterion_group!(observer_benches, event_propagation, observe_simple); +criterion_group!(benches, event_propagation, observe_simple); diff --git a/benches/benches/bevy_ecs/observers/propagation.rs b/benches/benches/bevy_ecs/observers/propagation.rs index b702662e7dd8cb..5de85bc3269b27 100644 --- a/benches/benches/bevy_ecs/observers/propagation.rs +++ b/benches/benches/bevy_ecs/observers/propagation.rs @@ -1,9 +1,5 @@ use bevy_ecs::{ - component::Component, - entity::Entity, - event::Event, - observer::Trigger, - world::World, + component::Component, entity::Entity, event::Event, observer::Trigger, world::World, }; use bevy_hierarchy::{BuildChildren, Parent}; @@ -75,7 +71,7 @@ impl Event for TestEvent { const AUTO_PROPAGATE: bool = true; } -fn send_events(world: &mut World, leaves: &Vec) { +fn send_events(world: &mut World, leaves: &[Entity]) { let target = leaves.iter().choose(&mut rand::thread_rng()).unwrap(); (0..N_EVENTS).for_each(|_| { @@ -104,9 +100,9 @@ fn spawn_listener_hierarchy(world: &mut World) -> (Vec, Vec, Vec } fn add_listeners_to_hierarchy( - roots: &Vec, - leaves: &Vec, - nodes: &Vec, + roots: &[Entity], + leaves: &[Entity], + nodes: &[Entity], world: &mut World, ) { for e in roots.iter() { diff --git a/benches/benches/bevy_ecs/observers/simple.rs b/benches/benches/bevy_ecs/observers/simple.rs index 4d4d5bc2aa852a..bc42710cf96c39 100644 --- a/benches/benches/bevy_ecs/observers/simple.rs +++ b/benches/benches/bevy_ecs/observers/simple.rs @@ -17,7 +17,7 @@ pub fn observe_simple(criterion: &mut Criterion) { group.bench_function("trigger_simple", |bencher| 
{ let mut world = World::new(); - world.observe(empty_listener_base); + world.add_observer(empty_listener_base); bencher.iter(|| { for _ in 0..10000 { world.trigger(EventBase) diff --git a/benches/benches/bevy_ecs/param/combinator_system.rs b/benches/benches/bevy_ecs/param/combinator_system.rs new file mode 100644 index 00000000000000..5d15c30a72e3e6 --- /dev/null +++ b/benches/benches/bevy_ecs/param/combinator_system.rs @@ -0,0 +1,31 @@ +use bevy_ecs::prelude::*; +use criterion::Criterion; + +pub fn combinator_system(criterion: &mut Criterion) { + let mut world = World::new(); + let mut group = criterion.benchmark_group("param/combinator_system"); + + group.warm_up_time(core::time::Duration::from_millis(500)); + group.measurement_time(core::time::Duration::from_secs(3)); + + let mut schedule = Schedule::default(); + schedule.add_systems( + (|| {}) + .pipe(|| {}) + .pipe(|| {}) + .pipe(|| {}) + .pipe(|| {}) + .pipe(|| {}) + .pipe(|| {}) + .pipe(|| {}), + ); + // run once to initialize systems + schedule.run(&mut world); + group.bench_function("8_piped_systems", |bencher| { + bencher.iter(|| { + schedule.run(&mut world); + }); + }); + + group.finish(); +} diff --git a/benches/benches/bevy_ecs/param/dyn_param.rs b/benches/benches/bevy_ecs/param/dyn_param.rs new file mode 100644 index 00000000000000..33de52bf13560c --- /dev/null +++ b/benches/benches/bevy_ecs/param/dyn_param.rs @@ -0,0 +1,49 @@ +use bevy_ecs::{ + prelude::*, + system::{DynParamBuilder, DynSystemParam, ParamBuilder}, +}; +use criterion::Criterion; + +pub fn dyn_param(criterion: &mut Criterion) { + let mut world = World::new(); + let mut group = criterion.benchmark_group("param/combinator_system"); + + group.warm_up_time(core::time::Duration::from_millis(500)); + group.measurement_time(core::time::Duration::from_secs(3)); + + #[derive(Resource)] + struct R; + + let mut schedule = Schedule::default(); + let system = ( + DynParamBuilder::new::>(ParamBuilder), + DynParamBuilder::new::>(ParamBuilder), + DynParamBuilder::new::>(ParamBuilder), + DynParamBuilder::new::>(ParamBuilder), + DynParamBuilder::new::>(ParamBuilder), + DynParamBuilder::new::>(ParamBuilder), + DynParamBuilder::new::>(ParamBuilder), + DynParamBuilder::new::>(ParamBuilder), + ) + .build_state(&mut world) + .build_system( + |_: DynSystemParam, + _: DynSystemParam, + _: DynSystemParam, + _: DynSystemParam, + _: DynSystemParam, + _: DynSystemParam, + _: DynSystemParam, + _: DynSystemParam| {}, + ); + schedule.add_systems(system); + // run once to initialize systems + schedule.run(&mut world); + group.bench_function("8_dyn_params_system", |bencher| { + bencher.iter(|| { + schedule.run(&mut world); + }); + }); + + group.finish(); +} diff --git a/benches/benches/bevy_ecs/param/mod.rs b/benches/benches/bevy_ecs/param/mod.rs new file mode 100644 index 00000000000000..6cc6132f66787b --- /dev/null +++ b/benches/benches/bevy_ecs/param/mod.rs @@ -0,0 +1,10 @@ +mod combinator_system; +mod dyn_param; +mod param_set; + +use combinator_system::*; +use criterion::criterion_group; +use dyn_param::*; +use param_set::*; + +criterion_group!(benches, combinator_system, dyn_param, param_set); diff --git a/benches/benches/bevy_ecs/param/param_set.rs b/benches/benches/bevy_ecs/param/param_set.rs new file mode 100644 index 00000000000000..0521561b6b804f --- /dev/null +++ b/benches/benches/bevy_ecs/param/param_set.rs @@ -0,0 +1,36 @@ +use bevy_ecs::prelude::*; +use criterion::Criterion; + +pub fn param_set(criterion: &mut Criterion) { + let mut world = World::new(); + let mut group = 
criterion.benchmark_group("param/combinator_system"); + + group.warm_up_time(core::time::Duration::from_millis(500)); + group.measurement_time(core::time::Duration::from_secs(3)); + + #[derive(Resource)] + struct R; + + let mut schedule = Schedule::default(); + schedule.add_systems( + |_: ParamSet<( + ResMut, + ResMut, + ResMut, + ResMut, + ResMut, + ResMut, + ResMut, + ResMut, + )>| {}, + ); + // run once to initialize systems + schedule.run(&mut world); + group.bench_function("8_variant_param_set_system", |bencher| { + bencher.iter(|| { + schedule.run(&mut world); + }); + }); + + group.finish(); +} diff --git a/benches/benches/bevy_ecs/scheduling/mod.rs b/benches/benches/bevy_ecs/scheduling/mod.rs index 60d1620a89f5c1..310662e629cad9 100644 --- a/benches/benches/bevy_ecs/scheduling/mod.rs +++ b/benches/benches/bevy_ecs/scheduling/mod.rs @@ -1,15 +1,14 @@ -use criterion::criterion_group; - mod run_condition; mod running_systems; mod schedule; +use criterion::criterion_group; use run_condition::*; use running_systems::*; use schedule::*; criterion_group!( - scheduling_benches, + benches, run_condition_yes, run_condition_no, run_condition_yes_with_query, diff --git a/benches/benches/bevy_ecs/scheduling/run_condition.rs b/benches/benches/bevy_ecs/scheduling/run_condition.rs index 1a033f36ef8b83..0d6e4107c6245b 100644 --- a/benches/benches/bevy_ecs/scheduling/run_condition.rs +++ b/benches/benches/bevy_ecs/scheduling/run_condition.rs @@ -25,7 +25,7 @@ pub fn run_condition_yes(criterion: &mut Criterion) { } // run once to initialize systems schedule.run(&mut world); - group.bench_function(&format!("{:03}_systems", 5 * amount + 1), |bencher| { + group.bench_function(format!("{:03}_systems", 5 * amount + 1), |bencher| { bencher.iter(|| { schedule.run(&mut world); }); @@ -48,7 +48,7 @@ pub fn run_condition_no(criterion: &mut Criterion) { } // run once to initialize systems schedule.run(&mut world); - group.bench_function(&format!("{:03}_systems", 5 * amount + 1), |bencher| { + group.bench_function(format!("{:03}_systems", 5 * amount + 1), |bencher| { bencher.iter(|| { schedule.run(&mut world); }); @@ -67,8 +67,8 @@ pub fn run_condition_yes_with_query(criterion: &mut Criterion) { group.warm_up_time(core::time::Duration::from_millis(500)); group.measurement_time(core::time::Duration::from_secs(3)); fn empty() {} - fn yes_with_query(query: Query<&TestBool>) -> bool { - query.single().0 + fn yes_with_query(query: Single<&TestBool>) -> bool { + query.0 } for amount in 0..21 { let mut schedule = Schedule::default(); @@ -80,7 +80,7 @@ pub fn run_condition_yes_with_query(criterion: &mut Criterion) { } // run once to initialize systems schedule.run(&mut world); - group.bench_function(&format!("{:03}_systems", 5 * amount + 1), |bencher| { + group.bench_function(format!("{:03}_systems", 5 * amount + 1), |bencher| { bencher.iter(|| { schedule.run(&mut world); }); @@ -109,7 +109,7 @@ pub fn run_condition_yes_with_resource(criterion: &mut Criterion) { } // run once to initialize systems schedule.run(&mut world); - group.bench_function(&format!("{:03}_systems", 5 * amount + 1), |bencher| { + group.bench_function(format!("{:03}_systems", 5 * amount + 1), |bencher| { bencher.iter(|| { schedule.run(&mut world); }); diff --git a/benches/benches/bevy_ecs/scheduling/running_systems.rs b/benches/benches/bevy_ecs/scheduling/running_systems.rs index d2ea51307f87f9..4a1455388549f6 100644 --- a/benches/benches/bevy_ecs/scheduling/running_systems.rs +++ b/benches/benches/bevy_ecs/scheduling/running_systems.rs @@ -26,7 
+26,7 @@ pub fn empty_systems(criterion: &mut Criterion) { schedule.add_systems(empty); } schedule.run(&mut world); - group.bench_function(&format!("{:03}_systems", amount), |bencher| { + group.bench_function(format!("{:03}_systems", amount), |bencher| { bencher.iter(|| { schedule.run(&mut world); }); @@ -38,7 +38,7 @@ pub fn empty_systems(criterion: &mut Criterion) { schedule.add_systems((empty, empty, empty, empty, empty)); } schedule.run(&mut world); - group.bench_function(&format!("{:03}_systems", 5 * amount), |bencher| { + group.bench_function(format!("{:03}_systems", 5 * amount), |bencher| { bencher.iter(|| { schedule.run(&mut world); }); @@ -80,7 +80,7 @@ pub fn busy_systems(criterion: &mut Criterion) { } schedule.run(&mut world); group.bench_function( - &format!( + format!( "{:02}x_entities_{:02}_systems", entity_bunches, 3 * system_amount + 3 @@ -131,7 +131,7 @@ pub fn contrived(criterion: &mut Criterion) { } schedule.run(&mut world); group.bench_function( - &format!( + format!( "{:02}x_entities_{:02}_systems", entity_bunches, 3 * system_amount + 3 diff --git a/benches/benches/bevy_ecs/scheduling/schedule.rs b/benches/benches/bevy_ecs/scheduling/schedule.rs index 4571899a9b7b50..1a428b5eb85fee 100644 --- a/benches/benches/bevy_ecs/scheduling/schedule.rs +++ b/benches/benches/bevy_ecs/scheduling/schedule.rs @@ -74,7 +74,7 @@ pub fn build_schedule(criterion: &mut Criterion) { // Method: generate a set of `graph_size` systems which have a One True Ordering. // Add system to the schedule with full constraints. Hopefully this should be maximally // difficult for bevy to figure out. - let labels: Vec<_> = (0..1000).map(|i| NumSet(i)).collect(); + let labels: Vec<_> = (0..1000).map(NumSet).collect(); // Benchmark graphs of different sizes. for graph_size in [100, 500, 1000] { diff --git a/benches/benches/bevy_ecs/world/commands.rs b/benches/benches/bevy_ecs/world/commands.rs index 19128f80ba7daa..a1d7cdb09e382d 100644 --- a/benches/benches/bevy_ecs/world/commands.rs +++ b/benches/benches/bevy_ecs/world/commands.rs @@ -1,6 +1,5 @@ use bevy_ecs::{ component::Component, - entity::Entity, system::Commands, world::{Command, CommandQueue, World}, }; @@ -91,7 +90,7 @@ pub fn insert_commands(criterion: &mut Criterion) { command_queue.apply(&mut world); }); }); - group.bench_function("insert_batch", |bencher| { + group.bench_function("insert_or_spawn_batch", |bencher| { let mut world = World::default(); let mut command_queue = CommandQueue::default(); let mut entities = Vec::new(); @@ -109,6 +108,24 @@ pub fn insert_commands(criterion: &mut Criterion) { command_queue.apply(&mut world); }); }); + group.bench_function("insert_batch", |bencher| { + let mut world = World::default(); + let mut command_queue = CommandQueue::default(); + let mut entities = Vec::new(); + for _ in 0..entity_count { + entities.push(world.spawn_empty().id()); + } + + bencher.iter(|| { + let mut commands = Commands::new(&mut command_queue, &world); + let mut values = Vec::with_capacity(entity_count); + for entity in &entities { + values.push((*entity, (Matrix::default(), Vec3::default()))); + } + commands.insert_batch(values); + command_queue.apply(&mut world); + }); + }); group.finish(); } @@ -209,41 +226,3 @@ pub fn medium_sized_commands(criterion: &mut Criterion) { pub fn large_sized_commands(criterion: &mut Criterion) { sized_commands_impl::>(criterion); } - -pub fn get_or_spawn(criterion: &mut Criterion) { - let mut group = criterion.benchmark_group("get_or_spawn"); - 
group.warm_up_time(core::time::Duration::from_millis(500)); - group.measurement_time(core::time::Duration::from_secs(4)); - - group.bench_function("individual", |bencher| { - let mut world = World::default(); - let mut command_queue = CommandQueue::default(); - - bencher.iter(|| { - let mut commands = Commands::new(&mut command_queue, &world); - for i in 0..10_000 { - commands - .get_or_spawn(Entity::from_raw(i)) - .insert((Matrix::default(), Vec3::default())); - } - command_queue.apply(&mut world); - }); - }); - - group.bench_function("batched", |bencher| { - let mut world = World::default(); - let mut command_queue = CommandQueue::default(); - - bencher.iter(|| { - let mut commands = Commands::new(&mut command_queue, &world); - let mut values = Vec::with_capacity(10_000); - for i in 0..10_000 { - values.push((Entity::from_raw(i), (Matrix::default(), Vec3::default()))); - } - commands.insert_or_spawn_batch(values); - command_queue.apply(&mut world); - }); - }); - - group.finish(); -} diff --git a/benches/benches/bevy_ecs/world/despawn.rs b/benches/benches/bevy_ecs/world/despawn.rs new file mode 100644 index 00000000000000..ace88e744a482a --- /dev/null +++ b/benches/benches/bevy_ecs/world/despawn.rs @@ -0,0 +1,32 @@ +use bevy_ecs::prelude::*; +use criterion::Criterion; +use glam::*; + +#[derive(Component)] +struct A(Mat4); +#[derive(Component)] +struct B(Vec4); + +pub fn world_despawn(criterion: &mut Criterion) { + let mut group = criterion.benchmark_group("despawn_world"); + group.warm_up_time(core::time::Duration::from_millis(500)); + group.measurement_time(core::time::Duration::from_secs(4)); + + for entity_count in (0..5).map(|i| 10_u32.pow(i)) { + let mut world = World::default(); + for _ in 0..entity_count { + world.spawn((A(Mat4::default()), B(Vec4::default()))); + } + + let ents = world.iter_entities().map(|e| e.id()).collect::>(); + group.bench_function(format!("{}_entities", entity_count), |bencher| { + bencher.iter(|| { + ents.iter().for_each(|e| { + world.despawn(*e); + }); + }); + }); + } + + group.finish(); +} diff --git a/benches/benches/bevy_ecs/world/despawn_recursive.rs b/benches/benches/bevy_ecs/world/despawn_recursive.rs new file mode 100644 index 00000000000000..482086ab174449 --- /dev/null +++ b/benches/benches/bevy_ecs/world/despawn_recursive.rs @@ -0,0 +1,39 @@ +use bevy_ecs::prelude::*; +use bevy_hierarchy::despawn_with_children_recursive; +use bevy_hierarchy::BuildChildren; +use bevy_hierarchy::ChildBuild; +use criterion::Criterion; +use glam::*; + +#[derive(Component)] +struct A(Mat4); +#[derive(Component)] +struct B(Vec4); + +pub fn world_despawn_recursive(criterion: &mut Criterion) { + let mut group = criterion.benchmark_group("despawn_world_recursive"); + group.warm_up_time(core::time::Duration::from_millis(500)); + group.measurement_time(core::time::Duration::from_secs(4)); + + for entity_count in (0..5).map(|i| 10_u32.pow(i)) { + let mut world = World::default(); + for _ in 0..entity_count { + world + .spawn((A(Mat4::default()), B(Vec4::default()))) + .with_children(|parent| { + parent.spawn((A(Mat4::default()), B(Vec4::default()))); + }); + } + + let ents = world.iter_entities().map(|e| e.id()).collect::>(); + group.bench_function(format!("{}_entities", entity_count), |bencher| { + bencher.iter(|| { + ents.iter().for_each(|e| { + despawn_with_children_recursive(&mut world, *e, true); + }); + }); + }); + } + + group.finish(); +} diff --git a/benches/benches/bevy_ecs/world/entity_hash.rs b/benches/benches/bevy_ecs/world/entity_hash.rs index 
3bd148d90da63d..d4ba9b659820f1 100644 --- a/benches/benches/bevy_ecs/world/entity_hash.rs +++ b/benches/benches/bevy_ecs/world/entity_hash.rs @@ -1,11 +1,8 @@ use bevy_ecs::entity::{Entity, EntityHashSet}; -use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; +use criterion::{BenchmarkId, Criterion, Throughput}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; -criterion_group!(benches, entity_set_build_and_lookup,); -criterion_main!(benches); - const SIZES: [usize; 5] = [100, 316, 1000, 3162, 10000]; fn make_entity(rng: &mut impl Rng, size: usize) -> Entity { diff --git a/benches/benches/bevy_ecs/world/mod.rs b/benches/benches/bevy_ecs/world/mod.rs index 8b12a08fcd783d..e35dc999c2eb88 100644 --- a/benches/benches/bevy_ecs/world/mod.rs +++ b/benches/benches/bevy_ecs/world/mod.rs @@ -1,19 +1,20 @@ -use criterion::criterion_group; - mod commands; -use commands::*; - +mod despawn; +mod despawn_recursive; +mod entity_hash; mod spawn; -use spawn::*; - mod world_get; -use world_get::*; -mod entity_hash; +use commands::*; +use criterion::criterion_group; +use despawn::*; +use despawn_recursive::*; use entity_hash::*; +use spawn::*; +use world_get::*; criterion_group!( - world_benches, + benches, empty_commands, spawn_commands, insert_commands, @@ -21,16 +22,17 @@ criterion_group!( zero_sized_commands, medium_sized_commands, large_sized_commands, - get_or_spawn, world_entity, world_get, world_query_get, world_query_iter, world_query_for_each, world_spawn, + world_despawn, + world_despawn_recursive, query_get, query_get_many::<2>, query_get_many::<5>, query_get_many::<10>, - entity_set_build_and_lookup + entity_set_build_and_lookup, ); diff --git a/benches/benches/bevy_ecs/world/world_get.rs b/benches/benches/bevy_ecs/world/world_get.rs index 4c235cd1b46e3a..190402fbadb27e 100644 --- a/benches/benches/bevy_ecs/world/world_get.rs +++ b/benches/benches/bevy_ecs/world/world_get.rs @@ -306,7 +306,7 @@ pub fn query_get(criterion: &mut Criterion) { } pub fn query_get_many(criterion: &mut Criterion) { - let mut group = criterion.benchmark_group(&format!("query_get_many_{N}")); + let mut group = criterion.benchmark_group(format!("query_get_many_{N}")); group.warm_up_time(core::time::Duration::from_millis(500)); group.measurement_time(core::time::Duration::from_secs(2 * N as u64)); diff --git a/benches/benches/bevy_math/bezier.rs b/benches/benches/bevy_math/bezier.rs index 69590aa80412d8..c367cb30298dde 100644 --- a/benches/benches/bevy_math/bezier.rs +++ b/benches/benches/bevy_math/bezier.rs @@ -1,6 +1,6 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{black_box, criterion_group, Criterion}; -use bevy_math::{prelude::*, *}; +use bevy_math::prelude::*; fn easing(c: &mut Criterion) { let cubic_bezier = CubicSegment::new_bezier(vec2(0.25, 0.1), vec2(0.25, 1.0)); @@ -92,4 +92,3 @@ criterion_group!( build_pos_cubic, build_accel_cubic, ); -criterion_main!(benches); diff --git a/benches/benches/bevy_math/main.rs b/benches/benches/bevy_math/main.rs new file mode 100644 index 00000000000000..7b84b6d60f41b0 --- /dev/null +++ b/benches/benches/bevy_math/main.rs @@ -0,0 +1,5 @@ +use criterion::criterion_main; + +mod bezier; + +criterion_main!(bezier::benches); diff --git a/benches/benches/bevy_picking/main.rs b/benches/benches/bevy_picking/main.rs new file mode 100644 index 00000000000000..d3939d82ee6fcf --- /dev/null +++ b/benches/benches/bevy_picking/main.rs @@ -0,0 +1,5 @@ +use criterion::criterion_main; + +mod 
ray_mesh_intersection; + +criterion_main!(ray_mesh_intersection::benches); diff --git a/benches/benches/bevy_picking/ray_mesh_intersection.rs b/benches/benches/bevy_picking/ray_mesh_intersection.rs new file mode 100644 index 00000000000000..1d019d43ee37fc --- /dev/null +++ b/benches/benches/bevy_picking/ray_mesh_intersection.rs @@ -0,0 +1,119 @@ +use bevy_math::{Dir3, Mat4, Ray3d, Vec3}; +use bevy_picking::mesh_picking::ray_cast; +use criterion::{black_box, criterion_group, Criterion}; + +fn ptoxznorm(p: u32, size: u32) -> (f32, f32) { + let ij = (p / (size), p % (size)); + (ij.0 as f32 / size as f32, ij.1 as f32 / size as f32) +} + +struct SimpleMesh { + positions: Vec<[f32; 3]>, + normals: Vec<[f32; 3]>, + indices: Vec, +} + +fn mesh_creation(vertices_per_side: u32) -> SimpleMesh { + let mut positions = Vec::new(); + let mut normals = Vec::new(); + for p in 0..vertices_per_side.pow(2) { + let xz = ptoxznorm(p, vertices_per_side); + positions.push([xz.0 - 0.5, 0.0, xz.1 - 0.5]); + normals.push([0.0, 1.0, 0.0]); + } + + let mut indices = vec![]; + for p in 0..vertices_per_side.pow(2) { + if p % (vertices_per_side) != vertices_per_side - 1 + && p / (vertices_per_side) != vertices_per_side - 1 + { + indices.extend_from_slice(&[p, p + 1, p + vertices_per_side]); + indices.extend_from_slice(&[p + vertices_per_side, p + 1, p + vertices_per_side + 1]); + } + } + + SimpleMesh { + positions, + normals, + indices, + } +} + +fn ray_mesh_intersection(c: &mut Criterion) { + let mut group = c.benchmark_group("ray_mesh_intersection"); + group.warm_up_time(std::time::Duration::from_millis(500)); + + for vertices_per_side in [10_u32, 100, 1000] { + group.bench_function(format!("{}_vertices", vertices_per_side.pow(2)), |b| { + let ray = Ray3d::new(Vec3::new(0.0, 1.0, 0.0), Dir3::NEG_Y); + let mesh_to_world = Mat4::IDENTITY; + let mesh = mesh_creation(vertices_per_side); + + b.iter(|| { + black_box(ray_cast::ray_mesh_intersection( + ray, + &mesh_to_world, + &mesh.positions, + Some(&mesh.normals), + Some(&mesh.indices), + ray_cast::Backfaces::Cull, + )); + }); + }); + } +} + +fn ray_mesh_intersection_no_cull(c: &mut Criterion) { + let mut group = c.benchmark_group("ray_mesh_intersection_no_cull"); + group.warm_up_time(std::time::Duration::from_millis(500)); + + for vertices_per_side in [10_u32, 100, 1000] { + group.bench_function(format!("{}_vertices", vertices_per_side.pow(2)), |b| { + let ray = Ray3d::new(Vec3::new(0.0, 1.0, 0.0), Dir3::NEG_Y); + let mesh_to_world = Mat4::IDENTITY; + let mesh = mesh_creation(vertices_per_side); + + b.iter(|| { + black_box(ray_cast::ray_mesh_intersection( + ray, + &mesh_to_world, + &mesh.positions, + Some(&mesh.normals), + Some(&mesh.indices), + ray_cast::Backfaces::Include, + )); + }); + }); + } +} + +fn ray_mesh_intersection_no_intersection(c: &mut Criterion) { + let mut group = c.benchmark_group("ray_mesh_intersection_no_intersection"); + group.warm_up_time(std::time::Duration::from_millis(500)); + + for vertices_per_side in [10_u32, 100, 1000] { + group.bench_function(format!("{}_vertices", (vertices_per_side).pow(2)), |b| { + let ray = Ray3d::new(Vec3::new(0.0, 1.0, 0.0), Dir3::X); + let mesh_to_world = Mat4::IDENTITY; + let mesh = mesh_creation(vertices_per_side); + + b.iter(|| { + black_box(ray_cast::ray_mesh_intersection( + ray, + &mesh_to_world, + &mesh.positions, + Some(&mesh.normals), + Some(&mesh.indices), + ray_cast::Backfaces::Cull, + )); + }); + }); + } +} + +criterion_group!( + benches, + ray_mesh_intersection, + ray_mesh_intersection_no_cull, + 
ray_mesh_intersection_no_intersection +); diff --git a/benches/benches/bevy_reflect/function.rs b/benches/benches/bevy_reflect/function.rs index 03673d3a9a2eb1..f40b9149eec64f 100644 --- a/benches/benches/bevy_reflect/function.rs +++ b/benches/benches/bevy_reflect/function.rs @@ -1,8 +1,7 @@ use bevy_reflect::func::{ArgList, IntoFunction, IntoFunctionMut, TypedFunction}; -use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; +use criterion::{criterion_group, BatchSize, Criterion}; -criterion_group!(benches, typed, into, call, clone); -criterion_main!(benches); +criterion_group!(benches, typed, into, call, overload, clone); fn add(a: i32, b: i32) -> i32 { a + b @@ -79,6 +78,307 @@ fn call(c: &mut Criterion) { }); } +fn overload(c: &mut Criterion) { + fn add>(a: T, b: T) -> T { + a + b + } + + fn complex( + _: T0, + _: T1, + _: T2, + _: T3, + _: T4, + _: T5, + _: T6, + _: T7, + _: T8, + _: T9, + ) { + } + + c.benchmark_group("with_overload") + .bench_function("01_simple_overload", |b| { + b.iter_batched( + || add::.into_function(), + |func| func.with_overload(add::), + BatchSize::SmallInput, + ); + }) + .bench_function("01_complex_overload", |b| { + b.iter_batched( + || complex::.into_function(), + |func| { + func.with_overload(complex::) + }, + BatchSize::SmallInput, + ); + }) + .bench_function("03_simple_overload", |b| { + b.iter_batched( + || add::.into_function(), + |func| { + func.with_overload(add::) + .with_overload(add::) + .with_overload(add::) + }, + BatchSize::SmallInput, + ); + }) + .bench_function("03_complex_overload", |b| { + b.iter_batched( + || complex::.into_function(), + |func| { + func.with_overload(complex::) + .with_overload(complex::) + .with_overload(complex::) + }, + BatchSize::SmallInput, + ); + }) + .bench_function("10_simple_overload", |b| { + b.iter_batched( + || add::.into_function(), + |func| { + func.with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + }, + BatchSize::SmallInput, + ); + }) + .bench_function("10_complex_overload", |b| { + b.iter_batched( + || complex::.into_function(), + |func| { + func.with_overload(complex::) + .with_overload(complex::) + .with_overload(complex::) + .with_overload(complex::) + .with_overload(complex::) + .with_overload(complex::) + .with_overload(complex::) + .with_overload(complex::) + .with_overload(complex::) + }, + BatchSize::SmallInput, + ); + }) + .bench_function("01_nested_simple_overload", |b| { + b.iter_batched( + || add::.into_function(), + |func| func.with_overload(add::), + BatchSize::SmallInput, + ); + }) + .bench_function("03_nested_simple_overload", |b| { + b.iter_batched( + || add::.into_function(), + |func| { + func.with_overload( + add:: + .into_function() + .with_overload(add::.into_function().with_overload(add::)), + ) + }, + BatchSize::SmallInput, + ); + }) + .bench_function("10_nested_simple_overload", |b| { + b.iter_batched( + || add::.into_function(), + |func| { + func.with_overload( + add::.into_function().with_overload( + add::.into_function().with_overload( + add::.into_function().with_overload( + add::.into_function().with_overload( + add::.into_function().with_overload( + add::.into_function().with_overload( + add::.into_function().with_overload( + add:: + .into_function() + .with_overload(add::), + ), + ), + ), + ), + ), + ), + ), + ) + }, + BatchSize::SmallInput, + ); + }); + + c.benchmark_group("call_overload") + 
.bench_function("01_simple_overload", |b| { + b.iter_batched( + || { + ( + add::.into_function().with_overload(add::), + ArgList::new().push_owned(75_i8).push_owned(25_i8), + ) + }, + |(func, args)| func.call(args), + BatchSize::SmallInput, + ); + }) + .bench_function("01_complex_overload", |b| { + b.iter_batched( + || { + ( + complex:: + .into_function() + .with_overload( + complex::, + ), + ArgList::new() + .push_owned(1_i8) + .push_owned(2_i16) + .push_owned(3_i32) + .push_owned(4_i64) + .push_owned(5_i128) + .push_owned(6_u8) + .push_owned(7_u16) + .push_owned(8_u32) + .push_owned(9_u64) + .push_owned(10_u128), + ) + }, + |(func, args)| func.call(args), + BatchSize::SmallInput, + ); + }) + .bench_function("03_simple_overload", |b| { + b.iter_batched( + || { + ( + add:: + .into_function() + .with_overload(add::) + .with_overload(add::) + .with_overload(add::), + ArgList::new().push_owned(75_i32).push_owned(25_i32), + ) + }, + |(func, args)| func.call(args), + BatchSize::SmallInput, + ); + }) + .bench_function("03_complex_overload", |b| { + b.iter_batched( + || { + ( + complex:: + .into_function() + .with_overload( + complex::, + ) + .with_overload( + complex::, + ) + .with_overload( + complex::, + ), + ArgList::new() + .push_owned(1_i32) + .push_owned(2_i64) + .push_owned(3_i128) + .push_owned(4_u8) + .push_owned(5_u16) + .push_owned(6_u32) + .push_owned(7_u64) + .push_owned(8_u128) + .push_owned(9_i8) + .push_owned(10_i16), + ) + }, + |(func, args)| func.call(args), + BatchSize::SmallInput, + ); + }) + .bench_function("10_simple_overload", |b| { + b.iter_batched( + || { + ( + add:: + .into_function() + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::) + .with_overload(add::), + ArgList::new().push_owned(75_u8).push_owned(25_u8), + ) + }, + |(func, args)| func.call(args), + BatchSize::SmallInput, + ); + }) + .bench_function("10_complex_overload", |b| { + b.iter_batched( + || { + ( + complex:: + .into_function() + .with_overload( + complex::, + ) + .with_overload( + complex::, + ) + .with_overload( + complex::, + ) + .with_overload( + complex::, + ) + .with_overload( + complex::, + ) + .with_overload( + complex::, + ) + .with_overload( + complex::, + ) + .with_overload( + complex::, + ) + .with_overload( + complex::, + ), + ArgList::new() + .push_owned(1_u8) + .push_owned(2_u16) + .push_owned(3_u32) + .push_owned(4_u64) + .push_owned(5_u128) + .push_owned(6_i8) + .push_owned(7_i16) + .push_owned(8_i32) + .push_owned(9_i64) + .push_owned(10_i128), + ) + }, + |(func, args)| func.call(args), + BatchSize::SmallInput, + ); + }); +} + fn clone(c: &mut Criterion) { c.benchmark_group("clone").bench_function("function", |b| { let add = add.into_function(); diff --git a/benches/benches/bevy_reflect/list.rs b/benches/benches/bevy_reflect/list.rs index e5fffaa3cddf0b..d9c92dd03ef061 100644 --- a/benches/benches/bevy_reflect/list.rs +++ b/benches/benches/bevy_reflect/list.rs @@ -2,8 +2,8 @@ use core::{iter, time::Duration}; use bevy_reflect::{DynamicList, List}; use criterion::{ - black_box, criterion_group, criterion_main, measurement::Measurement, BatchSize, - BenchmarkGroup, BenchmarkId, Criterion, Throughput, + black_box, criterion_group, measurement::Measurement, BatchSize, BenchmarkGroup, BenchmarkId, + Criterion, Throughput, }; criterion_group!( @@ -13,7 +13,6 @@ criterion_group!( dynamic_list_apply, dynamic_list_push ); -criterion_main!(benches); const 
WARM_UP_TIME: Duration = Duration::from_millis(500); const MEASUREMENT_TIME: Duration = Duration::from_secs(4); diff --git a/benches/benches/bevy_reflect/main.rs b/benches/benches/bevy_reflect/main.rs new file mode 100644 index 00000000000000..d347baccd0fa3a --- /dev/null +++ b/benches/benches/bevy_reflect/main.rs @@ -0,0 +1,17 @@ +#![expect(clippy::type_complexity)] + +use criterion::criterion_main; + +mod function; +mod list; +mod map; +mod path; +mod r#struct; + +criterion_main!( + function::benches, + list::benches, + map::benches, + path::benches, + r#struct::benches, +); diff --git a/benches/benches/bevy_reflect/map.rs b/benches/benches/bevy_reflect/map.rs index ae3894881358e0..054dcf9570da06 100644 --- a/benches/benches/bevy_reflect/map.rs +++ b/benches/benches/bevy_reflect/map.rs @@ -3,8 +3,8 @@ use core::{fmt::Write, iter, time::Duration}; use bevy_reflect::{DynamicMap, Map}; use bevy_utils::HashMap; use criterion::{ - black_box, criterion_group, criterion_main, measurement::Measurement, BatchSize, - BenchmarkGroup, BenchmarkId, Criterion, Throughput, + black_box, criterion_group, measurement::Measurement, BatchSize, BenchmarkGroup, BenchmarkId, + Criterion, Throughput, }; criterion_group!( @@ -14,7 +14,6 @@ criterion_group!( dynamic_map_get, dynamic_map_insert ); -criterion_main!(benches); const WARM_UP_TIME: Duration = Duration::from_millis(500); const MEASUREMENT_TIME: Duration = Duration::from_secs(4); @@ -266,7 +265,7 @@ fn dynamic_map_insert(criterion: &mut Criterion) { |mut map| { for i in 0..size as u64 { let key = black_box(i); - black_box(map.insert(key, i)); + map.insert(key, black_box(i)); } }, BatchSize::SmallInput, diff --git a/benches/benches/bevy_reflect/path.rs b/benches/benches/bevy_reflect/path.rs index f18885132ac3f3..2cca245239e89e 100644 --- a/benches/benches/bevy_reflect/path.rs +++ b/benches/benches/bevy_reflect/path.rs @@ -1,14 +1,11 @@ use core::{fmt::Write, str, time::Duration}; use bevy_reflect::ParsedPath; -use criterion::{ - black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, Throughput, -}; +use criterion::{black_box, criterion_group, BatchSize, BenchmarkId, Criterion, Throughput}; use rand::{distributions::Uniform, Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; criterion_group!(benches, parse_reflect_path); -criterion_main!(benches); const WARM_UP_TIME: Duration = Duration::from_millis(500); const MEASUREMENT_TIME: Duration = Duration::from_secs(2); @@ -20,7 +17,7 @@ fn deterministic_rand() -> ChaCha8Rng { ChaCha8Rng::seed_from_u64(42) } fn random_ident(rng: &mut ChaCha8Rng, f: &mut dyn Write) { - let between = Uniform::try_from(b'a'..=b'z').unwrap(); + let between = Uniform::from(b'a'..=b'z'); let ident_size = rng.gen_range(1..128); let ident: Vec = rng.sample_iter(between).take(ident_size).collect(); let ident = str::from_utf8(&ident).unwrap(); @@ -82,9 +79,9 @@ fn parse_reflect_path(criterion: &mut Criterion) { BenchmarkId::new("parse_reflect_path", size), &size, |bencher, &size| { - let mut mk_paths = mk_paths(size); + let mk_paths = mk_paths(size); bencher.iter_batched( - || mk_paths(), + mk_paths, |path| assert!(ParsedPath::parse(black_box(&path)).is_ok()), BatchSize::SmallInput, ); diff --git a/benches/benches/bevy_reflect/struct.rs b/benches/benches/bevy_reflect/struct.rs index 0a38088666f843..dfd324e7053e6e 100644 --- a/benches/benches/bevy_reflect/struct.rs +++ b/benches/benches/bevy_reflect/struct.rs @@ -1,9 +1,7 @@ use core::time::Duration; use bevy_reflect::{DynamicStruct, GetField, PartialReflect, Reflect, 
Struct}; -use criterion::{ - black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, Throughput, -}; +use criterion::{black_box, criterion_group, BatchSize, BenchmarkId, Criterion, Throughput}; criterion_group!( benches, @@ -16,7 +14,6 @@ criterion_group!( dynamic_struct_get_field, dynamic_struct_insert, ); -criterion_main!(benches); const WARM_UP_TIME: Duration = Duration::from_millis(500); const MEASUREMENT_TIME: Duration = Duration::from_secs(4); @@ -316,7 +313,7 @@ fn dynamic_struct_insert(criterion: &mut Criterion) { bencher.iter_batched( || s.clone_dynamic(), |mut s| { - black_box(s.insert(black_box(&field), ())); + s.insert(black_box(&field), ()); }, BatchSize::SmallInput, ); diff --git a/benches/benches/bevy_render/main.rs b/benches/benches/bevy_render/main.rs new file mode 100644 index 00000000000000..7a369bc9057055 --- /dev/null +++ b/benches/benches/bevy_render/main.rs @@ -0,0 +1,6 @@ +use criterion::criterion_main; + +mod render_layers; +mod torus; + +criterion_main!(render_layers::benches, torus::benches); diff --git a/benches/benches/bevy_render/render_layers.rs b/benches/benches/bevy_render/render_layers.rs index 84f6b8907754c5..42dd5356b55ed9 100644 --- a/benches/benches/bevy_render/render_layers.rs +++ b/benches/benches/bevy_render/render_layers.rs @@ -1,4 +1,4 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{black_box, criterion_group, Criterion}; use bevy_render::view::RenderLayers; @@ -6,14 +6,8 @@ fn render_layers(c: &mut Criterion) { c.bench_function("layers_intersect", |b| { let layer_a = RenderLayers::layer(1).with(2); let layer_b = RenderLayers::layer(1); - b.iter(|| { - black_box(layer_a.intersects(&layer_b)) - }); + b.iter(|| black_box(layer_a.intersects(&layer_b))); }); } -criterion_group!( - benches, - render_layers, -); -criterion_main!(benches); +criterion_group!(benches, render_layers); diff --git a/benches/benches/bevy_render/torus.rs b/benches/benches/bevy_render/torus.rs index 199cc7ce4c5eff..a5ef753bc8ccbb 100644 --- a/benches/benches/bevy_render/torus.rs +++ b/benches/benches/bevy_render/torus.rs @@ -1,4 +1,4 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{black_box, criterion_group, Criterion}; use bevy_render::mesh::TorusMeshBuilder; @@ -8,5 +8,4 @@ fn torus(c: &mut Criterion) { }); } -criterion_group!(benches, torus,); -criterion_main!(benches); +criterion_group!(benches, torus); diff --git a/benches/benches/bevy_tasks/iter.rs b/benches/benches/bevy_tasks/iter.rs index 3d4410926cf11b..4f8f75c8ed0e84 100644 --- a/benches/benches/bevy_tasks/iter.rs +++ b/benches/benches/bevy_tasks/iter.rs @@ -1,5 +1,5 @@ use bevy_tasks::{ParallelIterator, TaskPoolBuilder}; -use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; +use criterion::{black_box, criterion_group, BenchmarkId, Criterion}; struct ParChunks<'a, T>(core::slice::Chunks<'a, T>); impl<'a, T> ParallelIterator> for ParChunks<'a, T> @@ -141,4 +141,3 @@ fn bench_many_maps(c: &mut Criterion) { } criterion_group!(benches, bench_overhead, bench_for_each, bench_many_maps); -criterion_main!(benches); diff --git a/benches/benches/bevy_tasks/main.rs b/benches/benches/bevy_tasks/main.rs new file mode 100644 index 00000000000000..cfe74f5dca3de2 --- /dev/null +++ b/benches/benches/bevy_tasks/main.rs @@ -0,0 +1,5 @@ +use criterion::criterion_main; + +mod iter; + +criterion_main!(iter::benches); diff --git a/crates/bevy_a11y/Cargo.toml b/crates/bevy_a11y/Cargo.toml index 
62cf5ef240f353..82a3da1f297e56 100644 --- a/crates/bevy_a11y/Cargo.toml +++ b/crates/bevy_a11y/Cargo.toml @@ -15,7 +15,7 @@ bevy_derive = { path = "../bevy_derive", version = "0.15.0-dev" } bevy_ecs = { path = "../bevy_ecs", version = "0.15.0-dev" } bevy_reflect = { path = "../bevy_reflect", version = "0.15.0-dev" } -accesskit = "0.16" +accesskit = "0.17" [lints] workspace = true diff --git a/crates/bevy_a11y/src/lib.rs b/crates/bevy_a11y/src/lib.rs index f73a529f33e0bc..77ddf2073d9fa9 100644 --- a/crates/bevy_a11y/src/lib.rs +++ b/crates/bevy_a11y/src/lib.rs @@ -6,14 +6,19 @@ )] //! Accessibility for Bevy +//! +//! As of Bevy version 0.15 `accesskit` is no longer re-exported from this crate. +//! +//! If you need to use `accesskit`, you will need to add it as a separate dependency in your `Cargo.toml`. +//! +//! Make sure to use the same version of `accesskit` as Bevy. extern crate alloc; use alloc::sync::Arc; use core::sync::atomic::{AtomicBool, Ordering}; -pub use accesskit; -use accesskit::NodeBuilder; +use accesskit::Node; use bevy_app::Plugin; use bevy_derive::{Deref, DerefMut}; use bevy_ecs::{ @@ -84,10 +89,10 @@ impl ManageAccessibilityUpdates { /// If the entity doesn't have a parent, or if the immediate parent doesn't have /// an `AccessibilityNode`, its node will be an immediate child of the primary window. #[derive(Component, Clone, Deref, DerefMut)] -pub struct AccessibilityNode(pub NodeBuilder); +pub struct AccessibilityNode(pub Node); -impl From for AccessibilityNode { - fn from(node: NodeBuilder) -> Self { +impl From for AccessibilityNode { + fn from(node: Node) -> Self { Self(node) } } diff --git a/crates/bevy_animation/Cargo.toml b/crates/bevy_animation/Cargo.toml index ae1e8ee23cdc9e..81e662e5c38c3d 100644 --- a/crates/bevy_animation/Cargo.toml +++ b/crates/bevy_animation/Cargo.toml @@ -27,20 +27,19 @@ bevy_utils = { path = "../bevy_utils", version = "0.15.0-dev" } bevy_ecs = { path = "../bevy_ecs", version = "0.15.0-dev" } bevy_transform = { path = "../bevy_transform", version = "0.15.0-dev" } bevy_hierarchy = { path = "../bevy_hierarchy", version = "0.15.0-dev" } -bevy_ui = { path = "../bevy_ui", version = "0.15.0-dev", features = [ - "bevy_text", -] } -bevy_text = { path = "../bevy_text", version = "0.15.0-dev" } # other -fixedbitset = "0.5" petgraph = { version = "0.6", features = ["serde-1"] } ron = "0.8" serde = "1" blake3 = { version = "1.0" } -thiserror = "1" +downcast-rs = "1.2.0" +thiserror = { version = "2", default-features = false } +derive_more = { version = "1", default-features = false, features = ["from"] } +either = "1.13" thread_local = "1" uuid = { version = "1.7", features = ["v4"] } +smallvec = "1" [lints] workspace = true diff --git a/crates/bevy_animation/src/animatable.rs b/crates/bevy_animation/src/animatable.rs index 298d0125a208c3..6af653d3077b46 100644 --- a/crates/bevy_animation/src/animatable.rs +++ b/crates/bevy_animation/src/animatable.rs @@ -149,7 +149,8 @@ impl Animatable for Transform { if input.additive { translation += input.weight * Vec3A::from(input.value.translation); scale += input.weight * Vec3A::from(input.value.scale); - rotation = rotation.slerp(input.value.rotation, input.weight); + rotation = + Quat::slerp(Quat::IDENTITY, input.value.rotation, input.weight) * rotation; } else { translation = Vec3A::interpolate( &translation, @@ -181,8 +182,17 @@ impl Animatable for Quat { #[inline] fn blend(inputs: impl Iterator>) -> Self { let mut value = Self::IDENTITY; - for input in inputs { - value = Self::interpolate(&value, 
&input.value, input.weight); + for BlendInput { + weight, + value: incoming_value, + additive, + } in inputs + { + if additive { + value = Self::slerp(Self::IDENTITY, incoming_value, weight) * value; + } else { + value = Self::interpolate(&value, &incoming_value, weight); + } } value } diff --git a/crates/bevy_animation/src/animation_curves.rs b/crates/bevy_animation/src/animation_curves.rs index c7825e9066f9f0..0e535644cc4f32 100644 --- a/crates/bevy_animation/src/animation_curves.rs +++ b/crates/bevy_animation/src/animation_curves.rs @@ -7,9 +7,9 @@ //! `Curve` that we want to use to animate something. That could be defined in //! a number of different ways, but let's imagine that we've defined it [using a function]: //! -//! # use bevy_math::curve::{Curve, Interval, function_curve}; +//! # use bevy_math::curve::{Curve, Interval, FunctionCurve}; //! # use bevy_math::vec3; -//! let wobble_curve = function_curve( +//! let wobble_curve = FunctionCurve::new( //! Interval::UNIT, //! |t| { vec3(t.cos(), 0.0, 0.0) }, //! ); @@ -22,30 +22,32 @@ //! //! For instance, let's imagine that we want to use the `Vec3` output //! from our curve to animate the [translation component of a `Transform`]. For this, there is -//! the adaptor [`TranslationCurve`], which wraps any `Curve` and turns it into an -//! [`AnimationCurve`] that will use the given curve to animate the entity's translation: +//! the adaptor [`AnimatableCurve`], which wraps any [`Curve`] and [`AnimatableProperty`] and turns it into an +//! [`AnimationCurve`] that will use the given curve to animate the entity's property: //! -//! # use bevy_math::curve::{Curve, Interval, function_curve}; +//! # use bevy_math::curve::{Curve, Interval, FunctionCurve}; //! # use bevy_math::vec3; -//! # use bevy_animation::animation_curves::*; -//! # let wobble_curve = function_curve( +//! # use bevy_transform::components::Transform; +//! # use bevy_animation::{animated_field, animation_curves::*}; +//! # let wobble_curve = FunctionCurve::new( //! # Interval::UNIT, //! # |t| vec3(t.cos(), 0.0, 0.0) //! # ); -//! let wobble_animation = TranslationCurve(wobble_curve); +//! let wobble_animation = AnimatableCurve::new(animated_field!(Transform::translation), wobble_curve); //! -//! And finally, this `AnimationCurve` needs to be added to an [`AnimationClip`] in order to +//! And finally, this [`AnimationCurve`] needs to be added to an [`AnimationClip`] in order to //! actually animate something. This is what that looks like: //! -//! # use bevy_math::curve::{Curve, Interval, function_curve}; -//! # use bevy_animation::{AnimationClip, AnimationTargetId, animation_curves::*}; +//! # use bevy_math::curve::{Curve, Interval, FunctionCurve}; +//! # use bevy_animation::{AnimationClip, AnimationTargetId, animated_field, animation_curves::*}; +//! # use bevy_transform::components::Transform; //! # use bevy_core::Name; //! # use bevy_math::vec3; -//! # let wobble_curve = function_curve( +//! # let wobble_curve = FunctionCurve::new( //! # Interval::UNIT, //! # |t| { vec3(t.cos(), 0.0, 0.0) }, //! # ); -//! # let wobble_animation = TranslationCurve(wobble_curve); +//! # let wobble_animation = AnimatableCurve::new(animated_field!(Transform::translation), wobble_curve); //! # let animation_target_id = AnimationTargetId::from(&Name::new("Test")); //! let mut animation_clip = AnimationClip::default(); //! animation_clip.add_curve_to_target( @@ -59,22 +61,27 @@ //! a [`Curve`], which produces time-related data of some kind, to an [`AnimationCurve`], which //! 
knows how to apply that data to an entity. //! -//! ## `Transform` +//! ## Animated Fields //! -//! [`Transform`] is special and has its own adaptors: -//! - [`TranslationCurve`], which uses `Vec3` output to animate [`Transform::translation`] -//! - [`RotationCurve`], which uses `Quat` output to animate [`Transform::rotation`] -//! - [`ScaleCurve`], which uses `Vec3` output to animate [`Transform::scale`] +//! The [`animated_field`] macro (which returns an [`AnimatedField`]), in combination with [`AnimatableCurve`] +//! is the easiest way to make an animation curve (see the example above). //! -//! ## Animatable properties +//! This will select a field on a component and pass it to a [`Curve`] with a type that matches the field. //! -//! Animation of arbitrary components can be accomplished using [`AnimatableProperty`] in +//! ## Animatable Properties +//! +//! Animation of arbitrary aspects of entities can be accomplished using [`AnimatableProperty`] in //! conjunction with [`AnimatableCurve`]. See the documentation [there] for details. //! -//! [using a function]: bevy_math::curve::function_curve +//! ## Custom [`AnimationCurve`] and [`AnimationCurveEvaluator`] +//! +//! This is the lowest-level option with the most control, but it is also the most complicated. +//! +//! [using a function]: bevy_math::curve::FunctionCurve //! [translation component of a `Transform`]: bevy_transform::prelude::Transform::translation //! [`AnimationClip`]: crate::AnimationClip //! [there]: AnimatableProperty +//! [`animated_field`]: crate::animated_field use core::{ any::TypeId, @@ -82,91 +89,187 @@ use core::{ marker::PhantomData, }; -use bevy_ecs::{component::Component, world::Mut}; -use bevy_math::{ - curve::{ - cores::{UnevenCore, UnevenCoreError}, - iterable::IterableCurve, - Curve, Interval, - }, - FloatExt, Quat, Vec3, +use bevy_ecs::component::{Component, Mutable}; +use bevy_math::curve::{ + cores::{UnevenCore, UnevenCoreError}, + iterable::IterableCurve, + Curve, Interval, }; -use bevy_reflect::{FromReflect, Reflect, Reflectable, TypePath}; +use bevy_reflect::{FromReflect, Reflect, Reflectable, TypeInfo, Typed}; use bevy_render::mesh::morph::MorphWeights; -use bevy_transform::prelude::Transform; -use crate::{prelude::Animatable, AnimationEntityMut, AnimationEvaluationError}; +use crate::{ + graph::AnimationNodeIndex, + prelude::{Animatable, BlendInput}, + AnimationEntityMut, AnimationEvaluationError, +}; +use bevy_utils::Hashed; +use downcast_rs::{impl_downcast, Downcast}; /// A value on a component that Bevy can animate. /// /// You can implement this trait on a unit struct in order to support animating /// custom components other than transforms and morph weights. Use that type in /// conjunction with [`AnimatableCurve`] (and perhaps [`AnimatableKeyframeCurve`] -/// to define the animation itself). For example, in order to animate font size of a -/// text section from 24 pt. to 80 pt., you might use: +/// to define the animation itself). 
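Before the trait-based `AnimatableProperty` example that continues just below, here is a minimal sketch of the simpler animated-field path described in the module docs above, assuming the `animated_field!` / `AnimatableCurve::new` / `AnimatableKeyframeCurve` API shown in those docs; the target name "bone" and the keyframe values are illustrative placeholders only.

use bevy_animation::{
    animated_field,
    prelude::{AnimatableCurve, AnimatableKeyframeCurve},
    AnimationClip, AnimationTargetId,
};
use bevy_core::Name;
use bevy_math::vec3;
use bevy_transform::components::Transform;

fn build_translation_clip() -> AnimationClip {
    // Illustrative sketch: keyframe values and the "bone" target name are placeholders.
    // Keyframes for Transform::translation; values between keyframes are interpolated
    // through Vec3's Animatable implementation.
    let keyframes = AnimatableKeyframeCurve::new([
        (0.0, vec3(0.0, 0.0, 0.0)),
        (1.0, vec3(1.0, 0.0, 0.0)),
    ])
    .expect("keyframes should span a non-empty interval");

    // Bind the curve to the translation field via the animated_field! macro.
    let wobble = AnimatableCurve::new(animated_field!(Transform::translation), keyframes);

    let mut clip = AnimationClip::default();
    clip.add_curve_to_target(AnimationTargetId::from(&Name::new("bone")), wobble);
    clip
}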
+/// For example, in order to animate field of view, you might use: /// -/// # use bevy_animation::prelude::AnimatableProperty; +/// # use bevy_animation::{prelude::AnimatableProperty, AnimationEntityMut, AnimationEvaluationError, animation_curves::EvaluatorId}; /// # use bevy_reflect::Reflect; -/// # use bevy_text::Text; +/// # use std::any::TypeId; +/// # use bevy_render::camera::PerspectiveProjection; /// #[derive(Reflect)] -/// struct FontSizeProperty; +/// struct FieldOfViewProperty; /// -/// impl AnimatableProperty for FontSizeProperty { -/// type Component = Text; +/// impl AnimatableProperty for FieldOfViewProperty { /// type Property = f32; -/// fn get_mut(component: &mut Self::Component) -> Option<&mut Self::Property> { -/// Some(&mut component.sections.get_mut(0)?.style.font_size) +/// fn get_mut<'a>(&self, entity: &'a mut AnimationEntityMut) -> Result<&'a mut Self::Property, AnimationEvaluationError> { +/// let component = entity +/// .get_mut::() +/// .ok_or( +/// AnimationEvaluationError::ComponentNotPresent( +/// TypeId::of::() +/// ) +/// )? +/// .into_inner(); +/// Ok(&mut component.fov) +/// } +/// +/// fn evaluator_id(&self) -> EvaluatorId { +/// EvaluatorId::Type(TypeId::of::()) /// } /// } /// /// You can then create an [`AnimationClip`] to animate this property like so: /// -/// # use bevy_animation::{AnimationClip, AnimationTargetId, VariableCurve}; +/// # use bevy_animation::{AnimationClip, AnimationTargetId, VariableCurve, AnimationEntityMut, AnimationEvaluationError, animation_curves::EvaluatorId}; /// # use bevy_animation::prelude::{AnimatableProperty, AnimatableKeyframeCurve, AnimatableCurve}; /// # use bevy_core::Name; /// # use bevy_reflect::Reflect; -/// # use bevy_text::Text; +/// # use bevy_render::camera::PerspectiveProjection; +/// # use std::any::TypeId; /// # let animation_target_id = AnimationTargetId::from(&Name::new("Test")); -/// # #[derive(Reflect)] -/// # struct FontSizeProperty; -/// # impl AnimatableProperty for FontSizeProperty { -/// # type Component = Text; -/// # type Property = f32; -/// # fn get_mut(component: &mut Self::Component) -> Option<&mut Self::Property> { -/// # Some(&mut component.sections.get_mut(0)?.style.font_size) -/// # } +/// # #[derive(Reflect, Clone)] +/// # struct FieldOfViewProperty; +/// # impl AnimatableProperty for FieldOfViewProperty { +/// # type Property = f32; +/// # fn get_mut<'a>(&self, entity: &'a mut AnimationEntityMut) -> Result<&'a mut Self::Property, AnimationEvaluationError> { +/// # let component = entity +/// # .get_mut::() +/// # .ok_or( +/// # AnimationEvaluationError::ComponentNotPresent( +/// # TypeId::of::() +/// # ) +/// # )? 
+/// # .into_inner(); +/// # Ok(&mut component.fov) +/// # } +/// # fn evaluator_id(&self) -> EvaluatorId { +/// # EvaluatorId::Type(TypeId::of::()) +/// # } /// # } /// let mut animation_clip = AnimationClip::default(); /// animation_clip.add_curve_to_target( /// animation_target_id, -/// AnimatableKeyframeCurve::new( -/// [ -/// (0.0, 24.0), -/// (1.0, 80.0), -/// ] +/// AnimatableCurve::new( +/// FieldOfViewProperty, +/// AnimatableKeyframeCurve::new([ +/// (0.0, core::f32::consts::PI / 4.0), +/// (1.0, core::f32::consts::PI / 3.0), +/// ]).expect("Failed to create font size curve") /// ) -/// .map(AnimatableCurve::::from_curve) -/// .expect("Failed to create font size curve") /// ); /// /// Here, the use of [`AnimatableKeyframeCurve`] creates a curve out of the given keyframe time-value /// pairs, using the [`Animatable`] implementation of `f32` to interpolate between them. The -/// invocation of [`AnimatableCurve::from_curve`] with `FontSizeProperty` indicates that the `f32` -/// output from that curve is to be used to animate the font size of a `Text` component (as +/// invocation of [`AnimatableCurve::new`] with `FieldOfViewProperty` indicates that the `f32` +/// output from that curve is to be used to animate the font size of a `PerspectiveProjection` component (as /// configured above). /// /// [`AnimationClip`]: crate::AnimationClip -pub trait AnimatableProperty: Reflect + TypePath { - /// The type of the component that the property lives on. - type Component: Component; +pub trait AnimatableProperty: Send + Sync + 'static { + /// The animated property type. + type Property: Animatable; + + /// Retrieves the property from the given `entity`. + fn get_mut<'a>( + &self, + entity: &'a mut AnimationEntityMut, + ) -> Result<&'a mut Self::Property, AnimationEvaluationError>; + + /// The [`EvaluatorId`] used to look up the [`AnimationCurveEvaluator`] for this [`AnimatableProperty`]. + /// For a given animated property, this ID should always be the same to allow things like animation blending to occur. + fn evaluator_id(&self) -> EvaluatorId; +} + +/// A [`Component`] field that can be animated, defined by a function that reads the component and returns +/// the accessed field / property. +/// +/// The best way to create an instance of this type is via the [`animated_field`] macro. +/// +/// `C` is the component being animated, `A` is the type of the [`Animatable`] field on the component, and `F` is an accessor +/// function that accepts a reference to `C` and retrieves the field `A`. +/// +/// [`animated_field`]: crate::animated_field +#[derive(Clone)] +pub struct AnimatedField &mut A> { + func: F, + /// A pre-hashed (component-type-id, reflected-field-index) pair, uniquely identifying a component field + evaluator_id: Hashed<(TypeId, usize)>, + marker: PhantomData<(C, A)>, +} - /// The type of the property to be animated. - type Property: Animatable + FromReflect + Reflectable + Clone + Sync + Debug; +impl AnimatableProperty for AnimatedField +where + C: Component, + A: Animatable + Clone + Sync + Debug, + F: Fn(&mut C) -> &mut A + Send + Sync + 'static, +{ + type Property = A; + fn get_mut<'a>( + &self, + entity: &'a mut AnimationEntityMut, + ) -> Result<&'a mut A, AnimationEvaluationError> { + let c = entity + .get_mut::() + .ok_or_else(|| AnimationEvaluationError::ComponentNotPresent(TypeId::of::()))?; + Ok((self.func)(c.into_inner())) + } - /// Given a reference to the component, returns a reference to the property. 
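    // A hedged illustration (assumption, not taken from this patch): the `animated_field!`
    // macro referenced in the docs above is assumed to construct an `AnimatedField` through
    // `new_unchecked`, roughly like this hypothetical call:
    //
    //     let translation =
    //         AnimatedField::new_unchecked("translation", |t: &mut Transform| &mut t.translation);
    //
    // `new_unchecked` (defined just below) resolves the reflected field index of
    // "translation" on `Transform` and hashes the (component TypeId, field index) pair,
    // which `evaluator_id` then reports so that curves animating the same field share a
    // single evaluator and can be blended together.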
+ fn evaluator_id(&self) -> EvaluatorId { + EvaluatorId::ComponentField(&self.evaluator_id) + } +} + +impl &mut P + 'static> AnimatedField { + /// Creates a new instance of [`AnimatedField`]. This operates under the assumption that + /// `C` is a reflect-able struct, and that `field_name` is a valid field on that struct. /// - /// If the property couldn't be found, returns `None`. - fn get_mut(component: &mut Self::Component) -> Option<&mut Self::Property>; + /// # Panics + /// If the type of `C` is not a struct or if the `field_name` does not exist. + pub fn new_unchecked(field_name: &str, func: F) -> Self { + let field_index; + if let TypeInfo::Struct(struct_info) = C::type_info() { + field_index = struct_info + .index_of(field_name) + .expect("Field name should exist"); + } else if let TypeInfo::TupleStruct(struct_info) = C::type_info() { + field_index = field_name + .parse() + .expect("Field name should be a valid tuple index"); + if field_index >= struct_info.field_len() { + panic!("Field name should be a valid tuple index"); + } + } else { + panic!("Only structs are supported in `AnimatedField::new_unchecked`") + } + + Self { + func, + evaluator_id: Hashed::new((TypeId::of::(), field_index)), + marker: PhantomData, + } + } } /// This trait collects the additional requirements on top of [`Curve`] needed for a @@ -183,9 +286,24 @@ impl AnimationCompatibleCurve for C where C: Curve + Debug + Clone + #[derive(Reflect, FromReflect)] #[reflect(from_reflect = false)] pub struct AnimatableCurve { - curve: C, - #[reflect(ignore)] - _phantom: PhantomData

, + /// The property selector, which defines what component to access and how to access + /// a property on that component. + pub property: P, + + /// The inner [curve] whose values are used to animate the property. + /// + /// [curve]: Curve + pub curve: C, +} + +/// An [`AnimatableCurveEvaluator`] for [`AnimatableProperty`] instances. +/// +/// You shouldn't ordinarily need to instantiate one of these manually. Bevy +/// will automatically do so when you use an [`AnimatableCurve`] instance. +#[derive(Reflect)] +pub struct AnimatableCurveEvaluator { + evaluator: BasicAnimationCurveEvaluator, + property: Box>, } impl AnimatableCurve @@ -197,22 +315,20 @@ where /// valued in an [animatable property]. /// /// [animatable property]: AnimatableProperty::Property - pub fn from_curve(curve: C) -> Self { - Self { - curve, - _phantom: PhantomData, - } + pub fn new(property: P, curve: C) -> Self { + Self { property, curve } } } impl Clone for AnimatableCurve where C: Clone, + P: Clone, { fn clone(&self) -> Self { Self { curve: self.curve.clone(), - _phantom: PhantomData, + property: self.property.clone(), } } } @@ -228,10 +344,10 @@ where } } -impl AnimationCurve for AnimatableCurve +impl AnimationCurve for AnimatableCurve where - P: AnimatableProperty, - C: AnimationCompatibleCurve, + P: AnimatableProperty + Clone, + C: AnimationCompatibleCurve + Clone, { fn clone_value(&self) -> Box { Box::new(self.clone()) @@ -241,72 +357,119 @@ where self.curve.domain() } - fn apply<'a>( + fn evaluator_id(&self) -> EvaluatorId { + self.property.evaluator_id() + } + + fn create_evaluator(&self) -> Box { + Box::new(AnimatableCurveEvaluator:: { + evaluator: BasicAnimationCurveEvaluator::default(), + property: Box::new(self.property.clone()), + }) + } + + fn apply( &self, + curve_evaluator: &mut dyn AnimationCurveEvaluator, t: f32, - _transform: Option>, - mut entity: AnimationEntityMut<'a>, weight: f32, + graph_node: AnimationNodeIndex, ) -> Result<(), AnimationEvaluationError> { - let mut component = entity.get_mut::().ok_or_else(|| { - AnimationEvaluationError::ComponentNotPresent(TypeId::of::()) - })?; - let property = P::get_mut(&mut component) - .ok_or_else(|| AnimationEvaluationError::PropertyNotPresent(TypeId::of::

()))?; + let curve_evaluator = curve_evaluator + .downcast_mut::>() + .unwrap(); let value = self.curve.sample_clamped(t); - *property = ::interpolate(property, &value, weight); + curve_evaluator + .evaluator + .stack + .push(BasicAnimationCurveEvaluatorStackElement { + value, + weight, + graph_node, + }); Ok(()) } } -/// This type allows a [curve] valued in `Vec3` to become an [`AnimationCurve`] that animates -/// the translation component of a transform. -/// -/// [curve]: Curve -#[derive(Debug, Clone, Reflect, FromReflect)] -#[reflect(from_reflect = false)] -pub struct TranslationCurve(pub C); - -impl AnimationCurve for TranslationCurve -where - C: AnimationCompatibleCurve, -{ - fn clone_value(&self) -> Box { - Box::new(self.clone()) +impl AnimationCurveEvaluator for AnimatableCurveEvaluator { + fn blend(&mut self, graph_node: AnimationNodeIndex) -> Result<(), AnimationEvaluationError> { + self.evaluator.combine(graph_node, /*additive=*/ false) } - fn domain(&self) -> Interval { - self.0.domain() + fn add(&mut self, graph_node: AnimationNodeIndex) -> Result<(), AnimationEvaluationError> { + self.evaluator.combine(graph_node, /*additive=*/ true) } - fn apply<'a>( - &self, - t: f32, - transform: Option>, - _entity: AnimationEntityMut<'a>, + fn push_blend_register( + &mut self, weight: f32, + graph_node: AnimationNodeIndex, ) -> Result<(), AnimationEvaluationError> { - let mut component = transform.ok_or_else(|| { - AnimationEvaluationError::ComponentNotPresent(TypeId::of::()) - })?; - let new_value = self.0.sample_clamped(t); - component.translation = - ::interpolate(&component.translation, &new_value, weight); + self.evaluator.push_blend_register(weight, graph_node) + } + + fn commit<'a>( + &mut self, + mut entity: AnimationEntityMut<'a>, + ) -> Result<(), AnimationEvaluationError> { + let property = self.property.get_mut(&mut entity)?; + *property = self + .evaluator + .stack + .pop() + .ok_or_else(inconsistent::>)? + .value; Ok(()) } } -/// This type allows a [curve] valued in `Quat` to become an [`AnimationCurve`] that animates -/// the rotation component of a transform. +/// This type allows an [`IterableCurve`] valued in `f32` to be used as an [`AnimationCurve`] +/// that animates [morph weights]. /// -/// [curve]: Curve +/// [morph weights]: MorphWeights #[derive(Debug, Clone, Reflect, FromReflect)] #[reflect(from_reflect = false)] -pub struct RotationCurve(pub C); +pub struct WeightsCurve(pub C); -impl AnimationCurve for RotationCurve +#[derive(Reflect)] +struct WeightsCurveEvaluator { + /// The values of the stack, in which each element is a list of morph target + /// weights. + /// + /// The stack elements are concatenated and tightly packed together. + /// + /// The number of elements in this stack will always be a multiple of + /// [`Self::morph_target_count`]. + stack_morph_target_weights: Vec, + + /// The blend weights and graph node indices for each element of the stack. + /// + /// This should have as many elements as there are stack nodes. In other + /// words, `Self::stack_morph_target_weights.len() * + /// Self::morph_target_counts as usize == + /// Self::stack_blend_weights_and_graph_nodes`. + stack_blend_weights_and_graph_nodes: Vec<(f32, AnimationNodeIndex)>, + + /// The morph target weights in the blend register, if any. + /// + /// This field should be ignored if [`Self::blend_register_blend_weight`] is + /// `None`. If non-empty, it will always have [`Self::morph_target_count`] + /// elements in it. 
+ blend_register_morph_target_weights: Vec, + + /// The weight in the blend register. + /// + /// This will be `None` if the blend register is empty. In that case, + /// [`Self::blend_register_morph_target_weights`] will be empty. + blend_register_blend_weight: Option, + + /// The number of morph targets that are to be animated. + morph_target_count: Option, +} + +impl AnimationCurve for WeightsCurve where - C: AnimationCompatibleCurve, + C: IterableCurve + Debug + Clone + Reflectable, { fn clone_value(&self) -> Box { Box::new(self.clone()) @@ -316,139 +479,440 @@ where self.0.domain() } - fn apply<'a>( + fn evaluator_id(&self) -> EvaluatorId { + EvaluatorId::Type(TypeId::of::()) + } + + fn create_evaluator(&self) -> Box { + Box::new(WeightsCurveEvaluator { + stack_morph_target_weights: vec![], + stack_blend_weights_and_graph_nodes: vec![], + blend_register_morph_target_weights: vec![], + blend_register_blend_weight: None, + morph_target_count: None, + }) + } + + fn apply( &self, + curve_evaluator: &mut dyn AnimationCurveEvaluator, t: f32, - transform: Option>, - _entity: AnimationEntityMut<'a>, weight: f32, + graph_node: AnimationNodeIndex, ) -> Result<(), AnimationEvaluationError> { - let mut component = transform.ok_or_else(|| { - AnimationEvaluationError::ComponentNotPresent(TypeId::of::()) - })?; - let new_value = self.0.sample_clamped(t); - component.rotation = - ::interpolate(&component.rotation, &new_value, weight); + let curve_evaluator = curve_evaluator + .downcast_mut::() + .unwrap(); + + let prev_morph_target_weights_len = curve_evaluator.stack_morph_target_weights.len(); + curve_evaluator + .stack_morph_target_weights + .extend(self.0.sample_iter_clamped(t)); + curve_evaluator.morph_target_count = Some( + (curve_evaluator.stack_morph_target_weights.len() - prev_morph_target_weights_len) + as u32, + ); + + curve_evaluator + .stack_blend_weights_and_graph_nodes + .push((weight, graph_node)); Ok(()) } } -/// This type allows a [curve] valued in `Vec3` to become an [`AnimationCurve`] that animates -/// the scale component of a transform. -/// -/// [curve]: Curve -#[derive(Debug, Clone, Reflect, FromReflect)] -#[reflect(from_reflect = false)] -pub struct ScaleCurve(pub C); +impl WeightsCurveEvaluator { + fn combine( + &mut self, + graph_node: AnimationNodeIndex, + additive: bool, + ) -> Result<(), AnimationEvaluationError> { + let Some(&(_, top_graph_node)) = self.stack_blend_weights_and_graph_nodes.last() else { + return Ok(()); + }; + if top_graph_node != graph_node { + return Ok(()); + } -impl AnimationCurve for ScaleCurve -where - C: AnimationCompatibleCurve, -{ - fn clone_value(&self) -> Box { - Box::new(self.clone()) + let (weight_to_blend, _) = self.stack_blend_weights_and_graph_nodes.pop().unwrap(); + let stack_iter = self.stack_morph_target_weights.drain( + (self.stack_morph_target_weights.len() - self.morph_target_count.unwrap() as usize).., + ); + + match self.blend_register_blend_weight { + None => { + self.blend_register_blend_weight = Some(weight_to_blend); + self.blend_register_morph_target_weights.clear(); + + // In the additive case, the values pushed onto the blend register need + // to be scaled by the weight. 
+ if additive { + self.blend_register_morph_target_weights + .extend(stack_iter.map(|m| m * weight_to_blend)); + } else { + self.blend_register_morph_target_weights.extend(stack_iter); + } + } + + Some(ref mut current_weight) => { + *current_weight += weight_to_blend; + for (dest, src) in self + .blend_register_morph_target_weights + .iter_mut() + .zip(stack_iter) + { + if additive { + *dest += src * weight_to_blend; + } else { + *dest = f32::interpolate(dest, &src, weight_to_blend / *current_weight); + } + } + } + } + + Ok(()) + } +} + +impl AnimationCurveEvaluator for WeightsCurveEvaluator { + fn blend(&mut self, graph_node: AnimationNodeIndex) -> Result<(), AnimationEvaluationError> { + self.combine(graph_node, /*additive=*/ false) } - fn domain(&self) -> Interval { - self.0.domain() + fn add(&mut self, graph_node: AnimationNodeIndex) -> Result<(), AnimationEvaluationError> { + self.combine(graph_node, /*additive=*/ true) } - fn apply<'a>( - &self, - t: f32, - transform: Option>, - _entity: AnimationEntityMut<'a>, + fn push_blend_register( + &mut self, weight: f32, + graph_node: AnimationNodeIndex, ) -> Result<(), AnimationEvaluationError> { - let mut component = transform.ok_or_else(|| { - AnimationEvaluationError::ComponentNotPresent(TypeId::of::()) - })?; - let new_value = self.0.sample_clamped(t); - component.scale = ::interpolate(&component.scale, &new_value, weight); + if self.blend_register_blend_weight.take().is_some() { + self.stack_morph_target_weights + .append(&mut self.blend_register_morph_target_weights); + self.stack_blend_weights_and_graph_nodes + .push((weight, graph_node)); + } + Ok(()) + } + + fn commit<'a>( + &mut self, + mut entity: AnimationEntityMut<'a>, + ) -> Result<(), AnimationEvaluationError> { + if self.stack_morph_target_weights.is_empty() { + return Ok(()); + } + + // Compute the index of the first morph target in the last element of + // the stack. + let index_of_first_morph_target = + self.stack_morph_target_weights.len() - self.morph_target_count.unwrap() as usize; + + for (dest, src) in entity + .get_mut::() + .ok_or_else(|| { + AnimationEvaluationError::ComponentNotPresent(TypeId::of::()) + })? + .weights_mut() + .iter_mut() + .zip(self.stack_morph_target_weights[index_of_first_morph_target..].iter()) + { + *dest = *src; + } + self.stack_morph_target_weights.clear(); + self.stack_blend_weights_and_graph_nodes.clear(); Ok(()) } } -/// This type allows an [`IterableCurve`] valued in `f32` to be used as an [`AnimationCurve`] -/// that animates [morph weights]. 
-/// -/// [morph weights]: MorphWeights -#[derive(Debug, Clone, Reflect, FromReflect)] -#[reflect(from_reflect = false)] -pub struct WeightsCurve(pub C); +#[derive(Reflect)] +struct BasicAnimationCurveEvaluator +where + A: Animatable, +{ + stack: Vec>, + blend_register: Option<(A, f32)>, +} -impl AnimationCurve for WeightsCurve +#[derive(Reflect)] +struct BasicAnimationCurveEvaluatorStackElement where - C: IterableCurve + Debug + Clone + Reflectable, + A: Animatable, { - fn clone_value(&self) -> Box { - Box::new(self.clone()) - } + value: A, + weight: f32, + graph_node: AnimationNodeIndex, +} - fn domain(&self) -> Interval { - self.0.domain() +impl Default for BasicAnimationCurveEvaluator +where + A: Animatable, +{ + fn default() -> Self { + BasicAnimationCurveEvaluator { + stack: vec![], + blend_register: None, + } } +} - fn apply<'a>( - &self, - t: f32, - _transform: Option>, - mut entity: AnimationEntityMut<'a>, - weight: f32, +impl BasicAnimationCurveEvaluator +where + A: Animatable, +{ + fn combine( + &mut self, + graph_node: AnimationNodeIndex, + additive: bool, ) -> Result<(), AnimationEvaluationError> { - let mut dest = entity.get_mut::().ok_or_else(|| { - AnimationEvaluationError::ComponentNotPresent(TypeId::of::()) - })?; - lerp_morph_weights(dest.weights_mut(), self.0.sample_iter_clamped(t), weight); + let Some(top) = self.stack.last() else { + return Ok(()); + }; + if top.graph_node != graph_node { + return Ok(()); + } + + let BasicAnimationCurveEvaluatorStackElement { + value: value_to_blend, + weight: weight_to_blend, + graph_node: _, + } = self.stack.pop().unwrap(); + + match self.blend_register.take() { + None => { + self.initialize_blend_register(value_to_blend, weight_to_blend, additive); + } + Some((mut current_value, mut current_weight)) => { + current_weight += weight_to_blend; + + if additive { + current_value = A::blend( + [ + BlendInput { + weight: 1.0, + value: current_value, + additive: true, + }, + BlendInput { + weight: weight_to_blend, + value: value_to_blend, + additive: true, + }, + ] + .into_iter(), + ); + } else { + current_value = A::interpolate( + ¤t_value, + &value_to_blend, + weight_to_blend / current_weight, + ); + } + + self.blend_register = Some((current_value, current_weight)); + } + } + Ok(()) } -} -/// Update `morph_weights` based on weights in `incoming_weights` with a linear interpolation -/// on `lerp_weight`. -fn lerp_morph_weights( - morph_weights: &mut [f32], - incoming_weights: impl Iterator, - lerp_weight: f32, -) { - let zipped = morph_weights.iter_mut().zip(incoming_weights); - for (morph_weight, incoming_weights) in zipped { - *morph_weight = morph_weight.lerp(incoming_weights, lerp_weight); + fn initialize_blend_register(&mut self, value: A, weight: f32, additive: bool) { + if additive { + let scaled_value = A::blend( + [BlendInput { + weight, + value, + additive: true, + }] + .into_iter(), + ); + self.blend_register = Some((scaled_value, weight)); + } else { + self.blend_register = Some((value, weight)); + } + } + + fn push_blend_register( + &mut self, + weight: f32, + graph_node: AnimationNodeIndex, + ) -> Result<(), AnimationEvaluationError> { + if let Some((value, _)) = self.blend_register.take() { + self.stack.push(BasicAnimationCurveEvaluatorStackElement { + value, + weight, + graph_node, + }); + } + Ok(()) } } -/// A low-level trait that provides control over how curves are actually applied to entities -/// by the animation system. 
+/// A low-level trait that provides control over how curves are actually applied +/// to entities by the animation system. /// -/// Typically, this will not need to be implemented manually, since it is automatically -/// implemented by [`AnimatableCurve`] and other curves used by the animation system -/// (e.g. those that animate parts of transforms or morph weights). However, this can be -/// implemented manually when `AnimatableCurve` is not sufficiently expressive. +/// Typically, this will not need to be implemented manually, since it is +/// automatically implemented by [`AnimatableCurve`] and other curves used by +/// the animation system (e.g. those that animate parts of transforms or morph +/// weights). However, this can be implemented manually when `AnimatableCurve` +/// is not sufficiently expressive. /// -/// In many respects, this behaves like a type-erased form of [`Curve`], where the output -/// type of the curve is remembered only in the components that are mutated in the -/// implementation of [`apply`]. +/// In many respects, this behaves like a type-erased form of [`Curve`], where +/// the output type of the curve is remembered only in the components that are +/// mutated in the implementation of [`apply`]. /// /// [`apply`]: AnimationCurve::apply -pub trait AnimationCurve: Reflect + Debug + Send + Sync { +pub trait AnimationCurve: Debug + Send + Sync + 'static { /// Returns a boxed clone of this value. fn clone_value(&self) -> Box; /// The range of times for which this animation is defined. fn domain(&self) -> Interval; - /// Write the value of sampling this curve at time `t` into `transform` or `entity`, - /// as appropriate, interpolating between the existing value and the sampled value - /// using the given `weight`. - fn apply<'a>( + /// Returns the type ID of the [`AnimationCurveEvaluator`]. + /// + /// This must match the type returned by [`Self::create_evaluator`]. It must + /// be a single type that doesn't depend on the type of the curve. + fn evaluator_id(&self) -> EvaluatorId; + + /// Returns a newly-instantiated [`AnimationCurveEvaluator`] for use with + /// this curve. + /// + /// All curve types must return the same type of + /// [`AnimationCurveEvaluator`]. The returned value must match the type + /// returned by [`Self::evaluator_id`]. + fn create_evaluator(&self) -> Box; + + /// Samples the curve at the given time `t`, and pushes the sampled value + /// onto the evaluation stack of the `curve_evaluator`. + /// + /// The `curve_evaluator` parameter points to the value returned by + /// [`Self::create_evaluator`], upcast to an `&mut dyn + /// AnimationCurveEvaluator`. Typically, implementations of [`Self::apply`] + /// will want to downcast the `curve_evaluator` parameter to the concrete + /// type [`Self::evaluator_id`] in order to push values of the appropriate + /// type onto its evaluation stack. + /// + /// Be sure not to confuse the `t` and `weight` values. The former + /// determines the position at which the *curve* is sampled, while `weight` + /// ultimately determines how much the *stack values* will be blended + /// together (see the definition of [`AnimationCurveEvaluator::blend`]). + fn apply( &self, + curve_evaluator: &mut dyn AnimationCurveEvaluator, t: f32, - transform: Option>, - entity: AnimationEntityMut<'a>, weight: f32, + graph_node: AnimationNodeIndex, ) -> Result<(), AnimationEvaluationError>; } +/// The [`EvaluatorId`] is used to look up the [`AnimationCurveEvaluator`] for an [`AnimatableProperty`]. 
+/// For a given animated property, this ID should always be the same to allow things like animation blending to occur. +#[derive(Clone)] +pub enum EvaluatorId<'a> { + /// Corresponds to a specific field on a specific component type. + /// The `TypeId` should correspond to the component type, and the `usize` + /// should correspond to the Reflect-ed field index of the field. + // + // IMPLEMENTATION NOTE: The Hashed<(TypeId, usize) is intentionally cheap to clone, as it will be cloned per frame by the evaluator + // Switching the field index `usize` for something like a field name `String` would probably be too expensive to justify + ComponentField(&'a Hashed<(TypeId, usize)>), + /// Corresponds to a custom property of a given type. This should be the [`TypeId`] + /// of the custom [`AnimatableProperty`]. + Type(TypeId), +} + +/// A low-level trait for use in [`crate::VariableCurve`] that provides fine +/// control over how animations are evaluated. +/// +/// You can implement this trait when the generic [`AnimatableCurveEvaluator`] +/// isn't sufficiently-expressive for your needs. For example, [`MorphWeights`] +/// implements this trait instead of using [`AnimatableCurveEvaluator`] because +/// it needs to animate arbitrarily many weights at once, which can't be done +/// with [`Animatable`] as that works on fixed-size values only. +/// +/// If you implement this trait, you should also implement [`AnimationCurve`] on +/// your curve type, as that trait allows creating instances of this one. +/// +/// Implementations of [`AnimatableCurveEvaluator`] should maintain a *stack* of +/// (value, weight, node index) triples, as well as a *blend register*, which is +/// either a (value, weight) pair or empty. *Value* here refers to an instance +/// of the value being animated: for example, [`Vec3`] in the case of +/// translation keyframes. The stack stores intermediate values generated while +/// evaluating the [`crate::graph::AnimationGraph`], while the blend register +/// stores the result of a blend operation. +/// +/// [`Vec3`]: bevy_math::Vec3 +pub trait AnimationCurveEvaluator: Downcast + Send + Sync + 'static { + /// Blends the top element of the stack with the blend register. + /// + /// The semantics of this method are as follows: + /// + /// 1. Pop the top element of the stack. Call its value vₘ and its weight + /// wₘ. If the stack was empty, return success. + /// + /// 2. If the blend register is empty, set the blend register value to vₘ + /// and the blend register weight to wₘ; then, return success. + /// + /// 3. If the blend register is nonempty, call its current value vₙ and its + /// current weight wₙ. Then, set the value of the blend register to + /// `interpolate(vₙ, vₘ, wₘ / (wₘ + wₙ))`, and set the weight of the blend + /// register to wₘ + wₙ. + /// + /// 4. Return success. + fn blend(&mut self, graph_node: AnimationNodeIndex) -> Result<(), AnimationEvaluationError>; + + /// Additively blends the top element of the stack with the blend register. + /// + /// The semantics of this method are as follows: + /// + /// 1. Pop the top element of the stack. Call its value vₘ and its weight + /// wₘ. If the stack was empty, return success. + /// + /// 2. If the blend register is empty, set the blend register value to vₘ + /// and the blend register weight to wₘ; then, return success. + /// + /// 3. If the blend register is nonempty, call its current value vₙ. + /// Then, set the value of the blend register to vₙ + vₘwₘ. + /// + /// 4. Return success. 
+ fn add(&mut self, graph_node: AnimationNodeIndex) -> Result<(), AnimationEvaluationError>; + + /// Pushes the current value of the blend register onto the stack. + /// + /// If the blend register is empty, this method does nothing successfully. + /// Otherwise, this method pushes the current value of the blend register + /// onto the stack, alongside the weight and graph node supplied to this + /// function. The weight present in the blend register is discarded; only + /// the weight parameter to this function is pushed onto the stack. The + /// blend register is emptied after this process. + fn push_blend_register( + &mut self, + weight: f32, + graph_node: AnimationNodeIndex, + ) -> Result<(), AnimationEvaluationError>; + + /// Pops the top value off the stack and writes it into the appropriate + /// component. + /// + /// If the stack is empty, this method does nothing successfully. Otherwise, + /// it pops the top value off the stack, fetches the associated component + /// from either the `transform` or `entity` values as appropriate, and + /// updates the appropriate property with the value popped from the stack. + /// The weight and node index associated with the popped stack element are + /// discarded. After doing this, the stack is emptied. + /// + /// The property on the component must be overwritten with the value from + /// the stack, not blended with it. + fn commit<'a>( + &mut self, + entity: AnimationEntityMut<'a>, + ) -> Result<(), AnimationEvaluationError>; +} + +impl_downcast!(AnimationCurveEvaluator); + /// A [curve] defined by keyframes with values in an [animatable] type. /// /// The keyframes are interpolated using the type's [`Animatable::interpolate`] implementation. @@ -470,14 +934,14 @@ where } #[inline] - fn sample_unchecked(&self, t: f32) -> T { + fn sample_clamped(&self, t: f32) -> T { + // `UnevenCore::sample_with` is implicitly clamped. self.core.sample_with(t, ::interpolate) } #[inline] - fn sample_clamped(&self, t: f32) -> T { - // Sampling by keyframes is automatically clamped to the keyframe bounds. - self.sample_unchecked(t) + fn sample_unchecked(&self, t: f32) -> T { + self.sample_clamped(t) } } @@ -496,3 +960,53 @@ where }) } } + +fn inconsistent

() -> AnimationEvaluationError +where + P: 'static + ?Sized, +{ + AnimationEvaluationError::InconsistentEvaluatorImplementation(TypeId::of::

()) +} + +/// Returns an [`AnimatedField`] with a given `$component` and `$field`. +/// +/// This can be used in the following way: +/// +/// ``` +/// # use bevy_animation::{animation_curves::AnimatedField, animated_field}; +/// # use bevy_ecs::component::Component; +/// # use bevy_math::Vec3; +/// # use bevy_reflect::Reflect; +/// #[derive(Component, Reflect)] +/// struct Transform { +/// translation: Vec3, +/// } +/// +/// let field = animated_field!(Transform::translation); +/// ``` +#[macro_export] +macro_rules! animated_field { + ($component:ident::$field:ident) => { + AnimatedField::new_unchecked(stringify!($field), |component: &mut $component| { + &mut component.$field + }) + }; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_animated_field_tuple_struct_simple_uses() { + #[derive(Clone, Debug, Component, Reflect)] + struct A(f32); + let _ = AnimatedField::new_unchecked("0", |a: &mut A| &mut a.0); + + #[derive(Clone, Debug, Component, Reflect)] + struct B(f32, f64, f32); + let _ = AnimatedField::new_unchecked("0", |b: &mut B| &mut b.0); + let _ = AnimatedField::new_unchecked("1", |b: &mut B| &mut b.1); + let _ = AnimatedField::new_unchecked("2", |b: &mut B| &mut b.2); + } +} diff --git a/crates/bevy_animation/src/gltf_curves.rs b/crates/bevy_animation/src/gltf_curves.rs index f32ddc4ab01760..d5b2cbb6b984eb 100644 --- a/crates/bevy_animation/src/gltf_curves.rs +++ b/crates/bevy_animation/src/gltf_curves.rs @@ -5,6 +5,7 @@ use bevy_math::{ vec4, Quat, Vec4, VectorSpace, }; use bevy_reflect::Reflect; +use either::Either; use thiserror::Error; /// A keyframe-defined curve that "interpolates" by stepping at `t = 1.0` to the next keyframe. @@ -23,10 +24,15 @@ where } #[inline] - fn sample_unchecked(&self, t: f32) -> T { + fn sample_clamped(&self, t: f32) -> T { self.core .sample_with(t, |x, y, t| if t >= 1.0 { y.clone() } else { x.clone() }) } + + #[inline] + fn sample_unchecked(&self, t: f32) -> T { + self.sample_clamped(t) + } } impl SteppedKeyframeCurve { @@ -57,7 +63,7 @@ where } #[inline] - fn sample_unchecked(&self, t: f32) -> V { + fn sample_clamped(&self, t: f32) -> V { match self.core.sample_interp_timed(t) { // In all the cases where only one frame matters, defer to the position within it. InterpolationDatum::Exact((_, v)) @@ -69,6 +75,11 @@ where } } } + + #[inline] + fn sample_unchecked(&self, t: f32) -> V { + self.sample_clamped(t) + } } impl CubicKeyframeCurve { @@ -112,7 +123,7 @@ impl Curve for CubicRotationCurve { } #[inline] - fn sample_unchecked(&self, t: f32) -> Quat { + fn sample_clamped(&self, t: f32) -> Quat { let vec = match self.core.sample_interp_timed(t) { // In all the cases where only one frame matters, defer to the position within it. 
InterpolationDatum::Exact((_, v)) @@ -125,6 +136,11 @@ impl Curve for CubicRotationCurve { }; Quat::from_vec4(vec.normalize()) } + + #[inline] + fn sample_unchecked(&self, t: f32) -> Quat { + self.sample_clamped(t) + } } impl CubicRotationCurve { @@ -170,18 +186,23 @@ where } #[inline] - fn sample_iter_unchecked(&self, t: f32) -> impl Iterator { + fn sample_iter_clamped(&self, t: f32) -> impl Iterator { match self.core.sample_interp(t) { InterpolationDatum::Exact(v) | InterpolationDatum::LeftTail(v) - | InterpolationDatum::RightTail(v) => TwoIterators::Left(v.iter().copied()), + | InterpolationDatum::RightTail(v) => Either::Left(v.iter().copied()), InterpolationDatum::Between(u, v, s) => { let interpolated = u.iter().zip(v.iter()).map(move |(x, y)| x.lerp(*y, s)); - TwoIterators::Right(interpolated) + Either::Right(interpolated) } } } + + #[inline] + fn sample_iter_unchecked(&self, t: f32) -> impl Iterator { + self.sample_iter_clamped(t) + } } impl WideLinearKeyframeCurve { @@ -219,21 +240,26 @@ where } #[inline] - fn sample_iter_unchecked(&self, t: f32) -> impl Iterator { + fn sample_iter_clamped(&self, t: f32) -> impl Iterator { match self.core.sample_interp(t) { InterpolationDatum::Exact(v) | InterpolationDatum::LeftTail(v) - | InterpolationDatum::RightTail(v) => TwoIterators::Left(v.iter().cloned()), + | InterpolationDatum::RightTail(v) => Either::Left(v.iter().cloned()), InterpolationDatum::Between(u, v, s) => { let interpolated = u.iter() .zip(v.iter()) .map(move |(x, y)| if s >= 1.0 { y.clone() } else { x.clone() }); - TwoIterators::Right(interpolated) + Either::Right(interpolated) } } } + + #[inline] + fn sample_iter_unchecked(&self, t: f32) -> impl Iterator { + self.sample_iter_clamped(t) + } } impl WideSteppedKeyframeCurve { @@ -269,7 +295,7 @@ where self.core.domain() } - fn sample_iter_unchecked(&self, t: f32) -> impl Iterator { + fn sample_iter_clamped(&self, t: f32) -> impl Iterator { match self.core.sample_interp_timed(t) { InterpolationDatum::Exact((_, v)) | InterpolationDatum::LeftTail((_, v)) @@ -277,14 +303,19 @@ where // Pick out the part of this that actually represents the position (instead of tangents), // which is the middle third. let width = self.core.width(); - TwoIterators::Left(v[width..(width * 2)].iter().copied()) + Either::Left(v[width..(width * 2)].iter().copied()) } - InterpolationDatum::Between((t0, u), (t1, v), s) => TwoIterators::Right( + InterpolationDatum::Between((t0, u), (t1, v), s) => Either::Right( cubic_spline_interpolate_slices(self.core.width() / 3, u, v, s, t1 - t0), ), } } + + #[inline] + fn sample_iter_unchecked(&self, t: f32) -> impl Iterator { + self.sample_iter_clamped(t) + } } /// An error indicating that a multisampling keyframe curve could not be constructed. @@ -299,8 +330,8 @@ pub enum WideKeyframeCurveError { /// The number that `values_given` was supposed to be divisible by. divisor: usize, }, - /// An error was returned by the internal core constructor. + #[error(transparent)] CoreError(#[from] ChunkedUnevenCoreError), } @@ -363,26 +394,6 @@ pub enum WeightsCurve { // HELPERS // //---------// -enum TwoIterators { - Left(A), - Right(B), -} - -impl Iterator for TwoIterators -where - A: Iterator, - B: Iterator, -{ - type Item = T; - - fn next(&mut self) -> Option { - match self { - TwoIterators::Left(a) => a.next(), - TwoIterators::Right(b) => b.next(), - } - } -} - /// Helper function for cubic spline interpolation. 
fn cubic_spline_interpolation( value_start: T, diff --git a/crates/bevy_animation/src/graph.rs b/crates/bevy_animation/src/graph.rs index 5264cf9a235520..e570d25ab15e3d 100644 --- a/crates/bevy_animation/src/graph.rs +++ b/crates/bevy_animation/src/graph.rs @@ -1,14 +1,31 @@ //! The animation graph, which allows animations to be blended together. -use core::ops::{Index, IndexMut}; +use core::{ + iter, + ops::{Index, IndexMut, Range}, +}; use std::io::{self, Write}; -use bevy_asset::{io::Reader, Asset, AssetId, AssetLoader, AssetPath, Handle, LoadContext}; -use bevy_reflect::{Reflect, ReflectSerialize}; +use bevy_asset::{ + io::Reader, Asset, AssetEvent, AssetId, AssetLoader, AssetPath, Assets, Handle, LoadContext, +}; +use bevy_derive::{Deref, DerefMut}; +use bevy_ecs::{ + component::Component, + event::EventReader, + reflect::ReflectComponent, + system::{Res, ResMut, Resource}, +}; +use bevy_reflect::{prelude::ReflectDefault, Reflect, ReflectSerialize}; use bevy_utils::HashMap; -use petgraph::graph::{DiGraph, NodeIndex}; +use derive_more::derive::From; +use petgraph::{ + graph::{DiGraph, NodeIndex}, + Direction, +}; use ron::de::SpannedError; use serde::{Deserialize, Serialize}; +use smallvec::SmallVec; use thiserror::Error; use crate::{AnimationClip, AnimationTargetId}; @@ -24,11 +41,12 @@ use crate::{AnimationClip, AnimationTargetId}; /// the root and blends the animations together in a bottom-up fashion to /// produce the final pose. /// -/// There are two types of nodes: *blend nodes* and *clip nodes*, both of which -/// can have an associated weight. Blend nodes have no associated animation clip -/// and simply affect the weights of all their descendant nodes. Clip nodes -/// specify an animation clip to play. When a graph is created, it starts with -/// only a single blend node, the root node. +/// There are three types of nodes: *blend nodes*, *add nodes*, and *clip +/// nodes*, all of which can have an associated weight. Blend nodes and add +/// nodes have no associated animation clip and combine the animations of their +/// children according to those children's weights. Clip nodes specify an +/// animation clip to play. When a graph is created, it starts with only a +/// single blend node, the root node. /// /// For example, consider the following graph: /// @@ -110,28 +128,47 @@ pub struct AnimationGraph { pub mask_groups: HashMap, } +/// A [`Handle`] to the [`AnimationGraph`] to be used by the [`AnimationPlayer`](crate::AnimationPlayer) on the same entity. +#[derive(Component, Clone, Debug, Default, Deref, DerefMut, Reflect, PartialEq, Eq, From)] +#[reflect(Component, Default)] +pub struct AnimationGraphHandle(pub Handle); + +impl From for AssetId { + fn from(handle: AnimationGraphHandle) -> Self { + handle.id() + } +} + +impl From<&AnimationGraphHandle> for AssetId { + fn from(handle: &AnimationGraphHandle) -> Self { + handle.id() + } +} + /// A type alias for the `petgraph` data structure that defines the animation /// graph. pub type AnimationDiGraph = DiGraph; /// The index of either an animation or blend node in the animation graph. /// -/// These indices are the way that [`crate::AnimationPlayer`]s identify -/// particular animations. +/// These indices are the way that [animation players] identify each animation. +/// +/// [animation players]: crate::AnimationPlayer pub type AnimationNodeIndex = NodeIndex; /// An individual node within an animation graph. /// -/// If `clip` is present, this is a *clip node*. Otherwise, it's a *blend node*. 
-/// Both clip and blend nodes can have weights, and those weights are propagated -/// down to descendants. +/// The [`AnimationGraphNode::node_type`] field specifies the type of node: one +/// of a *clip node*, a *blend node*, or an *add node*. Clip nodes, the leaves +/// of the graph, contain animation clips to play. Blend and add nodes describe +/// how to combine their children to produce a final animation. #[derive(Clone, Reflect, Debug)] pub struct AnimationGraphNode { - /// The animation clip associated with this node, if any. + /// Animation node data specific to the type of node (clip, blend, or add). /// - /// If the clip is present, this node is an *animation clip node*. - /// Otherwise, this node is a *blend node*. - pub clip: Option>, + /// In the case of clip nodes, this contains the actual animation clip + /// associated with the node. + pub node_type: AnimationNodeType, /// A bitfield specifying the mask groups that this node and its descendants /// will not affect. @@ -141,14 +178,59 @@ pub struct AnimationGraphNode { /// this node and its descendants *cannot* animate mask group N. pub mask: AnimationMask, - /// The weight of this node. + /// The weight of this node, which signifies its contribution in blending. + /// + /// Note that this does not propagate down the graph hierarchy; rather, + /// each [Blend] and [Add] node uses the weights of its children to determine + /// the total animation that is accumulated at that node. The parent node's + /// weight is used only to determine the contribution of that total animation + /// in *further* blending. /// - /// Weights are propagated down to descendants. Thus if an animation clip - /// has weight 0.3 and its parent blend node has weight 0.6, the computed - /// weight of the animation clip is 0.18. + /// In other words, it is as if the blend node is replaced by a single clip + /// node consisting of the blended animation with the weight specified at the + /// blend node. + /// + /// For animation clips, this weight is also multiplied by the [active animation weight] + /// before being applied. + /// + /// [Blend]: AnimationNodeType::Blend + /// [Add]: AnimationNodeType::Add + /// [active animation weight]: crate::ActiveAnimation::weight pub weight: f32, } +/// Animation node data specific to the type of node (clip, blend, or add). +/// +/// In the case of clip nodes, this contains the actual animation clip +/// associated with the node. +#[derive(Clone, Default, Reflect, Debug)] +pub enum AnimationNodeType { + /// A *clip node*, which plays an animation clip. + /// + /// These are always the leaves of the graph. + Clip(Handle), + + /// A *blend node*, which blends its children according to their weights. + /// + /// The weights of all the children of this node are normalized to 1.0. + #[default] + Blend, + + /// An *additive blend node*, which combines the animations of its children + /// additively. + /// + /// The weights of all the children of this node are *not* normalized to + /// 1.0. Rather, each child is multiplied by its respective weight and + /// added in sequence. + /// + /// Add nodes are primarily useful for superimposing an animation for a + /// portion of a rig on top of the main animation. For example, an add node + /// could superimpose a weapon attack animation for a character's limb on + /// top of a running animation to produce an animation of a character + /// attacking while running. + Add, +} + /// An [`AssetLoader`] that can load [`AnimationGraph`]s as assets. 
/// /// The canonical extension for [`AnimationGraph`]s is `.animgraph.ron`. Plain @@ -172,6 +254,99 @@ pub enum AnimationGraphLoadError { SpannedRon(#[from] SpannedError), } +/// Acceleration structures for animation graphs that allows Bevy to evaluate +/// them quickly. +/// +/// These are kept up to date as [`AnimationGraph`] instances are added, +/// modified, and removed. +#[derive(Default, Reflect, Resource)] +pub struct ThreadedAnimationGraphs( + pub(crate) HashMap, ThreadedAnimationGraph>, +); + +/// An acceleration structure for an animation graph that allows Bevy to +/// evaluate it quickly. +/// +/// This is kept up to date as the associated [`AnimationGraph`] instance is +/// added, modified, or removed. +#[derive(Default, Reflect)] +pub struct ThreadedAnimationGraph { + /// A cached postorder traversal of the graph. + /// + /// The node indices here are stored in postorder. Siblings are stored in + /// descending order. This is because the + /// [`crate::animation_curves::AnimationCurveEvaluator`] uses a stack for + /// evaluation. Consider this graph: + /// + /// ```text + /// ┌─────┐ + /// │ │ + /// │ 1 │ + /// │ │ + /// └──┬──┘ + /// │ + /// ┌───────┼───────┐ + /// │ │ │ + /// ▼ ▼ ▼ + /// ┌─────┐ ┌─────┐ ┌─────┐ + /// │ │ │ │ │ │ + /// │ 2 │ │ 3 │ │ 4 │ + /// │ │ │ │ │ │ + /// └──┬──┘ └─────┘ └─────┘ + /// │ + /// ┌───┴───┐ + /// │ │ + /// ▼ ▼ + /// ┌─────┐ ┌─────┐ + /// │ │ │ │ + /// │ 5 │ │ 6 │ + /// │ │ │ │ + /// └─────┘ └─────┘ + /// ``` + /// + /// The postorder traversal in this case will be (4, 3, 6, 5, 2, 1). + /// + /// The fact that the children of each node are sorted in reverse ensures + /// that, at each level, the order of blending proceeds in ascending order + /// by node index, as we guarantee. To illustrate this, consider the way + /// the graph above is evaluated. (Interpolation is represented with the ⊕ + /// symbol.) + /// + /// | Step | Node | Operation | Stack (after operation) | Blend Register | + /// | ---- | ---- | ---------- | ----------------------- | -------------- | + /// | 1 | 4 | Push | 4 | | + /// | 2 | 3 | Push | 4 3 | | + /// | 3 | 6 | Push | 4 3 6 | | + /// | 4 | 5 | Push | 4 3 6 5 | | + /// | 5 | 2 | Blend 5 | 4 3 6 | 5 | + /// | 6 | 2 | Blend 6 | 4 3 | 5 ⊕ 6 | + /// | 7 | 2 | Push Blend | 4 3 2 | | + /// | 8 | 1 | Blend 2 | 4 3 | 2 | + /// | 9 | 1 | Blend 3 | 4 | 2 ⊕ 3 | + /// | 10 | 1 | Blend 4 | | 2 ⊕ 3 ⊕ 4 | + /// | 11 | 1 | Push Blend | 1 | | + /// | 12 | | Commit | | | + pub threaded_graph: Vec, + + /// A mapping from each parent node index to the range within + /// [`Self::sorted_edges`]. + /// + /// This allows for quick lookup of the children of each node, sorted in + /// ascending order of node index, without having to sort the result of the + /// `petgraph` traversal functions every frame. + pub sorted_edge_ranges: Vec>, + + /// A list of the children of each node, sorted in ascending order. + pub sorted_edges: Vec, + + /// A mapping from node index to a bitfield specifying the mask groups that + /// this node masks *out* (i.e. doesn't animate). + /// + /// A 1 in bit position N indicates that this node doesn't animate any + /// targets of mask group N. + pub computed_masks: Vec, +} + /// A version of [`AnimationGraph`] suitable for serializing as an asset. /// /// Animation nodes can refer to external animation clips, and the [`AssetId`] @@ -196,14 +371,26 @@ pub struct SerializedAnimationGraph { /// See the comments in [`SerializedAnimationGraph`] for more information. 
#[derive(Serialize, Deserialize)] pub struct SerializedAnimationGraphNode { - /// Corresponds to the `clip` field on [`AnimationGraphNode`]. - pub clip: Option, + /// Corresponds to the `node_type` field on [`AnimationGraphNode`]. + pub node_type: SerializedAnimationNodeType, /// Corresponds to the `mask` field on [`AnimationGraphNode`]. pub mask: AnimationMask, /// Corresponds to the `weight` field on [`AnimationGraphNode`]. pub weight: f32, } +/// A version of [`AnimationNodeType`] suitable for serializing as part of a +/// [`SerializedAnimationGraphNode`] asset. +#[derive(Serialize, Deserialize)] +pub enum SerializedAnimationNodeType { + /// Corresponds to [`AnimationNodeType::Clip`]. + Clip(SerializedAnimationClip), + /// Corresponds to [`AnimationNodeType::Blend`]. + Blend, + /// Corresponds to [`AnimationNodeType::Add`]. + Add, +} + /// A version of `Handle` suitable for serializing as an asset. /// /// This replaces any handle that has a path with an [`AssetPath`]. Failing @@ -235,7 +422,7 @@ impl AnimationGraph { Self { graph, root, - mask_groups: HashMap::new(), + mask_groups: HashMap::default(), } } @@ -256,7 +443,7 @@ impl AnimationGraph { /// All of the animation clips will be direct children of the root with /// weight 1.0. /// - /// Returns the the graph and indices of the new nodes. + /// Returns the graph and indices of the new nodes. pub fn from_clips<'a, I>(clips: I) -> (Self, Vec) where I: IntoIterator>, @@ -279,7 +466,7 @@ impl AnimationGraph { parent: AnimationNodeIndex, ) -> AnimationNodeIndex { let node_index = self.graph.add_node(AnimationGraphNode { - clip: Some(clip), + node_type: AnimationNodeType::Clip(clip), mask: 0, weight, }); @@ -299,7 +486,7 @@ impl AnimationGraph { parent: AnimationNodeIndex, ) -> AnimationNodeIndex { let node_index = self.graph.add_node(AnimationGraphNode { - clip: Some(clip), + node_type: AnimationNodeType::Clip(clip), mask, weight, }); @@ -338,7 +525,7 @@ impl AnimationGraph { /// no mask. pub fn add_blend(&mut self, weight: f32, parent: AnimationNodeIndex) -> AnimationNodeIndex { let node_index = self.graph.add_node(AnimationGraphNode { - clip: None, + node_type: AnimationNodeType::Blend, mask: 0, weight, }); @@ -361,7 +548,51 @@ impl AnimationGraph { parent: AnimationNodeIndex, ) -> AnimationNodeIndex { let node_index = self.graph.add_node(AnimationGraphNode { - clip: None, + node_type: AnimationNodeType::Blend, + mask, + weight, + }); + self.graph.add_edge(parent, node_index, ()); + node_index + } + + /// Adds a blend node to the animation graph with the given weight and + /// returns its index. + /// + /// The blend node will be placed under the supplied `parent` node. During + /// animation evaluation, the descendants of this blend node will have their + /// weights multiplied by the weight of the blend. The blend node will have + /// no mask. + pub fn add_additive_blend( + &mut self, + weight: f32, + parent: AnimationNodeIndex, + ) -> AnimationNodeIndex { + let node_index = self.graph.add_node(AnimationGraphNode { + node_type: AnimationNodeType::Add, + mask: 0, + weight, + }); + self.graph.add_edge(parent, node_index, ()); + node_index + } + + /// Adds a blend node to the animation graph with the given weight and + /// returns its index. + /// + /// The blend node will be placed under the supplied `parent` node. During + /// animation evaluation, the descendants of this blend node will have their + /// weights multiplied by the weight of the blend. 
Neither this node nor its + /// descendants will affect animation targets that belong to mask groups not + /// in the given `mask`. + pub fn add_additive_blend_with_mask( + &mut self, + mask: AnimationMask, + weight: f32, + parent: AnimationNodeIndex, + ) -> AnimationNodeIndex { + let node_index = self.graph.add_node(AnimationGraphNode { + node_type: AnimationNodeType::Add, mask, weight, }); @@ -488,7 +719,7 @@ impl IndexMut for AnimationGraph { impl Default for AnimationGraphNode { fn default() -> Self { Self { - clip: None, + node_type: Default::default(), mask: 0, weight: 1.0, } @@ -528,12 +759,18 @@ impl AssetLoader for AnimationGraphAssetLoader { Ok(AnimationGraph { graph: serialized_animation_graph.graph.map( |_, serialized_node| AnimationGraphNode { - clip: serialized_node.clip.as_ref().map(|clip| match clip { - SerializedAnimationClip::AssetId(asset_id) => Handle::Weak(*asset_id), - SerializedAnimationClip::AssetPath(asset_path) => { - load_context.load(asset_path) - } - }), + node_type: match serialized_node.node_type { + SerializedAnimationNodeType::Clip(ref clip) => match clip { + SerializedAnimationClip::AssetId(asset_id) => { + AnimationNodeType::Clip(Handle::Weak(*asset_id)) + } + SerializedAnimationClip::AssetPath(asset_path) => { + AnimationNodeType::Clip(load_context.load(asset_path)) + } + }, + SerializedAnimationNodeType::Blend => AnimationNodeType::Blend, + SerializedAnimationNodeType::Add => AnimationNodeType::Add, + }, mask: serialized_node.mask, weight: serialized_node.weight, }, @@ -559,10 +796,18 @@ impl From for SerializedAnimationGraph { |_, node| SerializedAnimationGraphNode { weight: node.weight, mask: node.mask, - clip: node.clip.as_ref().map(|clip| match clip.path() { - Some(path) => SerializedAnimationClip::AssetPath(path.clone()), - None => SerializedAnimationClip::AssetId(clip.id()), - }), + node_type: match node.node_type { + AnimationNodeType::Clip(ref clip) => match clip.path() { + Some(path) => SerializedAnimationNodeType::Clip( + SerializedAnimationClip::AssetPath(path.clone()), + ), + None => SerializedAnimationNodeType::Clip( + SerializedAnimationClip::AssetId(clip.id()), + ), + }, + AnimationNodeType::Blend => SerializedAnimationNodeType::Blend, + AnimationNodeType::Add => SerializedAnimationNodeType::Add, + }, }, |_, _| (), ), @@ -571,3 +816,112 @@ impl From for SerializedAnimationGraph { } } } + +/// A system that creates, updates, and removes [`ThreadedAnimationGraph`] +/// structures for every changed [`AnimationGraph`]. +/// +/// The [`ThreadedAnimationGraph`] contains acceleration structures that allow +/// for quick evaluation of that graph's animations. +pub(crate) fn thread_animation_graphs( + mut threaded_animation_graphs: ResMut, + animation_graphs: Res>, + mut animation_graph_asset_events: EventReader>, +) { + for animation_graph_asset_event in animation_graph_asset_events.read() { + match *animation_graph_asset_event { + AssetEvent::Added { id } + | AssetEvent::Modified { id } + | AssetEvent::LoadedWithDependencies { id } => { + // Fetch the animation graph. + let Some(animation_graph) = animation_graphs.get(id) else { + continue; + }; + + // Reuse the allocation if possible. + let mut threaded_animation_graph = + threaded_animation_graphs.0.remove(&id).unwrap_or_default(); + threaded_animation_graph.clear(); + + // Recursively thread the graph in postorder. 
+ threaded_animation_graph.init(animation_graph); + threaded_animation_graph.build_from( + &animation_graph.graph, + animation_graph.root, + 0, + ); + + // Write in the threaded graph. + threaded_animation_graphs + .0 + .insert(id, threaded_animation_graph); + } + + AssetEvent::Removed { id } => { + threaded_animation_graphs.0.remove(&id); + } + AssetEvent::Unused { .. } => {} + } + } +} + +impl ThreadedAnimationGraph { + /// Removes all the data in this [`ThreadedAnimationGraph`], keeping the + /// memory around for later reuse. + fn clear(&mut self) { + self.threaded_graph.clear(); + self.sorted_edge_ranges.clear(); + self.sorted_edges.clear(); + } + + /// Prepares the [`ThreadedAnimationGraph`] for recursion. + fn init(&mut self, animation_graph: &AnimationGraph) { + let node_count = animation_graph.graph.node_count(); + let edge_count = animation_graph.graph.edge_count(); + + self.threaded_graph.reserve(node_count); + self.sorted_edges.reserve(edge_count); + + self.sorted_edge_ranges.clear(); + self.sorted_edge_ranges + .extend(iter::repeat(0..0).take(node_count)); + + self.computed_masks.clear(); + self.computed_masks.extend(iter::repeat(0).take(node_count)); + } + + /// Recursively constructs the [`ThreadedAnimationGraph`] for the subtree + /// rooted at the given node. + /// + /// `mask` specifies the computed mask of the parent node. (It could be + /// fetched from the [`Self::computed_masks`] field, but we pass it + /// explicitly as a micro-optimization.) + fn build_from( + &mut self, + graph: &AnimationDiGraph, + node_index: AnimationNodeIndex, + mut mask: u64, + ) { + // Accumulate the mask. + mask |= graph.node_weight(node_index).unwrap().mask; + self.computed_masks[node_index.index()] = mask; + + // Gather up the indices of our children, and sort them. + let mut kids: SmallVec<[AnimationNodeIndex; 8]> = graph + .neighbors_directed(node_index, Direction::Outgoing) + .collect(); + kids.sort_unstable(); + + // Write in the list of kids. + self.sorted_edge_ranges[node_index.index()] = + (self.sorted_edges.len() as u32)..((self.sorted_edges.len() + kids.len()) as u32); + self.sorted_edges.extend_from_slice(&kids); + + // Recurse. (This is a postorder traversal.) + for kid in kids.into_iter().rev() { + self.build_from(graph, kid, mask); + } + + // Finally, push our index. 
+ self.threaded_graph.push(node_index); + } +} diff --git a/crates/bevy_animation/src/lib.rs b/crates/bevy_animation/src/lib.rs old mode 100755 new mode 100644 index 39a8349d812126..e03ad9a449982c --- a/crates/bevy_animation/src/lib.rs +++ b/crates/bevy_animation/src/lib.rs @@ -16,17 +16,23 @@ pub mod graph; pub mod transition; mod util; -use alloc::collections::BTreeMap; use core::{ - any::{Any, TypeId}, + any::TypeId, cell::RefCell, fmt::Debug, hash::{Hash, Hasher}, - iter, + iter, slice, }; +use graph::AnimationNodeType; +use prelude::AnimationCurveEvaluator; -use bevy_app::{App, Plugin, PostUpdate}; -use bevy_asset::{Asset, AssetApp, Assets, Handle}; +use crate::{ + graph::{AnimationGraphHandle, ThreadedAnimationGraphs}, + prelude::EvaluatorId, +}; + +use bevy_app::{Animation, App, Plugin, PostUpdate}; +use bevy_asset::{Asset, AssetApp, Assets}; use bevy_core::Name; use bevy_ecs::{ entity::{VisitEntities, VisitEntitiesMut}, @@ -34,23 +40,15 @@ use bevy_ecs::{ reflect::{ReflectMapEntities, ReflectVisitEntities, ReflectVisitEntitiesMut}, world::EntityMutExcept, }; -use bevy_reflect::{ - prelude::ReflectDefault, utility::NonGenericTypeInfoCell, ApplyError, DynamicTupleStruct, - FromReflect, FromType, GetTypeRegistration, PartialReflect, Reflect, ReflectFromPtr, - ReflectKind, ReflectMut, ReflectOwned, ReflectRef, TupleStruct, TupleStructFieldIter, - TupleStructInfo, TypeInfo, TypePath, TypeRegistration, Typed, UnnamedField, -}; +use bevy_math::FloatOrd; +use bevy_reflect::{prelude::ReflectDefault, Reflect, TypePath}; use bevy_time::Time; -use bevy_transform::{prelude::Transform, TransformSystem}; -use bevy_ui::UiSystem; +use bevy_transform::TransformSystem; use bevy_utils::{ - hashbrown::HashMap, tracing::{trace, warn}, - NoOpHash, + HashMap, NoOpHash, PreHashMap, PreHashMapExt, TypeIdMap, }; -use fixedbitset::FixedBitSet; -use graph::AnimationMask; -use petgraph::{graph::NodeIndex, Direction}; +use petgraph::graph::NodeIndex; use serde::{Deserialize, Serialize}; use thread_local::ThreadLocal; use uuid::Uuid; @@ -71,13 +69,14 @@ use crate::{ graph::{AnimationGraph, AnimationGraphAssetLoader, AnimationNodeIndex}, transition::{advance_transitions, expire_completed_transitions, AnimationTransitions}, }; +use alloc::sync::Arc; /// The [UUID namespace] of animation targets (e.g. bones). /// /// [UUID namespace]: https://en.wikipedia.org/wiki/Universally_unique_identifier#Versions_3_and_5_(namespace_name-based) pub static ANIMATION_TARGET_NAMESPACE: Uuid = Uuid::from_u128(0x3179f519d9274ff2b5966fd077023911); -/// Contains an [animation curve] which is used to animate entities. +/// Contains an [animation curve] which is used to animate a property of an entity. /// /// [animation curve]: AnimationCurve #[derive(Debug, TypePath)] @@ -98,186 +97,62 @@ impl VariableCurve { } } -// We have to implement `PartialReflect` manually because of the embedded -// `Box`, which can't be automatically derived yet. 
-impl PartialReflect for VariableCurve { - #[inline] - fn get_represented_type_info(&self) -> Option<&'static TypeInfo> { - Some(::type_info()) - } - - #[inline] - fn into_partial_reflect(self: Box) -> Box { - self - } - - #[inline] - fn as_partial_reflect(&self) -> &dyn PartialReflect { - self - } - - #[inline] - fn as_partial_reflect_mut(&mut self) -> &mut dyn PartialReflect { - self - } - - fn try_into_reflect(self: Box) -> Result, Box> { - Ok(self) - } - - #[inline] - fn try_as_reflect(&self) -> Option<&dyn Reflect> { - Some(self) - } - - #[inline] - fn try_as_reflect_mut(&mut self) -> Option<&mut dyn Reflect> { - Some(self) - } - - fn try_apply(&mut self, value: &dyn PartialReflect) -> Result<(), ApplyError> { - if let ReflectRef::TupleStruct(tuple_value) = value.reflect_ref() { - for (i, value) in tuple_value.iter_fields().enumerate() { - if let Some(v) = self.field_mut(i) { - v.try_apply(value)?; - } - } - } else { - return Err(ApplyError::MismatchedKinds { - from_kind: value.reflect_kind(), - to_kind: ReflectKind::TupleStruct, - }); - } - Ok(()) - } - - fn reflect_ref(&self) -> ReflectRef { - ReflectRef::TupleStruct(self) - } - - fn reflect_mut(&mut self) -> ReflectMut { - ReflectMut::TupleStruct(self) - } - - fn reflect_owned(self: Box) -> ReflectOwned { - ReflectOwned::TupleStruct(self) - } - - fn clone_value(&self) -> Box { - Box::new((*self).clone()) - } +/// A list of [`VariableCurve`]s and the [`AnimationTargetId`]s to which they +/// apply. +/// +/// Because animation clips refer to targets by UUID, they can target any +/// [`AnimationTarget`] with that ID. +#[derive(Asset, Reflect, Clone, Debug, Default)] +pub struct AnimationClip { + // This field is ignored by reflection because AnimationCurves can contain things that are not reflect-able + #[reflect(ignore)] + curves: AnimationCurves, + events: AnimationEvents, + duration: f32, } -// We have to implement `Reflect` manually because of the embedded `Box`, which can't be automatically derived yet. -impl Reflect for VariableCurve { - #[inline] - fn into_any(self: Box) -> Box { - self - } - - #[inline] - fn as_any(&self) -> &dyn Any { - self - } - - #[inline] - fn as_any_mut(&mut self) -> &mut dyn Any { - self - } - - #[inline] - fn into_reflect(self: Box) -> Box { - self - } - - #[inline] - fn as_reflect(&self) -> &dyn Reflect { - self - } - - #[inline] - fn as_reflect_mut(&mut self) -> &mut dyn Reflect { - self - } - - #[inline] - fn set(&mut self, value: Box) -> Result<(), Box> { - *self = value.take()?; - Ok(()) - } +#[derive(Reflect, Debug, Clone)] +struct TimedAnimationEvent { + time: f32, + event: AnimationEvent, } -// We have to implement `TupleStruct` manually because of the embedded `Box`, which can't be automatically derived yet. 
-impl TupleStruct for VariableCurve { - fn field(&self, index: usize) -> Option<&dyn PartialReflect> { - match index { - 0 => Some(self.0.as_partial_reflect()), - _ => None, - } - } - - fn field_mut(&mut self, index: usize) -> Option<&mut dyn PartialReflect> { - match index { - 0 => Some(self.0.as_partial_reflect_mut()), - _ => None, - } - } - - fn field_len(&self) -> usize { - 1 - } - - fn iter_fields(&self) -> TupleStructFieldIter { - TupleStructFieldIter::new(self) - } - - fn clone_dynamic(&self) -> DynamicTupleStruct { - DynamicTupleStruct::from_iter([PartialReflect::clone_value(&*self.0)]) - } +#[derive(Reflect, Debug, Clone)] +struct AnimationEvent { + #[reflect(ignore)] + trigger: AnimationEventFn, } -// We have to implement `FromReflect` manually because of the embedded `Box`, which can't be automatically derived yet. -impl FromReflect for VariableCurve { - fn from_reflect(reflect: &dyn PartialReflect) -> Option { - Some(reflect.try_downcast_ref::()?.clone()) +impl AnimationEvent { + fn trigger(&self, commands: &mut Commands, entity: Entity, time: f32, weight: f32) { + (self.trigger.0)(commands, entity, time, weight); } } -// We have to implement `GetTypeRegistration` manually because of the embedded -// `Box`, which can't be automatically derived yet. -impl GetTypeRegistration for VariableCurve { - fn get_type_registration() -> TypeRegistration { - let mut registration = TypeRegistration::of::(); - registration.insert::(FromType::::from_type()); - registration +#[derive(Reflect, Clone)] +#[reflect(opaque)] +struct AnimationEventFn(Arc); + +impl Default for AnimationEventFn { + fn default() -> Self { + Self(Arc::new(|_commands, _entity, _time, _weight| {})) } } -// We have to implement `Typed` manually because of the embedded `Box`, which can't be automatically derived yet. -impl Typed for VariableCurve { - fn type_info() -> &'static TypeInfo { - static CELL: NonGenericTypeInfoCell = NonGenericTypeInfoCell::new(); - CELL.get_or_set(|| { - TypeInfo::TupleStruct(TupleStructInfo::new::(&[UnnamedField::new::<()>(0)])) - }) +impl Debug for AnimationEventFn { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_tuple("AnimationEventFn").finish() } } -/// A list of [`VariableCurve`]s and the [`AnimationTargetId`]s to which they -/// apply. -/// -/// Because animation clips refer to targets by UUID, they can target any -/// [`AnimationTarget`] with that ID. -#[derive(Asset, Reflect, Clone, Debug, Default)] -pub struct AnimationClip { - curves: AnimationCurves, - duration: f32, +#[derive(Reflect, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)] +enum AnimationEventTarget { + Root, + Node(AnimationTargetId), } +type AnimationEvents = HashMap>; + /// A mapping from [`AnimationTargetId`] (e.g. bone in a skinned mesh) to the /// animation curves. pub type AnimationCurves = HashMap, NoOpHash>; @@ -400,6 +275,20 @@ impl AnimationClip { /// If the curve extends beyond the current duration of this clip, this /// method lengthens this clip to include the entire time span that the /// curve covers. + /// + /// More specifically: + /// - This clip will be sampled on the interval `[0, duration]`. + /// - Each curve in the clip is sampled by first clamping the sample time to its [domain]. + /// - Curves that extend forever never contribute to the duration. 
+    ///
+    /// For example, a curve with domain `[2, 5]` will extend the clip to cover `[0, 5]`
+    /// when added and will produce the same output on the entire interval `[0, 2]` because
+    /// these time values all get clamped to `2`.
+    ///
+    /// By contrast, a curve with domain `[-10, ∞]` will never extend the clip duration when
+    /// added and will be sampled only on `[0, duration]`, ignoring all negative time values.
+    ///
+    /// [domain]: AnimationCurve::domain
     pub fn add_curve_to_target(
         &mut self,
         target_id: AnimationTargetId,
@@ -435,6 +324,110 @@ impl AnimationClip {
             .or_default()
             .push(variable_curve);
     }
+
+    /// Add an untargeted [`Event`] to this [`AnimationClip`].
+    ///
+    /// The `event` will be cloned and triggered on the [`AnimationPlayer`] entity once the `time` (in seconds)
+    /// is reached in the animation.
+    ///
+    /// See also [`add_event_to_target`](Self::add_event_to_target).
+    pub fn add_event(&mut self, time: f32, event: impl Event + Clone) {
+        self.add_event_fn(
+            time,
+            move |commands: &mut Commands, entity: Entity, _time: f32, _weight: f32| {
+                commands.entity(entity).trigger(event.clone());
+            },
+        );
+    }
+
+    /// Add an [`Event`] to an [`AnimationTarget`] named by an [`AnimationTargetId`].
+    ///
+    /// The `event` will be cloned and triggered on the entity matching the target once the `time` (in seconds)
+    /// is reached in the animation.
+    ///
+    /// Use [`add_event`](Self::add_event) instead if you don't have a specific target.
+    pub fn add_event_to_target(
+        &mut self,
+        target_id: AnimationTargetId,
+        time: f32,
+        event: impl Event + Clone,
+    ) {
+        self.add_event_fn_to_target(
+            target_id,
+            time,
+            move |commands: &mut Commands, entity: Entity, _time: f32, _weight: f32| {
+                commands.entity(entity).trigger(event.clone());
+            },
+        );
+    }
+
+    /// Add an untargeted event function to this [`AnimationClip`].
+    ///
+    /// The `func` will trigger on the [`AnimationPlayer`] entity once the `time` (in seconds)
+    /// is reached in the animation.
+    ///
+    /// For a simpler [`Event`]-based alternative, see [`AnimationClip::add_event`].
+    /// See also [`add_event_to_target`](Self::add_event_to_target).
+    ///
+    /// ```
+    /// # use bevy_animation::AnimationClip;
+    /// # let mut clip = AnimationClip::default();
+    /// clip.add_event_fn(1.0, |commands, entity, time, weight| {
+    ///     println!("Animation Event Triggered {entity:#?} at time {time} with weight {weight}");
+    /// })
+    /// ```
+    pub fn add_event_fn(
+        &mut self,
+        time: f32,
+        func: impl Fn(&mut Commands, Entity, f32, f32) + Send + Sync + 'static,
+    ) {
+        self.add_event_internal(AnimationEventTarget::Root, time, func);
+    }
+
+    /// Add an event function to an [`AnimationTarget`] named by an [`AnimationTargetId`].
+    ///
+    /// The `func` will trigger on the entity matching the target once the `time` (in seconds)
+    /// is reached in the animation.
+    ///
+    /// For a simpler [`Event`]-based alternative, see [`AnimationClip::add_event_to_target`].
+    /// Use [`add_event`](Self::add_event) instead if you don't have a specific target.
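// Illustrative sketch (not part of this patch): wiring the event API above end to end.
// `AnimationClip::add_event`/`add_event_to_target` and the entity-scoped trigger come
// from this diff; `FootstepEvent`, the bone path, and `attach_observer` are placeholder
// names for the sake of the example.
use bevy_animation::{AnimationClip, AnimationTargetId};
use bevy_ecs::prelude::*;

#[derive(Event, Clone)]
struct FootstepEvent;

fn build_clip() -> AnimationClip {
    let mut clip = AnimationClip::default();
    // Fires on the `AnimationPlayer` entity when the playhead reaches 0.5 seconds.
    clip.add_event(0.5, FootstepEvent);
    // Fires on the entity whose `AnimationTarget` matches this id instead.
    clip.add_event_to_target(
        AnimationTargetId::from_iter(["Root", "LeftFoot"]),
        1.0,
        FootstepEvent,
    );
    clip
}

// The events are raised via `commands.entity(entity).trigger(...)`, so an observer
// attached to the player (or target) entity receives them.
fn attach_observer(commands: &mut Commands, player: Entity) {
    commands
        .entity(player)
        .observe(|_trigger: Trigger<FootstepEvent>| {
            // play a footstep sound, spawn dust particles, ...
        });
}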
+ /// + /// ``` + /// # use bevy_animation::{AnimationClip, AnimationTargetId}; + /// # let mut clip = AnimationClip::default(); + /// clip.add_event_fn_to_target(AnimationTargetId::from_iter(["Arm", "Hand"]), 1.0, |commands, entity, time, weight| { + /// println!("Animation Event Triggered {entity:#?} at time {time} with weight {weight}"); + /// }) + /// ``` + pub fn add_event_fn_to_target( + &mut self, + target_id: AnimationTargetId, + time: f32, + func: impl Fn(&mut Commands, Entity, f32, f32) + Send + Sync + 'static, + ) { + self.add_event_internal(AnimationEventTarget::Node(target_id), time, func); + } + + fn add_event_internal( + &mut self, + target: AnimationEventTarget, + time: f32, + trigger_fn: impl Fn(&mut Commands, Entity, f32, f32) + Send + Sync + 'static, + ) { + self.duration = self.duration.max(time); + let triggers = self.events.entry(target).or_default(); + match triggers.binary_search_by_key(&FloatOrd(time), |e| FloatOrd(e.time)) { + Ok(index) | Err(index) => triggers.insert( + index, + TimedAnimationEvent { + time, + event: AnimationEvent { + trigger: AnimationEventFn(Arc::new(trigger_fn)), + }, + }, + ), + } + } } /// Repetition behavior of an animation. @@ -461,6 +454,14 @@ pub enum AnimationEvaluationError { /// The component to be animated was present, but the property on the /// component wasn't present. PropertyNotPresent(TypeId), + + /// An internal error occurred in the implementation of + /// [`AnimationCurveEvaluator`]. + /// + /// You shouldn't ordinarily see this error unless you implemented + /// [`AnimationCurveEvaluator`] yourself. The contained [`TypeId`] is the ID + /// of the curve evaluator. + InconsistentEvaluatorImplementation(TypeId), } /// An animation that an [`AnimationPlayer`] is currently either playing or was @@ -471,12 +472,6 @@ pub enum AnimationEvaluationError { pub struct ActiveAnimation { /// The factor by which the weight from the [`AnimationGraph`] is multiplied. weight: f32, - /// The actual weight of this animation this frame, taking the - /// [`AnimationGraph`] into account. - computed_weight: f32, - /// The mask groups that are masked out (i.e. won't be animated) this frame, - /// taking the `AnimationGraph` into account. - computed_mask: AnimationMask, repeat: RepeatAnimation, speed: f32, /// Total time the animation has been played. @@ -487,9 +482,13 @@ pub struct ActiveAnimation { /// /// Note: This will always be in the range [0.0, animation clip duration] seek_time: f32, + /// The `seek_time` of the previous tick, if any. + last_seek_time: Option, /// Number of times the animation has completed. /// If the animation is playing in reverse, this increments when the animation passes the start. completions: u32, + /// `true` if the animation was completed at least once this tick. + just_completed: bool, paused: bool, } @@ -497,13 +496,13 @@ impl Default for ActiveAnimation { fn default() -> Self { Self { weight: 1.0, - computed_weight: 1.0, - computed_mask: 0, repeat: RepeatAnimation::default(), speed: 1.0, elapsed: 0.0, seek_time: 0.0, + last_seek_time: None, completions: 0, + just_completed: false, paused: false, } } @@ -525,6 +524,9 @@ impl ActiveAnimation { /// Update the animation given the delta time and the duration of the clip being played. 
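// Illustrative sketch (not part of this patch): the ordering invariant that
// `add_event_internal` maintains above. Per-target events stay sorted by time, and
// `binary_search_by_key` yields a valid insertion index whether the key already exists
// (`Ok`) or not (`Err`), which is why both arms are handled identically. Plain `f32`s
// and `total_cmp` stand in for `TimedAnimationEvent` and the `FloatOrd` key.
fn insert_sorted(times: &mut Vec<f32>, time: f32) {
    let index = match times.binary_search_by(|probe| probe.total_cmp(&time)) {
        Ok(index) | Err(index) => index,
    };
    times.insert(index, time);
}

fn main() {
    let mut times = vec![0.0_f32, 0.2, 0.5];
    insert_sorted(&mut times, 0.3);
    assert_eq!(times, [0.0, 0.2, 0.3, 0.5]);
}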
#[inline] fn update(&mut self, delta: f32, clip_duration: f32) { + self.just_completed = false; + self.last_seek_time = Some(self.seek_time); + if self.is_finished() { return; } @@ -536,6 +538,7 @@ impl ActiveAnimation { let under_time = self.speed < 0.0 && self.seek_time < 0.0; if over_time || under_time { + self.just_completed = true; self.completions += 1; if self.is_finished() { @@ -553,8 +556,10 @@ impl ActiveAnimation { /// Reset back to the initial state as if no time has elapsed. pub fn replay(&mut self) { + self.just_completed = false; self.completions = 0; self.elapsed = 0.0; + self.last_seek_time = None; self.seek_time = 0.0; } @@ -639,13 +644,33 @@ impl ActiveAnimation { } /// Seeks to a specific time in the animation. + /// + /// This will not trigger events between the current time and `seek_time`. + /// Use [`seek_to`](Self::seek_to) if this is desired. + pub fn set_seek_time(&mut self, seek_time: f32) -> &mut Self { + self.last_seek_time = Some(seek_time); + self.seek_time = seek_time; + self + } + + /// Seeks to a specific time in the animation. + /// + /// Note that any events between the current time and `seek_time` + /// will be triggered on the next update. + /// Use [`set_seek_time`](Self::set_seek_time) if this is undesired. pub fn seek_to(&mut self, seek_time: f32) -> &mut Self { + self.last_seek_time = Some(self.seek_time); self.seek_time = seek_time; self } /// Seeks to the beginning of the animation. + /// + /// Note that any events between the current time and `0.0` + /// will be triggered on the next update. + /// Use [`set_seek_time`](Self::set_seek_time) if this is undesired. pub fn rewind(&mut self) -> &mut Self { + self.last_seek_time = Some(self.seek_time); self.seek_time = 0.0; self } @@ -658,9 +683,7 @@ impl ActiveAnimation { #[derive(Component, Default, Reflect)] #[reflect(Component, Default)] pub struct AnimationPlayer { - /// We use a `BTreeMap` instead of a `HashMap` here to ensure a consistent - /// ordering when applying the animations. - active_animations: BTreeMap, + active_animations: HashMap, blend_weights: HashMap, } @@ -679,27 +702,112 @@ impl Clone for AnimationPlayer { } } -/// Information needed during the traversal of the animation graph in -/// [`advance_animations`]. +/// Temporary data that the [`animate_targets`] system maintains. #[derive(Default)] -pub struct AnimationGraphEvaluator { - /// The stack used for the depth-first search of the graph. - dfs_stack: Vec, - /// The list of visited nodes during the depth-first traversal. - dfs_visited: FixedBitSet, - /// Accumulated weights and masks for each node. - nodes: Vec, +pub struct AnimationEvaluationState { + /// Stores all [`AnimationCurveEvaluator`]s corresponding to properties that + /// we've seen so far. + /// + /// This is a mapping from the id of an animation curve evaluator to + /// the animation curve evaluator itself. + /// + /// For efficiency's sake, the [`AnimationCurveEvaluator`]s are cached from + /// frame to frame and animation target to animation target. Therefore, + /// there may be entries in this list corresponding to properties that the + /// current [`AnimationPlayer`] doesn't animate. To iterate only over the + /// properties that are currently being animated, consult the + /// [`Self::current_evaluators`] set. + evaluators: AnimationCurveEvaluators, + + /// The set of [`AnimationCurveEvaluator`] types that the current + /// [`AnimationPlayer`] is animating. + /// + /// This is built up as new curve evaluators are encountered during graph + /// traversal. 
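// Illustrative sketch (not part of this patch): the practical difference between the two
// seek methods documented above. `animation` is an `&mut ActiveAnimation` borrowed from
// an `AnimationPlayer`; the target time of 3.0 seconds is a placeholder.
use bevy_animation::ActiveAnimation;

fn jump_ahead(animation: &mut ActiveAnimation) {
    // Any events between the current time and 3.0 are still triggered on the next update.
    animation.seek_to(3.0);

    // The same jump, but the skipped-over events are not triggered.
    animation.set_seek_time(3.0);
}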
+ current_evaluators: CurrentEvaluators, } -/// The accumulated weight and computed mask for a single node. -#[derive(Clone, Copy, Default, Debug)] -struct EvaluatedAnimationGraphNode { - /// The weight that has been accumulated for this node, taking its - /// ancestors' weights into account. - weight: f32, - /// The mask that has been computed for this node, taking its ancestors' - /// masks into account. - mask: AnimationMask, +#[derive(Default)] +struct AnimationCurveEvaluators { + component_property_curve_evaluators: + PreHashMap<(TypeId, usize), Box>, + type_id_curve_evaluators: TypeIdMap>, +} + +impl AnimationCurveEvaluators { + #[inline] + pub(crate) fn get_mut(&mut self, id: EvaluatorId) -> Option<&mut dyn AnimationCurveEvaluator> { + match id { + EvaluatorId::ComponentField(component_property) => self + .component_property_curve_evaluators + .get_mut(component_property), + EvaluatorId::Type(type_id) => self.type_id_curve_evaluators.get_mut(&type_id), + } + .map(|e| &mut **e) + } + + #[inline] + pub(crate) fn get_or_insert_with( + &mut self, + id: EvaluatorId, + func: impl FnOnce() -> Box, + ) -> &mut dyn AnimationCurveEvaluator { + match id { + EvaluatorId::ComponentField(component_property) => &mut **self + .component_property_curve_evaluators + .get_or_insert_with(component_property, func), + EvaluatorId::Type(type_id) => match self.type_id_curve_evaluators.entry(type_id) { + bevy_utils::hashbrown::hash_map::Entry::Occupied(occupied_entry) => { + &mut **occupied_entry.into_mut() + } + bevy_utils::hashbrown::hash_map::Entry::Vacant(vacant_entry) => { + &mut **vacant_entry.insert(func()) + } + }, + } + } +} + +#[derive(Default)] +struct CurrentEvaluators { + component_properties: PreHashMap<(TypeId, usize), ()>, + type_ids: TypeIdMap<()>, +} + +impl CurrentEvaluators { + pub(crate) fn keys(&self) -> impl Iterator { + self.component_properties + .keys() + .map(EvaluatorId::ComponentField) + .chain(self.type_ids.keys().copied().map(EvaluatorId::Type)) + } + + pub(crate) fn clear( + &mut self, + mut visit: impl FnMut(EvaluatorId) -> Result<(), AnimationEvaluationError>, + ) -> Result<(), AnimationEvaluationError> { + for (key, _) in self.component_properties.drain() { + (visit)(EvaluatorId::ComponentField(&key))?; + } + + for (key, _) in self.type_ids.drain() { + (visit)(EvaluatorId::Type(key))?; + } + + Ok(()) + } + + #[inline] + pub(crate) fn insert(&mut self, id: EvaluatorId) { + match id { + EvaluatorId::ComponentField(component_property) => { + self.component_properties.insert(*component_property, ()); + } + EvaluatorId::Type(type_id) => { + self.type_ids.insert(type_id, ()); + } + } + } } impl AnimationPlayer { @@ -744,8 +852,8 @@ impl AnimationPlayer { self.active_animations.iter_mut() } - #[deprecated = "Use `animation_is_playing` instead"] - /// Check if the given animation node is being played. + /// Returns true if the animation is currently playing or paused, or false + /// if the animation is stopped. pub fn is_playing_animation(&self, animation: AnimationNodeIndex) -> bool { self.active_animations.contains_key(&animation) } @@ -832,6 +940,7 @@ impl AnimationPlayer { self.active_animations.get_mut(&animation) } + #[deprecated = "Use `is_playing_animation` instead"] /// Returns true if the animation is currently playing or paused, or false /// if the animation is stopped. 
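// Illustrative sketch (not part of this patch): the deprecation direction flips below, so
// call sites should migrate to `is_playing_animation`. `attack_node` is a placeholder
// `AnimationNodeIndex`.
use bevy_animation::{graph::AnimationNodeIndex, AnimationPlayer};

fn attack_in_progress(player: &AnimationPlayer, attack_node: AnimationNodeIndex) -> bool {
    // was: player.animation_is_playing(attack_node), which is now deprecated
    player.is_playing_animation(attack_node)
}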
pub fn animation_is_playing(&self, animation: AnimationNodeIndex) -> bool { @@ -839,15 +948,56 @@ impl AnimationPlayer { } } +/// A system that triggers untargeted animation events for the currently-playing animations. +fn trigger_untargeted_animation_events( + mut commands: Commands, + clips: Res>, + graphs: Res>, + players: Query<(Entity, &AnimationPlayer, &AnimationGraphHandle)>, +) { + for (entity, player, graph_id) in &players { + // The graph might not have loaded yet. Safely bail. + let Some(graph) = graphs.get(graph_id) else { + return; + }; + + for (index, active_animation) in player.active_animations.iter() { + if active_animation.paused { + continue; + } + + let Some(clip) = graph + .get(*index) + .and_then(|node| match &node.node_type { + AnimationNodeType::Clip(handle) => Some(handle), + AnimationNodeType::Blend | AnimationNodeType::Add => None, + }) + .and_then(|id| clips.get(id)) + else { + continue; + }; + + let Some(triggered_events) = + TriggeredEvents::from_animation(AnimationEventTarget::Root, clip, active_animation) + else { + continue; + }; + + for TimedAnimationEvent { time, event } in triggered_events.iter() { + event.trigger(&mut commands, entity, *time, active_animation.weight); + } + } + } +} + /// A system that advances the time for all playing animations. pub fn advance_animations( time: Res` and `RotationCurve` + // will both yield a `RotationCurveEvaluator` and + // therefore will share the same evaluator in this + // table. + let curve_evaluator_id = (*curve.0).evaluator_id(); + let curve_evaluator = evaluation_state + .evaluators + .get_or_insert_with(curve_evaluator_id.clone(), || { + curve.0.create_evaluator() + }); + + evaluation_state + .current_evaluators + .insert(curve_evaluator_id); + + if let Err(err) = AnimationCurve::apply( + &*curve.0, + curve_evaluator, + seek_time, + weight, + animation_graph_node_index, + ) { + warn!("Animation application failed: {:?}", err); + } + } } } } + + if let Err(err) = evaluation_state.commit_all(entity_mut) { + warn!("Animation application failed: {:?}", err); + } }); } @@ -1049,10 +1246,14 @@ impl Plugin for AnimationPlugin { .register_type::() .register_type::() .register_type::() + .register_type::() .register_type::() + .register_type::() + .init_resource::() .add_systems( PostUpdate, ( + graph::thread_animation_graphs, advance_transitions, advance_animations, // TODO: `animate_targets` can animate anything, so @@ -1062,13 +1263,14 @@ impl Plugin for AnimationPlugin { // `PostUpdate`. For now, we just disable ambiguity testing // for this system. animate_targets - .after(bevy_render::mesh::morph::inherit_weights) + .after(bevy_render::mesh::inherit_weights) .ambiguous_with_all(), + trigger_untargeted_animation_events, expire_completed_transitions, ) .chain() - .before(TransformSystem::TransformPropagate) - .before(UiSystem::Prepare), + .in_set(Animation) + .before(TransformSystem::TransformPropagate), ); } } @@ -1094,23 +1296,378 @@ impl AnimationTargetId { } } +impl> FromIterator for AnimationTargetId { + /// Creates a new [`AnimationTargetId`] by hashing a list of strings. + /// + /// Typically, this will be the path from the animation root to the + /// animation target (e.g. bone) that is to be animated. 
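// Illustrative sketch (not part of this patch): the animation systems are now grouped in
// the new `Animation` set (added to `bevy_app` later in this diff), so downstream systems
// can order against the whole set instead of individual systems. `read_final_poses` is a
// placeholder system.
use bevy_app::{Animation, App, PostUpdate};
use bevy_ecs::prelude::*;

fn read_final_poses() {
    // runs after animation playback and evaluation for this frame
}

fn configure(app: &mut App) {
    app.add_systems(PostUpdate, read_final_poses.after(Animation));
}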
+ fn from_iter>(iter: I) -> Self { + let mut blake3 = blake3::Hasher::new(); + blake3.update(ANIMATION_TARGET_NAMESPACE.as_bytes()); + for str in iter { + blake3.update(str.as_ref().as_bytes()); + } + let hash = blake3.finalize().as_bytes()[0..16].try_into().unwrap(); + Self(*uuid::Builder::from_sha1_bytes(hash).as_uuid()) + } +} + impl From<&Name> for AnimationTargetId { fn from(name: &Name) -> Self { AnimationTargetId::from_name(name) } } -impl AnimationGraphEvaluator { - // Starts a new depth-first search. - fn reset(&mut self, root: AnimationNodeIndex, node_count: usize) { - self.dfs_stack.clear(); - self.dfs_stack.push(root); +impl AnimationEvaluationState { + /// Calls [`AnimationCurveEvaluator::blend`] on all curve evaluator types + /// that we've been building up for a single target. + /// + /// The given `node_index` is the node that we're evaluating. + fn blend_all( + &mut self, + node_index: AnimationNodeIndex, + ) -> Result<(), AnimationEvaluationError> { + for curve_evaluator_type in self.current_evaluators.keys() { + self.evaluators + .get_mut(curve_evaluator_type) + .unwrap() + .blend(node_index)?; + } + Ok(()) + } + + /// Calls [`AnimationCurveEvaluator::add`] on all curve evaluator types + /// that we've been building up for a single target. + /// + /// The given `node_index` is the node that we're evaluating. + fn add_all(&mut self, node_index: AnimationNodeIndex) -> Result<(), AnimationEvaluationError> { + for curve_evaluator_type in self.current_evaluators.keys() { + self.evaluators + .get_mut(curve_evaluator_type) + .unwrap() + .add(node_index)?; + } + Ok(()) + } + + /// Calls [`AnimationCurveEvaluator::push_blend_register`] on all curve + /// evaluator types that we've been building up for a single target. + /// + /// The `weight` parameter is the weight that should be pushed onto the + /// stack, while the `node_index` parameter is the node that we're + /// evaluating. + fn push_blend_register_all( + &mut self, + weight: f32, + node_index: AnimationNodeIndex, + ) -> Result<(), AnimationEvaluationError> { + for curve_evaluator_type in self.current_evaluators.keys() { + self.evaluators + .get_mut(curve_evaluator_type) + .unwrap() + .push_blend_register(weight, node_index)?; + } + Ok(()) + } + + /// Calls [`AnimationCurveEvaluator::commit`] on all curve evaluator types + /// that we've been building up for a single target. + /// + /// This is the call that actually writes the computed values into the + /// components being animated. + fn commit_all( + &mut self, + mut entity_mut: AnimationEntityMut, + ) -> Result<(), AnimationEvaluationError> { + self.current_evaluators.clear(|id| { + self.evaluators + .get_mut(id) + .unwrap() + .commit(entity_mut.reborrow()) + }) + } +} + +/// All the events from an [`AnimationClip`] that occurred this tick. +#[derive(Debug, Clone)] +struct TriggeredEvents<'a> { + direction: TriggeredEventsDir, + lower: &'a [TimedAnimationEvent], + upper: &'a [TimedAnimationEvent], +} + +impl<'a> TriggeredEvents<'a> { + fn from_animation( + target: AnimationEventTarget, + clip: &'a AnimationClip, + active_animation: &ActiveAnimation, + ) -> Option { + let events = clip.events.get(&target)?; + let reverse = active_animation.is_playback_reversed(); + let is_finished = active_animation.is_finished(); + + // Return early if the animation have finished on a previous tick. + if is_finished && !active_animation.just_completed { + return None; + } + + // The animation completed this tick, while still playing. 
+ let looping = active_animation.just_completed && !is_finished; + let direction = match (reverse, looping) { + (false, false) => TriggeredEventsDir::Forward, + (false, true) => TriggeredEventsDir::ForwardLooping, + (true, false) => TriggeredEventsDir::Reverse, + (true, true) => TriggeredEventsDir::ReverseLooping, + }; + + let last_time = active_animation.last_seek_time?; + let this_time = active_animation.seek_time; + + let (lower, upper) = match direction { + // Return all events where last_time <= event.time < this_time. + TriggeredEventsDir::Forward => { + let start = events.partition_point(|event| event.time < last_time); + // The animation finished this tick, return any remaining events. + if is_finished { + (&events[start..], &events[0..0]) + } else { + let end = events.partition_point(|event| event.time < this_time); + (&events[start..end], &events[0..0]) + } + } + // Return all events where this_time < event.time <= last_time. + TriggeredEventsDir::Reverse => { + let end = events.partition_point(|event| event.time <= last_time); + // The animation finished, return any remaining events. + if is_finished { + (&events[..end], &events[0..0]) + } else { + let start = events.partition_point(|event| event.time <= this_time); + (&events[start..end], &events[0..0]) + } + } + // The animation is looping this tick and we have to return events where + // either last_tick <= event.time or event.time < this_tick. + TriggeredEventsDir::ForwardLooping => { + let upper_start = events.partition_point(|event| event.time < last_time); + let lower_end = events.partition_point(|event| event.time < this_time); + + let upper = &events[upper_start..]; + let lower = &events[..lower_end]; + (lower, upper) + } + // The animation is looping this tick and we have to return events where + // either last_tick >= event.time or event.time > this_tick. 
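// Illustrative sketch (not part of this patch): the window selection used above for plain
// forward playback. With events sorted by time, `partition_point` picks out the half-open
// range `[last_time, this_time)`; the looping arms apply the same idea with the window
// split across the clip boundary. Plain `f32`s stand in for `TimedAnimationEvent`s.
fn forward_window(event_times: &[f32], last_time: f32, this_time: f32) -> &[f32] {
    let start = event_times.partition_point(|&t| t < last_time);
    let end = event_times.partition_point(|&t| t < this_time);
    &event_times[start..end]
}

fn main() {
    let event_times = [0.0, 0.2, 0.5, 0.9];
    // The playhead advanced from 0.1 to 0.6 this tick: 0.2 and 0.5 fire, 0.9 does not.
    assert_eq!(forward_window(&event_times, 0.1, 0.6), &[0.2, 0.5]);
}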
+ TriggeredEventsDir::ReverseLooping => { + let lower_end = events.partition_point(|event| event.time <= last_time); + let upper_start = events.partition_point(|event| event.time <= this_time); + + let upper = &events[upper_start..]; + let lower = &events[..lower_end]; + (lower, upper) + } + }; + Some(Self { + direction, + lower, + upper, + }) + } + + fn is_empty(&self) -> bool { + self.lower.is_empty() && self.upper.is_empty() + } + + fn iter(&self) -> TriggeredEventsIter { + match self.direction { + TriggeredEventsDir::Forward => TriggeredEventsIter::Forward(self.lower.iter()), + TriggeredEventsDir::Reverse => TriggeredEventsIter::Reverse(self.lower.iter().rev()), + TriggeredEventsDir::ForwardLooping => TriggeredEventsIter::ForwardLooping { + upper: self.upper.iter(), + lower: self.lower.iter(), + }, + TriggeredEventsDir::ReverseLooping => TriggeredEventsIter::ReverseLooping { + lower: self.lower.iter().rev(), + upper: self.upper.iter().rev(), + }, + } + } +} + +#[derive(Debug, Clone, Copy)] +enum TriggeredEventsDir { + /// The animation is playing normally + Forward, + /// The animation is playing in reverse + Reverse, + /// The animation is looping this tick + ForwardLooping, + /// The animation playing in reverse and looping this tick + ReverseLooping, +} - self.dfs_visited.grow(node_count); - self.dfs_visited.clear(); +#[derive(Debug, Clone)] +enum TriggeredEventsIter<'a> { + Forward(slice::Iter<'a, TimedAnimationEvent>), + Reverse(iter::Rev>), + ForwardLooping { + upper: slice::Iter<'a, TimedAnimationEvent>, + lower: slice::Iter<'a, TimedAnimationEvent>, + }, + ReverseLooping { + lower: iter::Rev>, + upper: iter::Rev>, + }, +} - self.nodes.clear(); - self.nodes - .extend(iter::repeat(EvaluatedAnimationGraphNode::default()).take(node_count)); +impl<'a> Iterator for TriggeredEventsIter<'a> { + type Item = &'a TimedAnimationEvent; + + fn next(&mut self) -> Option { + match self { + TriggeredEventsIter::Forward(iter) => iter.next(), + TriggeredEventsIter::Reverse(rev) => rev.next(), + TriggeredEventsIter::ForwardLooping { upper, lower } => { + upper.next().or_else(|| lower.next()) + } + TriggeredEventsIter::ReverseLooping { lower, upper } => { + lower.next().or_else(|| upper.next()) + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[derive(Event, Reflect, Clone)] + struct A; + + #[track_caller] + fn assert_triggered_events_with( + active_animation: &ActiveAnimation, + clip: &AnimationClip, + expected: impl Into>, + ) { + let Some(events) = + TriggeredEvents::from_animation(AnimationEventTarget::Root, clip, active_animation) + else { + assert_eq!(expected.into(), Vec::::new()); + return; + }; + let got: Vec<_> = events.iter().map(|t| t.time).collect(); + assert_eq!( + expected.into(), + got, + "\n{events:#?}\nlast_time: {:?}\nthis_time:{}", + active_animation.last_seek_time, + active_animation.seek_time + ); + } + + #[test] + fn test_multiple_events_triggers() { + let mut active_animation = ActiveAnimation { + repeat: RepeatAnimation::Forever, + ..Default::default() + }; + let mut clip = AnimationClip { + duration: 1.0, + ..Default::default() + }; + clip.add_event(0.5, A); + clip.add_event(0.5, A); + clip.add_event(0.5, A); + + assert_triggered_events_with(&active_animation, &clip, []); + active_animation.update(0.8, clip.duration); // 0.0 : 0.8 + assert_triggered_events_with(&active_animation, &clip, [0.5, 0.5, 0.5]); + + clip.add_event(1.0, A); + clip.add_event(0.0, A); + clip.add_event(1.0, A); + clip.add_event(0.0, A); + + active_animation.update(0.4, clip.duration); 
// 0.8 : 0.2 + assert_triggered_events_with(&active_animation, &clip, [1.0, 1.0, 0.0, 0.0]); + } + + #[test] + fn test_events_triggers() { + let mut active_animation = ActiveAnimation::default(); + let mut clip = AnimationClip::default(); + clip.add_event(0.2, A); + clip.add_event(0.0, A); + assert_eq!(0.2, clip.duration); + + assert_triggered_events_with(&active_animation, &clip, []); + active_animation.update(0.1, clip.duration); // 0.0 : 0.1 + assert_triggered_events_with(&active_animation, &clip, [0.0]); + active_animation.update(0.1, clip.duration); // 0.1 : 0.2 + assert_triggered_events_with(&active_animation, &clip, [0.2]); + active_animation.update(0.1, clip.duration); // 0.2 : 0.2 + assert_triggered_events_with(&active_animation, &clip, []); + active_animation.update(0.1, clip.duration); // 0.2 : 0.2 + assert_triggered_events_with(&active_animation, &clip, []); + + active_animation.speed = -1.0; + active_animation.completions = 0; + assert_triggered_events_with(&active_animation, &clip, []); + active_animation.update(0.1, clip.duration); // 0.2 : 0.1 + assert_triggered_events_with(&active_animation, &clip, [0.2]); + active_animation.update(0.1, clip.duration); // 0.1 : 0.0 + assert_triggered_events_with(&active_animation, &clip, []); + active_animation.update(0.1, clip.duration); // 0.0 : 0.0 + assert_triggered_events_with(&active_animation, &clip, [0.0]); + active_animation.update(0.1, clip.duration); // 0.0 : 0.0 + assert_triggered_events_with(&active_animation, &clip, []); + } + + #[test] + fn test_events_triggers_looping() { + let mut active_animation = ActiveAnimation { + repeat: RepeatAnimation::Forever, + ..Default::default() + }; + let mut clip = AnimationClip::default(); + clip.add_event(0.3, A); + clip.add_event(0.0, A); + clip.add_event(0.2, A); + assert_eq!(0.3, clip.duration); + + assert_triggered_events_with(&active_animation, &clip, []); + active_animation.update(0.1, clip.duration); // 0.0 : 0.1 + assert_triggered_events_with(&active_animation, &clip, [0.0]); + active_animation.update(0.1, clip.duration); // 0.1 : 0.2 + assert_triggered_events_with(&active_animation, &clip, []); + active_animation.update(0.1, clip.duration); // 0.2 : 0.3 + assert_triggered_events_with(&active_animation, &clip, [0.2, 0.3]); + active_animation.update(0.1, clip.duration); // 0.3 : 0.1 + assert_triggered_events_with(&active_animation, &clip, [0.0]); + active_animation.update(0.1, clip.duration); // 0.1 : 0.2 + assert_triggered_events_with(&active_animation, &clip, []); + + active_animation.speed = -1.0; + active_animation.update(0.1, clip.duration); // 0.2 : 0.1 + assert_triggered_events_with(&active_animation, &clip, [0.2]); + active_animation.update(0.1, clip.duration); // 0.1 : 0.0 + assert_triggered_events_with(&active_animation, &clip, []); + active_animation.update(0.1, clip.duration); // 0.0 : 0.2 + assert_triggered_events_with(&active_animation, &clip, [0.0, 0.3]); + active_animation.update(0.1, clip.duration); // 0.2 : 0.1 + assert_triggered_events_with(&active_animation, &clip, [0.2]); + active_animation.update(0.1, clip.duration); // 0.1 : 0.0 + assert_triggered_events_with(&active_animation, &clip, []); + + active_animation.replay(); + active_animation.update(clip.duration, clip.duration); // 0.0 : 0.0 + assert_triggered_events_with(&active_animation, &clip, [0.0, 0.3, 0.2]); + + active_animation.replay(); + active_animation.seek_time = clip.duration; + active_animation.last_seek_time = Some(clip.duration); + active_animation.update(clip.duration, clip.duration); // 0.3 : 
0.0 + assert_triggered_events_with(&active_animation, &clip, [0.3, 0.2]); } } diff --git a/crates/bevy_animation/src/transition.rs b/crates/bevy_animation/src/transition.rs index 77e8fdfef5ca3e..679c63bec3ffb0 100644 --- a/crates/bevy_animation/src/transition.rs +++ b/crates/bevy_animation/src/transition.rs @@ -18,7 +18,7 @@ use crate::{graph::AnimationNodeIndex, ActiveAnimation, AnimationPlayer}; /// between animations. /// /// To use this component, place it on the same entity as the -/// [`AnimationPlayer`] and [`bevy_asset::Handle`]. It'll take +/// [`AnimationPlayer`] and [`AnimationGraphHandle`](crate::AnimationGraphHandle). It'll take /// responsibility for adjusting the weight on the [`ActiveAnimation`] in order /// to fade out animations smoothly. /// @@ -122,7 +122,7 @@ pub fn advance_transitions( for transition in &mut animation_transitions.transitions.iter_mut().rev() { // Decrease weight. transition.current_weight = (transition.current_weight - - transition.weight_decline_per_sec * time.delta_seconds()) + - transition.weight_decline_per_sec * time.delta_secs()) .max(0.0); // Update weight. diff --git a/crates/bevy_app/Cargo.toml b/crates/bevy_app/Cargo.toml index 758654b6566953..74c00dd78a8071 100644 --- a/crates/bevy_app/Cargo.toml +++ b/crates/bevy_app/Cargo.toml @@ -29,7 +29,8 @@ bevy_tasks = { path = "../bevy_tasks", version = "0.15.0-dev" } # other downcast-rs = "1.2.0" -thiserror = "1.0" +thiserror = { version = "2", default-features = false } +variadics_please = "1.0" [target.'cfg(not(target_arch = "wasm32"))'.dependencies] ctrlc = "3.4.4" diff --git a/crates/bevy_app/src/app.rs b/crates/bevy_app/src/app.rs index 33d42b3cb8683f..4ff12abd74a5bd 100644 --- a/crates/bevy_app/src/app.rs +++ b/crates/bevy_app/src/app.rs @@ -124,7 +124,7 @@ impl App { Self { sub_apps: SubApps { main: SubApp::new(), - sub_apps: HashMap::new(), + sub_apps: HashMap::default(), }, runner: Box::new(run_once), } @@ -800,8 +800,8 @@ impl App { /// commands.spawn(A); /// } /// - /// fn validate(query: Query<(&A, &B, &C)>) { - /// let (a, b, c) = query.single(); + /// fn validate(query: Option>) { + /// let (a, b, c) = query.unwrap().into_inner(); /// assert_eq!(b, &B(0)); /// assert_eq!(c, &C(0)); /// } @@ -863,8 +863,8 @@ impl App { /// commands.spawn(A); /// } /// - /// fn validate(query: Query<(&A, &B, &C)>) { - /// let (a, b, c) = query.single(); + /// fn validate(query: Option>) { + /// let (a, b, c) = query.unwrap().into_inner(); /// assert_eq!(b, &B(0)); /// assert_eq!(c, &C(2)); /// } @@ -928,8 +928,8 @@ impl App { /// commands.spawn(A); /// } /// - /// fn validate(query: Query<(&A, &B, &C)>) { - /// let (a, b, c) = query.single(); + /// fn validate(query: Option>) { + /// let (a, b, c) = query.unwrap().into_inner(); /// assert_eq!(b, &B(0)); /// assert_eq!(c, &C(0)); /// } @@ -993,8 +993,8 @@ impl App { /// commands.spawn(A); /// } /// - /// fn validate(query: Query<(&A, &B, &C)>) { - /// let (a, b, c) = query.single(); + /// fn validate(query: Option>) { + /// let (a, b, c) = query.unwrap().into_inner(); /// assert_eq!(b, &B(0)); /// assert_eq!(c, &C(2)); /// } @@ -1008,12 +1008,18 @@ impl App { .try_register_required_components_with::(constructor) } - /// Returns a reference to the [`World`]. + /// Returns a reference to the main [`SubApp`]'s [`World`]. This is the same as calling + /// [`app.main().world()`]. + /// + /// [`app.main().world()`]: SubApp::world pub fn world(&self) -> &World { self.main().world() } - /// Returns a mutable reference to the [`World`]. 
+ /// Returns a mutable reference to the main [`SubApp`]'s [`World`]. This is the same as calling + /// [`app.main_mut().world_mut()`]. + /// + /// [`app.main_mut().world_mut()`]: SubApp::world_mut pub fn world_mut(&mut self) -> &mut World { self.main_mut().world_mut() } @@ -1267,7 +1273,7 @@ impl App { /// # struct Friend; /// # /// // An observer system can be any system where the first parameter is a trigger - /// app.observe(|trigger: Trigger, friends: Query>, mut commands: Commands| { + /// app.add_observer(|trigger: Trigger, friends: Query>, mut commands: Commands| { /// if trigger.event().friends_allowed { /// for friend in friends.iter() { /// commands.trigger_targets(Invite, friend); @@ -1275,11 +1281,11 @@ impl App { /// } /// }); /// ``` - pub fn observe( + pub fn add_observer( &mut self, observer: impl IntoObserverSystem, ) -> &mut Self { - self.world_mut().observe(observer); + self.world_mut().add_observer(observer); self } } diff --git a/crates/bevy_app/src/main_schedule.rs b/crates/bevy_app/src/main_schedule.rs index f2b549443769cd..834205cf5347f6 100644 --- a/crates/bevy_app/src/main_schedule.rs +++ b/crates/bevy_app/src/main_schedule.rs @@ -181,6 +181,10 @@ pub struct PostUpdate; #[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)] pub struct Last; +/// Animation system set. This exists in [`PostUpdate`]. +#[derive(SystemSet, Debug, Hash, PartialEq, Eq, Clone)] +pub struct Animation; + /// Defines the schedules to be run for the [`Main`] schedule, including /// their order. #[derive(Resource, Debug)] diff --git a/crates/bevy_app/src/plugin.rs b/crates/bevy_app/src/plugin.rs index c264264695c630..73c2e452a81eef 100644 --- a/crates/bevy_app/src/plugin.rs +++ b/crates/bevy_app/src/plugin.rs @@ -129,7 +129,7 @@ pub trait Plugins: sealed::Plugins {} impl Plugins for T where T: sealed::Plugins {} mod sealed { - use bevy_utils::all_tuples; + use variadics_please::all_tuples; use crate::{App, AppError, Plugin, PluginGroup}; diff --git a/crates/bevy_app/src/plugin_group.rs b/crates/bevy_app/src/plugin_group.rs index 2fbff372f784ff..e828a012d0400c 100644 --- a/crates/bevy_app/src/plugin_group.rs +++ b/crates/bevy_app/src/plugin_group.rs @@ -48,6 +48,17 @@ use core::any::TypeId; /// # impl Plugin for WebCompatibilityPlugin { fn build(&self, _: &mut App) {} } /// # } /// # +/// # mod audio { +/// # use bevy_app::*; +/// # #[derive(Default)] +/// # pub struct AudioPlugins; +/// # impl PluginGroup for AudioPlugins { +/// # fn build(self) -> PluginGroupBuilder { +/// # PluginGroupBuilder::start::() +/// # } +/// # } +/// # } +/// # /// # mod internal { /// # use bevy_app::*; /// # #[derive(Default)] @@ -75,6 +86,10 @@ use core::any::TypeId; /// // generation, in which case you must wrap it in `#[custom()]`. /// #[custom(cfg(target_arch = "wasm32"))] /// web:::WebCompatibilityPlugin, +/// // You can nest `PluginGroup`s within other `PluginGroup`s, you just need the +/// // `#[plugin_group]` attribute. +/// #[plugin_group] +/// audio:::AudioPlugins, /// // You can hide plugins from documentation. Due to macro limitations, hidden plugins /// // must be last. /// #[doc(hidden)] @@ -94,6 +109,14 @@ macro_rules! plugin_group { $(#[custom($plugin_meta:meta)])* $($plugin_path:ident::)* : $plugin_name:ident ),* + $( + $(,)?$( + #[plugin_group] + $(#[cfg(feature = $plugin_group_feature:literal)])? + $(#[custom($plugin_group_meta:meta)])* + $($plugin_group_path:ident::)* : $plugin_group_name:ident + ),+ + )? $( $(,)?$( #[doc(hidden)] @@ -113,6 +136,10 @@ macro_rules! 
plugin_group { " - [`", stringify!($plugin_name), "`](" $(, stringify!($plugin_path), "::")*, stringify!($plugin_name), ")" $(, " - with feature `", $plugin_feature, "`")? )])* + $($(#[doc = concat!( + " - [`", stringify!($plugin_group_name), "`](" $(, stringify!($plugin_group_path), "::")*, stringify!($plugin_group_name), ")" + $(, " - with feature `", $plugin_group_feature, "`")? + )]),+)? $( /// $(#[doc = $post_doc])+ @@ -135,6 +162,18 @@ macro_rules! plugin_group { group = group.add(<$($plugin_path::)*$plugin_name>::default()); } )* + $($( + $(#[cfg(feature = $plugin_group_feature)])? + $(#[$plugin_group_meta])* + { + const _: () = { + const fn check_default() {} + check_default::<$($plugin_group_path::)*$plugin_group_name>(); + }; + + group = group.add_group(<$($plugin_group_path::)*$plugin_group_name>::default()); + } + )+)? $($( $(#[cfg(feature = $hidden_plugin_feature)])? $(#[$hidden_plugin_meta])* diff --git a/crates/bevy_app/src/sub_app.rs b/crates/bevy_app/src/sub_app.rs index 901ce891a4a622..93cbd089483f8f 100644 --- a/crates/bevy_app/src/sub_app.rs +++ b/crates/bevy_app/src/sub_app.rs @@ -37,6 +37,7 @@ type ExtractFn = Box; /// /// // Create a sub-app with the same resource and a single schedule. /// let mut sub_app = SubApp::new(); +/// sub_app.update_schedule = Some(Main.intern()); /// sub_app.insert_resource(Val(100)); /// /// // Setup an extract function to copy the resource's value in the main world. diff --git a/crates/bevy_asset/Cargo.toml b/crates/bevy_asset/Cargo.toml index 4bee550a4879c4..e1d62ad2a2169f 100644 --- a/crates/bevy_asset/Cargo.toml +++ b/crates/bevy_asset/Cargo.toml @@ -33,6 +33,7 @@ atomicow = "1.0" async-broadcast = "0.5" async-fs = "2.0" async-lock = "3.0" +bitflags = { version = "2.3", features = ["serde"] } crossbeam-channel = "0.5" downcast-rs = "1.2" disqualified = "1.0" @@ -43,7 +44,8 @@ blake3 = "1.5" parking_lot = { version = "0.12", features = ["arc_lock", "send_guard"] } ron = "0.8" serde = { version = "1", features = ["derive"] } -thiserror = "1.0" +thiserror = { version = "2", default-features = false } +derive_more = { version = "1", default-features = false, features = ["from"] } uuid = { version = "1.0", features = ["v4"] } [target.'cfg(target_os = "android")'.dependencies] @@ -60,7 +62,7 @@ wasm-bindgen-futures = "0.4" js-sys = "0.3" [target.'cfg(not(target_arch = "wasm32"))'.dependencies] -notify-debouncer-full = { version = "0.3.1", optional = true } +notify-debouncer-full = { version = "0.4.0", optional = true } [dev-dependencies] bevy_core = { path = "../bevy_core", version = "0.15.0-dev" } diff --git a/crates/bevy_asset/src/event.rs b/crates/bevy_asset/src/event.rs index 406d2398ea2879..832cc212d4b015 100644 --- a/crates/bevy_asset/src/event.rs +++ b/crates/bevy_asset/src/event.rs @@ -1,5 +1,6 @@ use crate::{Asset, AssetId, AssetLoadError, AssetPath, UntypedAssetId}; use bevy_ecs::event::Event; +use bevy_reflect::Reflect; use core::fmt::Debug; /// An event emitted when a specific [`Asset`] fails to load. @@ -42,7 +43,7 @@ impl From<&AssetLoadFailedEvent> for UntypedAssetLoadFailedEvent { } /// Events that occur for a specific loaded [`Asset`], such as "value changed" events and "dependency" events. -#[derive(Event)] +#[derive(Event, Reflect)] pub enum AssetEvent { /// Emitted whenever an [`Asset`] is added. 
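// Illustrative sketch (not part of this patch): `AssetEvent` now also derives `Reflect`;
// reading it from a system is unchanged. `log_added_assets` is a placeholder system and
// `A` is any asset type.
use bevy_asset::{Asset, AssetEvent};
use bevy_ecs::prelude::*;

fn log_added_assets<A: Asset>(mut events: EventReader<AssetEvent<A>>) {
    for event in events.read() {
        if let AssetEvent::Added { id } = event {
            // a new asset with this `id` was just added to `Assets<A>`
            let _ = id;
        }
    }
}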
Added { id: AssetId }, diff --git a/crates/bevy_asset/src/handle.rs b/crates/bevy_asset/src/handle.rs index 08e10e8e620bf4..9c61ff0f88c7b4 100644 --- a/crates/bevy_asset/src/handle.rs +++ b/crates/bevy_asset/src/handle.rs @@ -3,7 +3,6 @@ use crate::{ UntypedAssetId, }; use alloc::sync::Arc; -use bevy_ecs::prelude::*; use bevy_reflect::{std_traits::ReflectDefault, Reflect, TypePath}; use core::{ any::TypeId, @@ -122,8 +121,8 @@ impl core::fmt::Debug for StrongHandle { /// of the [`Handle`] are dropped. /// /// [`Handle::Strong`] also provides access to useful [`Asset`] metadata, such as the [`AssetPath`] (if it exists). -#[derive(Component, Reflect)] -#[reflect(Default, Component, Debug, Hash, PartialEq)] +#[derive(Reflect)] +#[reflect(Default, Debug, Hash, PartialEq)] pub enum Handle { /// A "strong" reference to a live (or loading) [`Asset`]. If a [`Handle`] is [`Handle::Strong`], the [`Asset`] will be kept /// alive until the [`Handle`] is dropped. Strong handles also provide access to additional asset metadata. @@ -516,6 +515,8 @@ pub enum UntypedAssetConversionError { #[cfg(test)] mod tests { use bevy_reflect::PartialReflect; + use bevy_utils::FixedHasher; + use core::hash::BuildHasher; use super::*; @@ -526,9 +527,7 @@ mod tests { /// Simple utility to directly hash a value using a fixed hasher fn hash(data: &T) -> u64 { - let mut hasher = bevy_utils::AHasher::default(); - data.hash(&mut hasher); - hasher.finish() + FixedHasher.hash_one(data) } /// Typed and Untyped `Handles` should be equivalent to each other and themselves diff --git a/crates/bevy_asset/src/id.rs b/crates/bevy_asset/src/id.rs index 1cca57278eb915..07a6d3db1209ef 100644 --- a/crates/bevy_asset/src/id.rs +++ b/crates/bevy_asset/src/id.rs @@ -9,6 +9,7 @@ use core::{ hash::Hash, marker::PhantomData, }; +use derive_more::derive::From; use thiserror::Error; /// A unique runtime-only identifier for an [`Asset`]. This is cheap to [`Copy`]/[`Clone`] and is not directly tied to the @@ -17,7 +18,7 @@ use thiserror::Error; /// For an identifier tied to the lifetime of an asset, see [`Handle`](`crate::Handle`). /// /// For an "untyped" / "generic-less" id, see [`UntypedAssetId`]. -#[derive(Reflect, Serialize, Deserialize)] +#[derive(Reflect, Serialize, Deserialize, From)] pub enum AssetId { /// A small / efficient runtime identifier that can be used to efficiently look up an asset stored in [`Assets`]. This is /// the "default" identifier used for assets. The alternative(s) (ex: [`AssetId::Uuid`]) will only be used if assets are @@ -154,13 +155,6 @@ impl From for AssetId { } } -impl From for AssetId { - #[inline] - fn from(value: Uuid) -> Self { - Self::Uuid { uuid: value } - } -} - /// An "untyped" / "generic-less" [`Asset`] identifier that behaves much like [`AssetId`], but stores the [`Asset`] type /// information at runtime instead of compile-time. This increases the size of the type, but it enables storing asset ids /// across asset types together and enables comparisons between them. @@ -310,7 +304,7 @@ impl PartialOrd for UntypedAssetId { /// Do not _ever_ use this across asset types for comparison. /// [`InternalAssetId`] contains no type information and will happily collide /// with indices across types. 
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord, From)] pub(crate) enum InternalAssetId { Index(AssetIndex), Uuid(Uuid), @@ -337,18 +331,6 @@ impl InternalAssetId { } } -impl From for InternalAssetId { - fn from(value: AssetIndex) -> Self { - Self::Index(value) - } -} - -impl From for InternalAssetId { - fn from(value: Uuid) -> Self { - Self::Uuid(value) - } -} - // Cross Operations impl PartialEq for AssetId { @@ -436,11 +418,9 @@ mod tests { /// Simple utility to directly hash a value using a fixed hasher fn hash(data: &T) -> u64 { - use core::hash::Hasher; + use core::hash::BuildHasher; - let mut hasher = bevy_utils::AHasher::default(); - data.hash(&mut hasher); - hasher.finish() + bevy_utils::FixedHasher.hash_one(data) } /// Typed and Untyped `AssetIds` should be equivalent to each other and themselves diff --git a/crates/bevy_asset/src/io/embedded/embedded_watcher.rs b/crates/bevy_asset/src/io/embedded/embedded_watcher.rs index dd863d7cf9285d..cc97eb3cda83c0 100644 --- a/crates/bevy_asset/src/io/embedded/embedded_watcher.rs +++ b/crates/bevy_asset/src/io/embedded/embedded_watcher.rs @@ -5,7 +5,7 @@ use crate::io::{ }; use alloc::sync::Arc; use bevy_utils::{tracing::warn, Duration, HashMap}; -use notify_debouncer_full::{notify::RecommendedWatcher, Debouncer, FileIdMap}; +use notify_debouncer_full::{notify::RecommendedWatcher, Debouncer, RecommendedCache}; use parking_lot::RwLock; use std::{ fs::File, @@ -18,7 +18,7 @@ use std::{ /// This watcher will watch for changes to the "source files", read the contents of changed files from the file system /// and overwrite the initial static bytes of the file embedded in the binary with the new dynamically loaded bytes. pub struct EmbeddedWatcher { - _watcher: Debouncer, + _watcher: Debouncer, } impl EmbeddedWatcher { diff --git a/crates/bevy_asset/src/io/file/file_asset.rs b/crates/bevy_asset/src/io/file/file_asset.rs index 3c20702167af74..a7af0197e24a96 100644 --- a/crates/bevy_asset/src/io/file/file_asset.rs +++ b/crates/bevy_asset/src/io/file/file_asset.rs @@ -164,6 +164,12 @@ impl AssetWriter for FileAssetWriter { Ok(()) } + async fn create_directory<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> { + let full_path = self.root_path.join(path); + async_fs::create_dir_all(full_path).await?; + Ok(()) + } + async fn remove_directory<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> { let full_path = self.root_path.join(path); async_fs::remove_dir_all(full_path).await?; diff --git a/crates/bevy_asset/src/io/file/file_watcher.rs b/crates/bevy_asset/src/io/file/file_watcher.rs index 1da55fae976077..bb4cf109c32c10 100644 --- a/crates/bevy_asset/src/io/file/file_watcher.rs +++ b/crates/bevy_asset/src/io/file/file_watcher.rs @@ -9,9 +9,9 @@ use notify_debouncer_full::{ notify::{ self, event::{AccessKind, AccessMode, CreateKind, ModifyKind, RemoveKind, RenameMode}, - RecommendedWatcher, RecursiveMode, Watcher, + RecommendedWatcher, RecursiveMode, }, - DebounceEventResult, Debouncer, FileIdMap, + DebounceEventResult, Debouncer, RecommendedCache, }; use std::path::{Path, PathBuf}; @@ -21,7 +21,7 @@ use std::path::{Path, PathBuf}; /// This introduces a small delay in processing events, but it helps reduce event duplicates. A small delay is also necessary /// on some systems to avoid processing a change event before it has actually been applied. 
pub struct FileWatcher { - _watcher: Debouncer, + _watcher: Debouncer, } impl FileWatcher { @@ -73,7 +73,7 @@ pub(crate) fn new_asset_event_debouncer( root: PathBuf, debounce_wait_time: Duration, mut handler: impl FilesystemEventHandler, -) -> Result, notify::Error> { +) -> Result, notify::Error> { let root = super::get_base_path().join(root); let mut debouncer = new_debouncer( debounce_wait_time, @@ -245,8 +245,7 @@ pub(crate) fn new_asset_event_debouncer( } }, )?; - debouncer.watcher().watch(&root, RecursiveMode::Recursive)?; - debouncer.cache().add_root(&root, RecursiveMode::Recursive); + debouncer.watch(&root, RecursiveMode::Recursive)?; Ok(debouncer) } diff --git a/crates/bevy_asset/src/io/file/sync_file_asset.rs b/crates/bevy_asset/src/io/file/sync_file_asset.rs index 5887724736b471..cadea494929003 100644 --- a/crates/bevy_asset/src/io/file/sync_file_asset.rs +++ b/crates/bevy_asset/src/io/file/sync_file_asset.rs @@ -205,6 +205,12 @@ impl AssetWriter for FileAssetWriter { Ok(()) } + async fn create_directory<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> { + let full_path = self.root_path.join(path); + std::fs::create_dir_all(full_path)?; + Ok(()) + } + async fn remove_directory<'a>(&'a self, path: &'a Path) -> Result<(), AssetWriterError> { let full_path = self.root_path.join(path); std::fs::remove_dir_all(full_path)?; diff --git a/crates/bevy_asset/src/io/gated.rs b/crates/bevy_asset/src/io/gated.rs index cb205f12a81bd8..388145a4686b05 100644 --- a/crates/bevy_asset/src/io/gated.rs +++ b/crates/bevy_asset/src/io/gated.rs @@ -44,7 +44,7 @@ impl GatedReader { /// Creates a new [`GatedReader`], which wraps the given `reader`. Also returns a [`GateOpener`] which /// can be used to open "path gates" for this [`GatedReader`]. pub fn new(reader: R) -> (Self, GateOpener) { - let gates = Arc::new(RwLock::new(HashMap::new())); + let gates = Arc::new(RwLock::new(HashMap::default())); ( Self { reader, diff --git a/crates/bevy_asset/src/io/mod.rs b/crates/bevy_asset/src/io/mod.rs index 47ca38b6851f78..0c4c0b1f00356c 100644 --- a/crates/bevy_asset/src/io/mod.rs +++ b/crates/bevy_asset/src/io/mod.rs @@ -23,6 +23,7 @@ pub use source::*; use alloc::sync::Arc; use bevy_utils::{BoxedFuture, ConditionalSendFuture}; +use core::future::Future; use core::{ mem::size_of, pin::Pin, @@ -37,7 +38,7 @@ use thiserror::Error; #[derive(Error, Debug, Clone)] pub enum AssetReaderError { /// Path not found. - #[error("Path not found: {0}")] + #[error("Path not found: {}", _0.display())] NotFound(PathBuf), /// Encountered an I/O error while loading an asset. @@ -118,6 +119,40 @@ impl AsyncSeekForward for Box { } } +/// Extension trait for [`AsyncSeekForward`]. +pub trait AsyncSeekForwardExt: AsyncSeekForward { + /// Seek by the provided `offset` in the forwards direction, using the [`AsyncSeekForward`] trait. 
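// Illustrative sketch (not part of this patch): using the `AsyncSeekForwardExt` extension
// declared just below. Any reader implementing `AsyncSeekForward` can skip ahead with a
// plain `.await`; the 16-byte header size is a placeholder.
use bevy_asset::io::{AsyncSeekForward, AsyncSeekForwardExt};

async fn skip_fixed_header<R: AsyncSeekForward + Unpin>(reader: &mut R) -> std::io::Result<u64> {
    reader.seek_forward(16).await
}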
+ fn seek_forward(&mut self, offset: u64) -> SeekForwardFuture<'_, Self> + where + Self: Unpin, + { + SeekForwardFuture { + seeker: self, + offset, + } + } +} + +impl AsyncSeekForwardExt for R {} + +#[derive(Debug)] +#[must_use = "futures do nothing unless you `.await` or poll them"] +pub struct SeekForwardFuture<'a, S: Unpin + ?Sized> { + seeker: &'a mut S, + offset: u64, +} + +impl Unpin for SeekForwardFuture<'_, S> {} + +impl Future for SeekForwardFuture<'_, S> { + type Output = futures_lite::io::Result; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let offset = self.offset; + Pin::new(&mut *self.seeker).poll_seek_forward(cx, offset) + } +} + /// A type returned from [`AssetReader::read`], which is used to read the contents of a file /// (or virtual file) corresponding to an asset. /// @@ -347,6 +382,12 @@ pub trait AssetWriter: Send + Sync + 'static { old_path: &'a Path, new_path: &'a Path, ) -> impl ConditionalSendFuture>; + /// Creates a directory at the given path, including all parent directories if they do not + /// already exist. + fn create_directory<'a>( + &'a self, + path: &'a Path, + ) -> impl ConditionalSendFuture>; /// Removes the directory at the given path, including all assets _and_ directories in that directory. fn remove_directory<'a>( &'a self, @@ -423,6 +464,12 @@ pub trait ErasedAssetWriter: Send + Sync + 'static { old_path: &'a Path, new_path: &'a Path, ) -> BoxedFuture<'a, Result<(), AssetWriterError>>; + /// Creates a directory at the given path, including all parent directories if they do not + /// already exist. + fn create_directory<'a>( + &'a self, + path: &'a Path, + ) -> BoxedFuture<'a, Result<(), AssetWriterError>>; /// Removes the directory at the given path, including all assets _and_ directories in that directory. fn remove_directory<'a>( &'a self, @@ -486,6 +533,12 @@ impl ErasedAssetWriter for T { ) -> BoxedFuture<'a, Result<(), AssetWriterError>> { Box::pin(Self::rename_meta(self, old_path, new_path)) } + fn create_directory<'a>( + &'a self, + path: &'a Path, + ) -> BoxedFuture<'a, Result<(), AssetWriterError>> { + Box::pin(Self::create_directory(self, path)) + } fn remove_directory<'a>( &'a self, path: &'a Path, @@ -726,10 +779,7 @@ struct EmptyPathStream; impl Stream for EmptyPathStream { type Item = PathBuf; - fn poll_next( - self: Pin<&mut Self>, - _cx: &mut core::task::Context<'_>, - ) -> Poll> { + fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll> { Poll::Ready(None) } } diff --git a/crates/bevy_asset/src/io/source.rs b/crates/bevy_asset/src/io/source.rs index 4af5b377d28c88..c0bab2037f8e35 100644 --- a/crates/bevy_asset/src/io/source.rs +++ b/crates/bevy_asset/src/io/source.rs @@ -343,7 +343,7 @@ impl AssetSourceBuilders { /// Builds a new [`AssetSources`] collection. If `watch` is true, the unprocessed sources will watch for changes. /// If `watch_processed` is true, the processed sources will watch for changes. 
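// Illustrative sketch (not part of this patch): the new `AssetWriter::create_directory`
// method declared above, mirroring the file-backed implementations added earlier in this
// diff. The output path is a placeholder; for the file writers it is resolved relative to
// the writer's root path.
use bevy_asset::io::{AssetWriter, AssetWriterError};
use std::path::Path;

async fn ensure_output_dir(writer: &impl AssetWriter) -> Result<(), AssetWriterError> {
    writer.create_directory(Path::new("processed/textures")).await
}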
pub fn build_sources(&mut self, watch: bool, watch_processed: bool) -> AssetSources { - let mut sources = HashMap::new(); + let mut sources = >::default(); for (id, source) in &mut self.sources { if let Some(data) = source.build( AssetSourceId::Name(id.clone_owned()), @@ -587,7 +587,7 @@ impl AssetSources { AssetSourceId::Name(name) => self .sources .get(&name) - .ok_or_else(|| MissingAssetSourceError(AssetSourceId::Name(name))), + .ok_or(MissingAssetSourceError(AssetSourceId::Name(name))), } } diff --git a/crates/bevy_asset/src/io/wasm.rs b/crates/bevy_asset/src/io/wasm.rs index 65eb852c257a23..25a5d223cbb0b1 100644 --- a/crates/bevy_asset/src/io/wasm.rs +++ b/crates/bevy_asset/src/io/wasm.rs @@ -52,7 +52,7 @@ fn js_value_to_err(context: &str) -> impl FnOnce(JsValue) -> std::io::Error + '_ impl HttpWasmAssetReader { async fn fetch_bytes<'a>(&self, path: PathBuf) -> Result { - // The JS global scope includes a self-reference via a specialising name, which can be used to determine the type of global context available. + // The JS global scope includes a self-reference via a specializing name, which can be used to determine the type of global context available. let global: Global = js_sys::global().unchecked_into(); let promise = if !global.window().is_undefined() { let window: web_sys::Window = global.unchecked_into(); diff --git a/crates/bevy_asset/src/lib.rs b/crates/bevy_asset/src/lib.rs index 08d08c4b1c2dfa..d252946c3c186d 100644 --- a/crates/bevy_asset/src/lib.rs +++ b/crates/bevy_asset/src/lib.rs @@ -176,6 +176,7 @@ mod loader; mod loader_builders; mod path; mod reflect; +mod render_asset; mod server; pub use assets::*; @@ -192,6 +193,7 @@ pub use loader_builders::{ }; pub use path::*; pub use reflect::*; +pub use render_asset::*; pub use server::*; /// Rusty Object Notation, a crate used to serialize and deserialize bevy assets. @@ -312,7 +314,7 @@ impl Plugin for AssetPlugin { { let mut sources = app .world_mut() - .get_resource_or_insert_with::(Default::default); + .get_resource_or_init::(); sources.init_default_source( &self.file_path, (!matches!(self.mode, AssetMode::Unprocessed)) @@ -517,7 +519,7 @@ impl AssetApp for App { { let mut sources = self .world_mut() - .get_resource_or_insert_with(AssetSourceBuilders::default); + .get_resource_or_init::(); sources.insert(id, source); } @@ -615,8 +617,7 @@ mod tests { }, loader::{AssetLoader, LoadContext}, Asset, AssetApp, AssetEvent, AssetId, AssetLoadError, AssetLoadFailedEvent, AssetPath, - AssetPlugin, AssetServer, Assets, DependencyLoadState, LoadState, - RecursiveDependencyLoadState, + AssetPlugin, AssetServer, Assets, }; use alloc::sync::Arc; use bevy_app::{App, Update}; @@ -898,23 +899,14 @@ mod tests { let asset_server = app.world().resource::().clone(); let handle: Handle = asset_server.load(a_path); let a_id = handle.id(); - let entity = app.world_mut().spawn(handle).id(); app.update(); { let a_text = get::(app.world(), a_id); let (a_load, a_deps, a_rec_deps) = asset_server.get_load_states(a_id).unwrap(); assert!(a_text.is_none(), "a's asset should not exist yet"); - assert_eq!(a_load, LoadState::Loading, "a should still be loading"); - assert_eq!( - a_deps, - DependencyLoadState::Loading, - "a deps should still be loading" - ); - assert_eq!( - a_rec_deps, - RecursiveDependencyLoadState::Loading, - "a recursive deps should still be loading" - ); + assert!(a_load.is_loading()); + assert!(a_deps.is_loading()); + assert!(a_rec_deps.is_loading()); } // Allow "a" to load ... 
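// Illustrative sketch (not part of this patch): the reworked assertions in these tests use
// the `is_loading`/`is_loaded`/`is_failed` helpers instead of comparing against the load
// state enums directly. The same style works outside of tests; `ready_to_spawn` is a
// placeholder.
use bevy_asset::{AssetServer, UntypedAssetId};

fn ready_to_spawn(asset_server: &AssetServer, id: UntypedAssetId) -> bool {
    match asset_server.get_load_states(id) {
        // Only treat the asset as ready once it and its recursive dependencies are loaded.
        Some((load, _deps, recursive_deps)) => load.is_loaded() && recursive_deps.is_loaded(),
        None => false,
    }
}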
wait for it to finish loading and validate results @@ -925,25 +917,25 @@ mod tests { let (a_load, a_deps, a_rec_deps) = asset_server.get_load_states(a_id).unwrap(); assert_eq!(a_text.text, "a"); assert_eq!(a_text.dependencies.len(), 2); - assert_eq!(a_load, LoadState::Loaded, "a is loaded"); - assert_eq!(a_deps, DependencyLoadState::Loading); - assert_eq!(a_rec_deps, RecursiveDependencyLoadState::Loading); + assert!(a_load.is_loaded()); + assert!(a_deps.is_loading()); + assert!(a_rec_deps.is_loading()); let b_id = a_text.dependencies[0].id(); let b_text = get::(world, b_id); let (b_load, b_deps, b_rec_deps) = asset_server.get_load_states(b_id).unwrap(); assert!(b_text.is_none(), "b component should not exist yet"); - assert_eq!(b_load, LoadState::Loading); - assert_eq!(b_deps, DependencyLoadState::Loading); - assert_eq!(b_rec_deps, RecursiveDependencyLoadState::Loading); + assert!(b_load.is_loading()); + assert!(b_deps.is_loading()); + assert!(b_rec_deps.is_loading()); let c_id = a_text.dependencies[1].id(); let c_text = get::(world, c_id); let (c_load, c_deps, c_rec_deps) = asset_server.get_load_states(c_id).unwrap(); assert!(c_text.is_none(), "c component should not exist yet"); - assert_eq!(c_load, LoadState::Loading); - assert_eq!(c_deps, DependencyLoadState::Loading); - assert_eq!(c_rec_deps, RecursiveDependencyLoadState::Loading); + assert!(c_load.is_loading()); + assert!(c_deps.is_loading()); + assert!(c_rec_deps.is_loading()); Some(()) }); @@ -955,25 +947,25 @@ mod tests { let (a_load, a_deps, a_rec_deps) = asset_server.get_load_states(a_id).unwrap(); assert_eq!(a_text.text, "a"); assert_eq!(a_text.dependencies.len(), 2); - assert_eq!(a_load, LoadState::Loaded); - assert_eq!(a_deps, DependencyLoadState::Loading); - assert_eq!(a_rec_deps, RecursiveDependencyLoadState::Loading); + assert!(a_load.is_loaded()); + assert!(a_deps.is_loading()); + assert!(a_rec_deps.is_loading()); let b_id = a_text.dependencies[0].id(); let b_text = get::(world, b_id)?; let (b_load, b_deps, b_rec_deps) = asset_server.get_load_states(b_id).unwrap(); assert_eq!(b_text.text, "b"); - assert_eq!(b_load, LoadState::Loaded); - assert_eq!(b_deps, DependencyLoadState::Loaded); - assert_eq!(b_rec_deps, RecursiveDependencyLoadState::Loaded); + assert!(b_load.is_loaded()); + assert!(b_deps.is_loaded()); + assert!(b_rec_deps.is_loaded()); let c_id = a_text.dependencies[1].id(); let c_text = get::(world, c_id); let (c_load, c_deps, c_rec_deps) = asset_server.get_load_states(c_id).unwrap(); assert!(c_text.is_none(), "c component should not exist yet"); - assert_eq!(c_load, LoadState::Loading); - assert_eq!(c_deps, DependencyLoadState::Loading); - assert_eq!(c_rec_deps, RecursiveDependencyLoadState::Loading); + assert!(c_load.is_loading()); + assert!(c_deps.is_loading()); + assert!(c_rec_deps.is_loading()); Some(()) }); @@ -990,31 +982,29 @@ mod tests { assert_eq!(a_text.text, "a"); assert_eq!(a_text.embedded, ""); assert_eq!(a_text.dependencies.len(), 2); - assert_eq!(a_load, LoadState::Loaded); + assert!(a_load.is_loaded()); let b_id = a_text.dependencies[0].id(); let b_text = get::(world, b_id)?; let (b_load, b_deps, b_rec_deps) = asset_server.get_load_states(b_id).unwrap(); assert_eq!(b_text.text, "b"); assert_eq!(b_text.embedded, ""); - assert_eq!(b_load, LoadState::Loaded); - assert_eq!(b_deps, DependencyLoadState::Loaded); - assert_eq!(b_rec_deps, RecursiveDependencyLoadState::Loaded); + assert!(b_load.is_loaded()); + assert!(b_deps.is_loaded()); + assert!(b_rec_deps.is_loaded()); let c_id = 
a_text.dependencies[1].id(); let c_text = get::(world, c_id)?; let (c_load, c_deps, c_rec_deps) = asset_server.get_load_states(c_id).unwrap(); assert_eq!(c_text.text, "c"); assert_eq!(c_text.embedded, "ab"); - assert_eq!(c_load, LoadState::Loaded); - assert_eq!( - c_deps, - DependencyLoadState::Loading, + assert!(c_load.is_loaded()); + assert!( + c_deps.is_loading(), "c deps should not be loaded yet because d has not loaded" ); - assert_eq!( - c_rec_deps, - RecursiveDependencyLoadState::Loading, + assert!( + c_rec_deps.is_loading(), "c rec deps should not be loaded yet because d has not loaded" ); @@ -1024,26 +1014,24 @@ mod tests { assert_eq!(sub_text.text, "hello"); let (sub_text_load, sub_text_deps, sub_text_rec_deps) = asset_server.get_load_states(sub_text_id).unwrap(); - assert_eq!(sub_text_load, LoadState::Loaded); - assert_eq!(sub_text_deps, DependencyLoadState::Loaded); - assert_eq!(sub_text_rec_deps, RecursiveDependencyLoadState::Loaded); + assert!(sub_text_load.is_loaded()); + assert!(sub_text_deps.is_loaded()); + assert!(sub_text_rec_deps.is_loaded()); let d_id = c_text.dependencies[0].id(); let d_text = get::(world, d_id); let (d_load, d_deps, d_rec_deps) = asset_server.get_load_states(d_id).unwrap(); assert!(d_text.is_none(), "d component should not exist yet"); - assert_eq!(d_load, LoadState::Loading); - assert_eq!(d_deps, DependencyLoadState::Loading); - assert_eq!(d_rec_deps, RecursiveDependencyLoadState::Loading); + assert!(d_load.is_loading()); + assert!(d_deps.is_loading()); + assert!(d_rec_deps.is_loading()); - assert_eq!( - a_deps, - DependencyLoadState::Loaded, + assert!( + a_deps.is_loaded(), "If c has been loaded, the a deps should all be considered loaded" ); - assert_eq!( - a_rec_deps, - RecursiveDependencyLoadState::Loading, + assert!( + a_rec_deps.is_loading(), "d is not loaded, so a's recursive deps should still be loading" ); world.insert_resource(IdResults { b_id, c_id, d_id }); @@ -1066,17 +1054,16 @@ mod tests { assert_eq!(d_text.text, "d"); assert_eq!(d_text.embedded, ""); - assert_eq!(c_load, LoadState::Loaded); - assert_eq!(c_deps, DependencyLoadState::Loaded); - assert_eq!(c_rec_deps, RecursiveDependencyLoadState::Loaded); + assert!(c_load.is_loaded()); + assert!(c_deps.is_loaded()); + assert!(c_rec_deps.is_loaded()); - assert_eq!(d_load, LoadState::Loaded); - assert_eq!(d_deps, DependencyLoadState::Loaded); - assert_eq!(d_rec_deps, RecursiveDependencyLoadState::Loaded); + assert!(d_load.is_loaded()); + assert!(d_deps.is_loaded()); + assert!(d_rec_deps.is_loaded()); - assert_eq!( - a_rec_deps, - RecursiveDependencyLoadState::Loaded, + assert!( + a_rec_deps.is_loaded(), "d is loaded, so a's recursive deps should be loaded" ); Some(()) @@ -1088,7 +1075,8 @@ mod tests { a.text = "Changed".to_string(); } - app.world_mut().despawn(entity); + drop(handle); + app.update(); assert_eq!( app.world().resource::>().len(), @@ -1223,7 +1211,6 @@ mod tests { ); } - app.world_mut().spawn(handle); gate_opener.open(a_path); gate_opener.open(b_path); gate_opener.open(c_path); @@ -1244,49 +1231,37 @@ mod tests { let d_id = c_text.dependencies[0].id(); let d_text = get::(world, d_id); let (d_load, d_deps, d_rec_deps) = asset_server.get_load_states(d_id).unwrap(); - if !matches!(d_load, LoadState::Failed(_)) { + + if !d_load.is_failed() { // wait until d has exited the loading state return None; } assert!(d_text.is_none()); - assert!(matches!(d_load, LoadState::Failed(_))); - assert!(matches!(d_deps, DependencyLoadState::Failed(_))); - assert!(matches!( - d_rec_deps, - 
RecursiveDependencyLoadState::Failed(_) - )); + assert!(d_load.is_failed()); + assert!(d_deps.is_failed()); + assert!(d_rec_deps.is_failed()); assert_eq!(a_text.text, "a"); - assert_eq!(a_load, LoadState::Loaded); - assert_eq!(a_deps, DependencyLoadState::Loaded); - assert!(matches!( - a_rec_deps, - RecursiveDependencyLoadState::Failed(_) - )); + assert!(a_load.is_loaded()); + assert!(a_deps.is_loaded()); + assert!(a_rec_deps.is_failed()); assert_eq!(b_text.text, "b"); - assert_eq!(b_load, LoadState::Loaded); - assert_eq!(b_deps, DependencyLoadState::Loaded); - assert_eq!(b_rec_deps, RecursiveDependencyLoadState::Loaded); + assert!(b_load.is_loaded()); + assert!(b_deps.is_loaded()); + assert!(b_rec_deps.is_loaded()); assert_eq!(c_text.text, "c"); - assert_eq!(c_load, LoadState::Loaded); - assert!(matches!(c_deps, DependencyLoadState::Failed(_))); - assert!(matches!( - c_rec_deps, - RecursiveDependencyLoadState::Failed(_) - )); - - assert_eq!(asset_server.load_state(a_id), LoadState::Loaded); - assert_eq!( - asset_server.dependency_load_state(a_id), - DependencyLoadState::Loaded - ); - assert!(matches!( - asset_server.recursive_dependency_load_state(a_id), - RecursiveDependencyLoadState::Failed(_) - )); + assert!(c_load.is_loaded()); + assert!(c_deps.is_failed()); + assert!(c_rec_deps.is_failed()); + + assert!(asset_server.load_state(a_id).is_loaded()); + assert!(asset_server.dependency_load_state(a_id).is_loaded()); + assert!(asset_server + .recursive_dependency_load_state(a_id) + .is_failed()); assert!(asset_server.is_loaded(a_id)); assert!(asset_server.is_loaded_with_direct_dependencies(a_id)); @@ -1343,15 +1318,14 @@ mod tests { let asset_server = app.world().resource::().clone(); let handle: Handle = asset_server.load(a_path); let a_id = handle.id(); - app.world_mut().spawn(handle); gate_opener.open(a_path); run_app_until(&mut app, |world| { let _a_text = get::(world, a_id)?; let (a_load, a_deps, a_rec_deps) = asset_server.get_load_states(a_id).unwrap(); - assert_eq!(a_load, LoadState::Loaded); - assert_eq!(a_deps, DependencyLoadState::Loading); - assert_eq!(a_rec_deps, RecursiveDependencyLoadState::Loading); + assert!(a_load.is_loaded()); + assert!(a_deps.is_loading()); + assert!(a_rec_deps.is_loading()); Some(()) }); @@ -1361,18 +1335,15 @@ mod tests { let b_id = a_text.dependencies[0].id(); let (b_load, _b_deps, _b_rec_deps) = asset_server.get_load_states(b_id).unwrap(); - if !matches!(b_load, LoadState::Failed(_)) { + if !b_load.is_failed() { // wait until b fails return None; } let (a_load, a_deps, a_rec_deps) = asset_server.get_load_states(a_id).unwrap(); - assert_eq!(a_load, LoadState::Loaded); - assert!(matches!(a_deps, DependencyLoadState::Failed(_))); - assert!(matches!( - a_rec_deps, - RecursiveDependencyLoadState::Failed(_) - )); + assert!(a_load.is_loaded()); + assert!(a_deps.is_failed()); + assert!(a_rec_deps.is_failed()); Some(()) }); @@ -1384,13 +1355,13 @@ mod tests { let _c_text = get::(world, c_id)?; let (a_load, a_deps, a_rec_deps) = asset_server.get_load_states(a_id).unwrap(); - assert_eq!(a_load, LoadState::Loaded); + assert!(a_load.is_loaded()); assert!( - matches!(a_deps, DependencyLoadState::Failed(_)), + a_deps.is_failed(), "Successful dependency load should not overwrite a previous failure" ); assert!( - matches!(a_rec_deps, RecursiveDependencyLoadState::Failed(_)), + a_rec_deps.is_failed(), "Successful dependency load should not overwrite a previous failure" ); Some(()) @@ -1681,7 +1652,7 @@ mod tests { // Check what just failed for error in errors.read() { 
let (load_state, _, _) = server.get_load_states(error.id).unwrap(); - assert!(matches!(load_state, LoadState::Failed(_))); + assert!(load_state.is_failed()); assert_eq!(*error.path.source(), AssetSourceId::Name("unstable".into())); match &error.error { AssetLoadError::AssetReaderError(read_error) => match read_error { @@ -1744,8 +1715,6 @@ mod tests { let a_handle: Handle = asset_server.load(a_path); let a_id = a_handle.id(); - app.world_mut().spawn(a_handle); - run_app_until(&mut app, |world| { let tracker = world.resource::(); match tracker.finished_asset { diff --git a/crates/bevy_asset/src/loader.rs b/crates/bevy_asset/src/loader.rs index e1315a96e26b27..cbc5aac3c110c7 100644 --- a/crates/bevy_asset/src/loader.rs +++ b/crates/bevy_asset/src/loader.rs @@ -153,7 +153,7 @@ pub struct LoadedAsset { impl LoadedAsset { /// Create a new loaded asset. This will use [`VisitAssetDependencies`](crate::VisitAssetDependencies) to populate `dependencies`. pub fn new_with_dependencies(value: A, meta: Option>) -> Self { - let mut dependencies = HashSet::new(); + let mut dependencies = >::default(); value.visit_dependencies(&mut |id| { dependencies.insert(id); }); @@ -367,7 +367,7 @@ impl<'a> LoadContext<'a> { /// (i.to_string(), labeled.finish(Image::default(), None)) /// })); /// } - + /// /// for handle in handles { /// let (label, loaded_asset) = handle.join().unwrap(); /// load_context.add_loaded_labeled_asset(label, loaded_asset); @@ -583,10 +583,9 @@ pub enum ReadAssetBytesError { #[error(transparent)] MissingProcessedAssetReaderError(#[from] MissingProcessedAssetReaderError), /// Encountered an I/O error while loading an asset. - #[error("Encountered an io error while loading asset at `{path}`: {source}")] + #[error("Encountered an io error while loading asset at `{}`: {source}", path.display())] Io { path: PathBuf, - #[source] source: std::io::Error, }, #[error("The LoadContext for this read_asset_bytes call requires hash metadata, but it was not provided. This is likely an internal implementation error.")] diff --git a/crates/bevy_asset/src/processor/mod.rs b/crates/bevy_asset/src/processor/mod.rs index 0030b890d851c6..c74fd80b5673d9 100644 --- a/crates/bevy_asset/src/processor/mod.rs +++ b/crates/bevy_asset/src/processor/mod.rs @@ -670,7 +670,7 @@ impl AssetProcessor { } for dependency in dependencies { - asset_infos.add_dependant(&dependency, asset_path.clone()); + asset_infos.add_dependent(&dependency, asset_path.clone()); } } } @@ -859,7 +859,7 @@ impl AssetProcessor { } } } - // Note: this lock must remain alive until all processed asset asset and meta writes have finished (or failed) + // Note: this lock must remain alive until all processed asset and meta writes have finished (or failed) // See ProcessedAssetInfo::file_transaction_lock docs for more info let _transaction_lock = { let mut infos = self.data.asset_infos.write().await; @@ -1137,7 +1137,7 @@ pub enum ProcessStatus { pub(crate) struct ProcessorAssetInfo { processed_info: Option, /// Paths of assets that depend on this asset when they are being processed. - dependants: HashSet>, + dependents: HashSet>, status: Option, /// A lock that controls read/write access to processed asset files. The lock is shared for both the asset bytes and the meta bytes. 
/// _This lock must be locked whenever a read or write to processed assets occurs_ @@ -1161,7 +1161,7 @@ impl Default for ProcessorAssetInfo { status_sender.set_overflow(true); Self { processed_info: Default::default(), - dependants: Default::default(), + dependents: Default::default(), file_transaction_lock: Default::default(), status: None, status_sender, @@ -1187,13 +1187,13 @@ pub struct ProcessorAssetInfos { /// The "current" in memory view of the asset space. During processing, if path does not exist in this, it should /// be considered non-existent. /// NOTE: YOU MUST USE `Self::get_or_insert` or `Self::insert` TO ADD ITEMS TO THIS COLLECTION TO ENSURE - /// `non_existent_dependants` DATA IS CONSUMED + /// `non_existent_dependents` DATA IS CONSUMED infos: HashMap, ProcessorAssetInfo>, - /// Dependants for assets that don't exist. This exists to track "dangling" asset references due to deleted / missing files. - /// If the dependant asset is added, it can "resolve" these dependencies and re-compute those assets. + /// Dependents for assets that don't exist. This exists to track "dangling" asset references due to deleted / missing files. + /// If the dependent asset is added, it can "resolve" these dependencies and re-compute those assets. /// Therefore this _must_ always be consistent with the `infos` data. If a new asset is added to `infos`, it should - /// check this maps for dependencies and add them. If an asset is removed, it should update the dependants here. - non_existent_dependants: HashMap, HashSet>>, + /// check this maps for dependencies and add them. If an asset is removed, it should update the dependents here. + non_existent_dependents: HashMap, HashSet>>, check_reprocess_queue: VecDeque>, } @@ -1201,9 +1201,9 @@ impl ProcessorAssetInfos { fn get_or_insert(&mut self, asset_path: AssetPath<'static>) -> &mut ProcessorAssetInfo { self.infos.entry(asset_path.clone()).or_insert_with(|| { let mut info = ProcessorAssetInfo::default(); - // track existing dependants by resolving existing "hanging" dependants. - if let Some(dependants) = self.non_existent_dependants.remove(&asset_path) { - info.dependants = dependants; + // track existing dependents by resolving existing "hanging" dependents. 
+ if let Some(dependents) = self.non_existent_dependents.remove(&asset_path) { + info.dependents = dependents; } info }) @@ -1217,15 +1217,15 @@ impl ProcessorAssetInfos { self.infos.get_mut(asset_path) } - fn add_dependant(&mut self, asset_path: &AssetPath<'static>, dependant: AssetPath<'static>) { + fn add_dependent(&mut self, asset_path: &AssetPath<'static>, dependent: AssetPath<'static>) { if let Some(info) = self.get_mut(asset_path) { - info.dependants.insert(dependant); + info.dependents.insert(dependent); } else { - let dependants = self - .non_existent_dependants + let dependents = self + .non_existent_dependents .entry(asset_path.clone()) .or_default(); - dependants.insert(dependant); + dependents.insert(dependent); } } @@ -1238,7 +1238,7 @@ impl ProcessorAssetInfos { match result { Ok(ProcessResult::Processed(processed_info)) => { debug!("Finished processing \"{:?}\"", asset_path); - // clean up old dependants + // clean up old dependents let old_processed_info = self .infos .get_mut(&asset_path) @@ -1247,15 +1247,15 @@ impl ProcessorAssetInfos { self.clear_dependencies(&asset_path, old_processed_info); } - // populate new dependants + // populate new dependents for process_dependency_info in &processed_info.process_dependencies { - self.add_dependant(&process_dependency_info.path, asset_path.to_owned()); + self.add_dependent(&process_dependency_info.path, asset_path.to_owned()); } let info = self.get_or_insert(asset_path); info.processed_info = Some(processed_info); info.update_status(ProcessStatus::Processed).await; - let dependants = info.dependants.iter().cloned().collect::>(); - for path in dependants { + let dependents = info.dependents.iter().cloned().collect::>(); + for path in dependents { self.check_reprocess_queue.push_back(path); } } @@ -1298,7 +1298,7 @@ impl ProcessorAssetInfos { full_hash: AssetHash::default(), process_dependencies: vec![], }); - self.add_dependant(dependency.path(), asset_path.to_owned()); + self.add_dependent(dependency.path(), asset_path.to_owned()); } let info = self.get_mut(&asset_path).expect("info should exist"); @@ -1319,13 +1319,13 @@ impl ProcessorAssetInfos { .broadcast(ProcessStatus::NonExistent) .await .unwrap(); - if !info.dependants.is_empty() { + if !info.dependents.is_empty() { error!( "The asset at {asset_path} was removed, but it had assets that depend on it to be processed. Consider updating the path in the following assets: {:?}", - info.dependants + info.dependents ); - self.non_existent_dependants - .insert(asset_path.clone(), info.dependants); + self.non_existent_dependents + .insert(asset_path.clone(), info.dependents); } } } @@ -1334,31 +1334,31 @@ impl ProcessorAssetInfos { async fn rename(&mut self, old: &AssetPath<'static>, new: &AssetPath<'static>) { let info = self.infos.remove(old); if let Some(mut info) = info { - if !info.dependants.is_empty() { + if !info.dependents.is_empty() { // TODO: We can't currently ensure "moved" folders with relative paths aren't broken because AssetPath // doesn't distinguish between absolute and relative paths. We have "erased" relativeness. In the short term, // we could do "remove everything in a folder and re-add", but that requires full rebuilds / destroying the cache. // If processors / loaders could enumerate dependencies, we could check if the new deps line up with a rename. // If deps encoded "relativeness" as part of loading, that would also work (this seems like the right call). 
- // TODO: it would be nice to log an error here for dependants that aren't also being moved + fixed. + // TODO: it would be nice to log an error here for dependents that aren't also being moved + fixed. // (see the remove impl). error!( "The asset at {old} was removed, but it had assets that depend on it to be processed. Consider updating the path in the following assets: {:?}", - info.dependants + info.dependents ); - self.non_existent_dependants - .insert(old.clone(), core::mem::take(&mut info.dependants)); + self.non_existent_dependents + .insert(old.clone(), core::mem::take(&mut info.dependents)); } if let Some(processed_info) = &info.processed_info { - // Update "dependant" lists for this asset's "process dependencies" to use new path. + // Update "dependent" lists for this asset's "process dependencies" to use new path. for dep in &processed_info.process_dependencies { if let Some(info) = self.infos.get_mut(&dep.path) { - info.dependants.remove(old); - info.dependants.insert(new.clone()); - } else if let Some(dependants) = self.non_existent_dependants.get_mut(&dep.path) + info.dependents.remove(old); + info.dependents.insert(new.clone()); + } else if let Some(dependents) = self.non_existent_dependents.get_mut(&dep.path) { - dependants.remove(old); - dependants.insert(new.clone()); + dependents.remove(old); + dependents.insert(new.clone()); } } } @@ -1367,7 +1367,7 @@ impl ProcessorAssetInfos { .broadcast(ProcessStatus::NonExistent) .await .unwrap(); - let dependants: Vec> = { + let dependents: Vec> = { let new_info = self.get_or_insert(new.clone()); new_info.processed_info = info.processed_info; new_info.status = info.status; @@ -1375,13 +1375,13 @@ impl ProcessorAssetInfos { if let Some(status) = new_info.status { new_info.status_sender.broadcast(status).await.unwrap(); } - new_info.dependants.iter().cloned().collect() + new_info.dependents.iter().cloned().collect() }; // Queue the asset for a reprocess check, in case it needs new meta. self.check_reprocess_queue.push_back(new.clone()); - for dependant in dependants { - // Queue dependants for reprocessing because they might have been waiting for this asset. - self.check_reprocess_queue.push_back(dependant); + for dependent in dependents { + // Queue dependents for reprocessing because they might have been waiting for this asset. 
+ self.check_reprocess_queue.push_back(dependent); } } } @@ -1389,11 +1389,11 @@ impl ProcessorAssetInfos { fn clear_dependencies(&mut self, asset_path: &AssetPath<'static>, removed_info: ProcessedInfo) { for old_load_dep in removed_info.process_dependencies { if let Some(info) = self.infos.get_mut(&old_load_dep.path) { - info.dependants.remove(asset_path); - } else if let Some(dependants) = - self.non_existent_dependants.get_mut(&old_load_dep.path) + info.dependents.remove(asset_path); + } else if let Some(dependents) = + self.non_existent_dependents.get_mut(&old_load_dep.path) { - dependants.remove(asset_path); + dependents.remove(asset_path); } } } @@ -1420,5 +1420,5 @@ pub enum InitializeError { #[error(transparent)] FailedToReadDestinationPaths(AssetReaderError), #[error("Failed to validate asset log: {0}")] - ValidateLogError(ValidateLogError), + ValidateLogError(#[from] ValidateLogError), } diff --git a/crates/bevy_asset/src/processor/process.rs b/crates/bevy_asset/src/processor/process.rs index 785b23f99c8b48..5b084de8495061 100644 --- a/crates/bevy_asset/src/processor/process.rs +++ b/crates/bevy_asset/src/processor/process.rs @@ -133,13 +133,16 @@ pub enum ProcessError { #[error(transparent)] MissingAssetLoaderForTypeName(#[from] MissingAssetLoaderForTypeNameError), #[error("The processor '{0}' does not exist")] + #[from(ignore)] MissingProcessor(String), #[error("Encountered an AssetReader error for '{path}': {err}")] + #[from(ignore)] AssetReaderError { path: AssetPath<'static>, err: AssetReaderError, }, #[error("Encountered an AssetWriter error for '{path}': {err}")] + #[from(ignore)] AssetWriterError { path: AssetPath<'static>, err: AssetWriterError, @@ -151,6 +154,7 @@ pub enum ProcessError { #[error(transparent)] MissingProcessedAssetWriterError(#[from] MissingProcessedAssetWriterError), #[error("Failed to read asset metadata for {path}: {err}")] + #[from(ignore)] ReadAssetMetaError { path: AssetPath<'static>, err: AssetReaderError, @@ -162,8 +166,10 @@ pub enum ProcessError { #[error("The wrong meta type was passed into a processor. This is probably an internal implementation error.")] WrongMetaType, #[error("Encountered an error while saving the asset: {0}")] - AssetSaveError(#[from] Box), + #[from(ignore)] + AssetSaveError(Box), #[error("Encountered an error while transforming the asset: {0}")] + #[from(ignore)] AssetTransformError(Box), #[error("Assets without extensions are not supported.")] ExtensionRequired, diff --git a/crates/bevy_asset/src/render_asset.rs b/crates/bevy_asset/src/render_asset.rs new file mode 100644 index 00000000000000..3bbc3dfd484586 --- /dev/null +++ b/crates/bevy_asset/src/render_asset.rs @@ -0,0 +1,49 @@ +use bevy_reflect::{Reflect, ReflectDeserialize, ReflectSerialize}; +use serde::{Deserialize, Serialize}; + +bitflags::bitflags! { + /// Defines where the asset will be used. + /// + /// If an asset is set to the `RENDER_WORLD` but not the `MAIN_WORLD`, the asset will be + /// unloaded from the asset server once it's been extracted and prepared in the render world. + /// + /// Unloading the asset saves on memory, as for most cases it is no longer necessary to keep + /// it in RAM once it's been uploaded to the GPU's VRAM. However, this means you can no longer + /// access the asset from the CPU (via the `Assets` resource) once unloaded (without re-loading it). + /// + /// If you never need access to the asset from the CPU past the first frame it's loaded on, + /// or only need very infrequent access, then set this to `RENDER_WORLD`. 
Otherwise, set this to + /// `RENDER_WORLD | MAIN_WORLD`. + /// + /// If you have an asset that doesn't actually need to end up in the render world, like an Image + /// that will be decoded into another Image asset, use `MAIN_WORLD` only. + /// + /// ## Platform-specific + /// + /// On Wasm, it is not possible for now to free reserved memory. To control memory usage, load assets + /// in sequence and unload one before loading the next. See this + /// [discussion about memory management](https://github.com/WebAssembly/design/issues/1397) for more + /// details. + #[repr(transparent)] + #[derive(Serialize, Deserialize, Hash, Clone, Copy, PartialEq, Eq, Debug, Reflect)] + #[reflect(opaque)] + #[reflect(Serialize, Deserialize, Hash, PartialEq, Debug)] + pub struct RenderAssetUsages: u8 { + const MAIN_WORLD = 1 << 0; + const RENDER_WORLD = 1 << 1; + } +} + +impl Default for RenderAssetUsages { + /// Returns the default render asset usage flags: + /// `RenderAssetUsages::MAIN_WORLD | RenderAssetUsages::RENDER_WORLD` + /// + /// This default configuration ensures the asset persists in the main world, even after being prepared for rendering. + /// + /// If your asset does not change, consider using `RenderAssetUsages::RENDER_WORLD` exclusively. This will cause + /// the asset to be unloaded from the main world once it has been prepared for rendering. If the asset does not need + /// to reach the render world at all, use `RenderAssetUsages::MAIN_WORLD` exclusively. + fn default() -> Self { + RenderAssetUsages::MAIN_WORLD | RenderAssetUsages::RENDER_WORLD + } +} diff --git a/crates/bevy_asset/src/server/info.rs b/crates/bevy_asset/src/server/info.rs index 85b3d02fa584ac..898b3a76ec46f1 100644 --- a/crates/bevy_asset/src/server/info.rs +++ b/crates/bevy_asset/src/server/info.rs @@ -8,7 +8,7 @@ use alloc::sync::{Arc, Weak}; use bevy_ecs::world::World; use bevy_tasks::Task; use bevy_utils::{tracing::warn, Entry, HashMap, HashSet, TypeIdMap}; -use core::any::TypeId; +use core::{any::TypeId, task::Waker}; use crossbeam_channel::Sender; use either::Either; use thiserror::Error; @@ -24,8 +24,8 @@ pub(crate) struct AssetInfo { failed_dependencies: HashSet, loading_rec_dependencies: HashSet, failed_rec_dependencies: HashSet, - dependants_waiting_on_load: HashSet, - dependants_waiting_on_recursive_dep_load: HashSet, + dependents_waiting_on_load: HashSet, + dependents_waiting_on_recursive_dep_load: HashSet, /// The asset paths required to load this asset. Hashes will only be set for processed assets. /// This is set using the value from [`LoadedAsset`]. /// This will only be populated if [`AssetInfos::watching_for_changes`] is set to `true` to @@ -36,6 +36,8 @@ pub(crate) struct AssetInfo { /// The number of handle drops to skip for this asset. /// See usage (and comments) in `get_or_create_path_handle` for context. 
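// A small sketch (not part of this diff) of how the `RenderAssetUsages` flags defined in the
// new `render_asset.rs` above are meant to be combined; the `choose_usages` helper is purely
// illustrative.
use bevy_asset::RenderAssetUsages;

fn choose_usages(needs_cpu_access: bool) -> RenderAssetUsages {
    if needs_cpu_access {
        // Keep the asset in RAM so it stays accessible through `Assets<T>` after extraction.
        RenderAssetUsages::MAIN_WORLD | RenderAssetUsages::RENDER_WORLD
    } else {
        // Unload from the main world once the asset has been prepared for rendering.
        RenderAssetUsages::RENDER_WORLD
    }
}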
handle_drops_to_skip: usize, + /// List of tasks waiting for this asset to complete loading + pub(crate) waiting_tasks: Vec, } impl AssetInfo { @@ -51,9 +53,10 @@ impl AssetInfo { loading_rec_dependencies: HashSet::default(), failed_rec_dependencies: HashSet::default(), loader_dependencies: HashMap::default(), - dependants_waiting_on_load: HashSet::default(), - dependants_waiting_on_recursive_dep_load: HashSet::default(), + dependents_waiting_on_load: HashSet::default(), + dependents_waiting_on_recursive_dep_load: HashSet::default(), handle_drops_to_skip: 0, + waiting_tasks: Vec::new(), } } } @@ -62,12 +65,12 @@ impl AssetInfo { pub(crate) struct AssetInfos { path_to_id: HashMap, TypeIdMap>, infos: HashMap, - /// If set to `true`, this informs [`AssetInfos`] to track data relevant to watching for changes (such as `load_dependants`) + /// If set to `true`, this informs [`AssetInfos`] to track data relevant to watching for changes (such as `load_dependents`) /// This should only be set at startup. pub(crate) watching_for_changes: bool, /// Tracks assets that depend on the "key" asset path inside their asset loaders ("loader dependencies") /// This should only be set when watching for changes to avoid unnecessary work. - pub(crate) loader_dependants: HashMap, HashSet>>, + pub(crate) loader_dependents: HashMap, HashSet>>, /// Tracks living labeled assets for a given source asset. /// This should only be set when watching for changes to avoid unnecessary work. pub(crate) living_labeled_assets: HashMap, HashSet>>, @@ -369,7 +372,7 @@ impl AssetInfos { Self::process_handle_drop_internal( &mut self.infos, &mut self.path_to_id, - &mut self.loader_dependants, + &mut self.loader_dependents, &mut self.living_labeled_assets, &mut self.pending_tasks, self.watching_for_changes, @@ -377,7 +380,7 @@ impl AssetInfos { ) } - /// Updates [`AssetInfo`] / load state for an asset that has finished loading (and relevant dependencies / dependants). + /// Updates [`AssetInfo`] / load state for an asset that has finished loading (and relevant dependencies / dependents). pub(crate) fn process_asset_load( &mut self, loaded_asset_id: UntypedAssetId, @@ -392,10 +395,10 @@ impl AssetInfos { loaded_asset.value.insert(loaded_asset_id, world); let mut loading_deps = loaded_asset.dependencies; - let mut failed_deps = HashSet::new(); + let mut failed_deps = >::default(); let mut dep_error = None; let mut loading_rec_deps = loading_deps.clone(); - let mut failed_rec_deps = HashSet::new(); + let mut failed_rec_deps = >::default(); let mut rec_dep_error = None; loading_deps.retain(|dep_id| { if let Some(dep_info) = self.get_mut(*dep_id) { @@ -404,7 +407,7 @@ impl AssetInfos { | RecursiveDependencyLoadState::NotLoaded => { // If dependency is loading, wait for it. dep_info - .dependants_waiting_on_recursive_dep_load + .dependents_waiting_on_recursive_dep_load .insert(loaded_asset_id); } RecursiveDependencyLoadState::Loaded => { @@ -422,7 +425,7 @@ impl AssetInfos { match dep_info.load_state { LoadState::NotLoaded | LoadState::Loading => { // If dependency is loading, wait for it. 
- dep_info.dependants_waiting_on_load.insert(loaded_asset_id); + dep_info.dependents_waiting_on_load.insert(loaded_asset_id); true } LoadState::Loaded => { @@ -466,7 +469,7 @@ impl AssetInfos { (_loading, _failed) => RecursiveDependencyLoadState::Failed(rec_dep_error.unwrap()), }; - let (dependants_waiting_on_load, dependants_waiting_on_rec_load) = { + let (dependents_waiting_on_load, dependents_waiting_on_rec_load) = { let watching_for_changes = self.watching_for_changes; // if watching for changes, track reverse loader dependencies for hot reloading if watching_for_changes { @@ -476,11 +479,11 @@ impl AssetInfos { .expect("Asset info should always exist at this point"); if let Some(asset_path) = &info.path { for loader_dependency in loaded_asset.loader_dependencies.keys() { - let dependants = self - .loader_dependants + let dependents = self + .loader_dependents .entry(loader_dependency.clone()) .or_default(); - dependants.insert(asset_path.clone()); + dependents.insert(asset_path.clone()); } } } @@ -498,49 +501,45 @@ impl AssetInfos { info.loader_dependencies = loaded_asset.loader_dependencies; } - let dependants_waiting_on_rec_load = if matches!( - rec_dep_load_state, - RecursiveDependencyLoadState::Loaded | RecursiveDependencyLoadState::Failed(_) - ) { - Some(core::mem::take( - &mut info.dependants_waiting_on_recursive_dep_load, - )) - } else { - None - }; + let dependents_waiting_on_rec_load = + if rec_dep_load_state.is_loaded() || rec_dep_load_state.is_failed() { + Some(core::mem::take( + &mut info.dependents_waiting_on_recursive_dep_load, + )) + } else { + None + }; ( - core::mem::take(&mut info.dependants_waiting_on_load), - dependants_waiting_on_rec_load, + core::mem::take(&mut info.dependents_waiting_on_load), + dependents_waiting_on_rec_load, ) }; - for id in dependants_waiting_on_load { + for id in dependents_waiting_on_load { if let Some(info) = self.get_mut(id) { info.loading_dependencies.remove(&loaded_asset_id); - if info.loading_dependencies.is_empty() - && !matches!(info.dep_load_state, DependencyLoadState::Failed(_)) - { + if info.loading_dependencies.is_empty() && !info.dep_load_state.is_failed() { // send dependencies loaded event info.dep_load_state = DependencyLoadState::Loaded; } } } - if let Some(dependants_waiting_on_rec_load) = dependants_waiting_on_rec_load { + if let Some(dependents_waiting_on_rec_load) = dependents_waiting_on_rec_load { match rec_dep_load_state { RecursiveDependencyLoadState::Loaded => { - for dep_id in dependants_waiting_on_rec_load { + for dep_id in dependents_waiting_on_rec_load { Self::propagate_loaded_state(self, loaded_asset_id, dep_id, sender); } } RecursiveDependencyLoadState::Failed(ref error) => { - for dep_id in dependants_waiting_on_rec_load { + for dep_id in dependents_waiting_on_rec_load { Self::propagate_failed_state(self, loaded_asset_id, dep_id, error); } } RecursiveDependencyLoadState::Loading | RecursiveDependencyLoadState::NotLoaded => { - // dependants_waiting_on_rec_load should be None in this case + // dependents_waiting_on_rec_load should be None in this case unreachable!("`Loading` and `NotLoaded` state should never be propagated.") } } @@ -554,17 +553,17 @@ impl AssetInfos { waiting_id: UntypedAssetId, sender: &Sender, ) { - let dependants_waiting_on_rec_load = if let Some(info) = infos.get_mut(waiting_id) { + let dependents_waiting_on_rec_load = if let Some(info) = infos.get_mut(waiting_id) { info.loading_rec_dependencies.remove(&loaded_id); if info.loading_rec_dependencies.is_empty() && 
info.failed_rec_dependencies.is_empty() { info.rec_dep_load_state = RecursiveDependencyLoadState::Loaded; - if info.load_state == LoadState::Loaded { + if info.load_state.is_loaded() { sender .send(InternalAssetEvent::LoadedWithDependencies { id: waiting_id }) .unwrap(); } Some(core::mem::take( - &mut info.dependants_waiting_on_recursive_dep_load, + &mut info.dependents_waiting_on_recursive_dep_load, )) } else { None @@ -573,8 +572,8 @@ impl AssetInfos { None }; - if let Some(dependants_waiting_on_rec_load) = dependants_waiting_on_rec_load { - for dep_id in dependants_waiting_on_rec_load { + if let Some(dependents_waiting_on_rec_load) = dependents_waiting_on_rec_load { + for dep_id in dependents_waiting_on_rec_load { Self::propagate_loaded_state(infos, waiting_id, dep_id, sender); } } @@ -587,19 +586,19 @@ impl AssetInfos { waiting_id: UntypedAssetId, error: &Arc, ) { - let dependants_waiting_on_rec_load = if let Some(info) = infos.get_mut(waiting_id) { + let dependents_waiting_on_rec_load = if let Some(info) = infos.get_mut(waiting_id) { info.loading_rec_dependencies.remove(&failed_id); info.failed_rec_dependencies.insert(failed_id); info.rec_dep_load_state = RecursiveDependencyLoadState::Failed(error.clone()); Some(core::mem::take( - &mut info.dependants_waiting_on_recursive_dep_load, + &mut info.dependents_waiting_on_recursive_dep_load, )) } else { None }; - if let Some(dependants_waiting_on_rec_load) = dependants_waiting_on_rec_load { - for dep_id in dependants_waiting_on_rec_load { + if let Some(dependents_waiting_on_rec_load) = dependents_waiting_on_rec_load { + for dep_id in dependents_waiting_on_rec_load { Self::propagate_failed_state(infos, waiting_id, dep_id, error); } } @@ -612,7 +611,7 @@ impl AssetInfos { } let error = Arc::new(error); - let (dependants_waiting_on_load, dependants_waiting_on_rec_load) = { + let (dependents_waiting_on_load, dependents_waiting_on_rec_load) = { let Some(info) = self.get_mut(failed_id) else { // The asset was already dropped. return; @@ -620,37 +619,40 @@ impl AssetInfos { info.load_state = LoadState::Failed(error.clone()); info.dep_load_state = DependencyLoadState::Failed(error.clone()); info.rec_dep_load_state = RecursiveDependencyLoadState::Failed(error.clone()); + for waker in info.waiting_tasks.drain(..) 
{ + waker.wake(); + } ( - core::mem::take(&mut info.dependants_waiting_on_load), - core::mem::take(&mut info.dependants_waiting_on_recursive_dep_load), + core::mem::take(&mut info.dependents_waiting_on_load), + core::mem::take(&mut info.dependents_waiting_on_recursive_dep_load), ) }; - for waiting_id in dependants_waiting_on_load { + for waiting_id in dependents_waiting_on_load { if let Some(info) = self.get_mut(waiting_id) { info.loading_dependencies.remove(&failed_id); info.failed_dependencies.insert(failed_id); // don't overwrite DependencyLoadState if already failed to preserve first error - if !(matches!(info.dep_load_state, DependencyLoadState::Failed(_))) { + if !info.dep_load_state.is_failed() { info.dep_load_state = DependencyLoadState::Failed(error.clone()); } } } - for waiting_id in dependants_waiting_on_rec_load { + for waiting_id in dependents_waiting_on_rec_load { Self::propagate_failed_state(self, failed_id, waiting_id, &error); } } - fn remove_dependants_and_labels( + fn remove_dependents_and_labels( info: &AssetInfo, - loader_dependants: &mut HashMap, HashSet>>, + loader_dependents: &mut HashMap, HashSet>>, path: &AssetPath<'static>, living_labeled_assets: &mut HashMap, HashSet>>, ) { for loader_dependency in info.loader_dependencies.keys() { - if let Some(dependants) = loader_dependants.get_mut(loader_dependency) { - dependants.remove(path); + if let Some(dependents) = loader_dependents.get_mut(loader_dependency) { + dependents.remove(path); } } @@ -674,7 +676,7 @@ impl AssetInfos { fn process_handle_drop_internal( infos: &mut HashMap, path_to_id: &mut HashMap, TypeIdMap>, - loader_dependants: &mut HashMap, HashSet>>, + loader_dependents: &mut HashMap, HashSet>>, living_labeled_assets: &mut HashMap, HashSet>>, pending_tasks: &mut HashMap>, watching_for_changes: bool, @@ -701,9 +703,9 @@ impl AssetInfos { }; if watching_for_changes { - Self::remove_dependants_and_labels( + Self::remove_dependents_and_labels( &info, - loader_dependants, + loader_dependents, path, living_labeled_assets, ); @@ -733,7 +735,7 @@ impl AssetInfos { Self::process_handle_drop_internal( &mut self.infos, &mut self.path_to_id, - &mut self.loader_dependants, + &mut self.loader_dependents, &mut self.living_labeled_assets, &mut self.pending_tasks, self.watching_for_changes, diff --git a/crates/bevy_asset/src/server/loaders.rs b/crates/bevy_asset/src/server/loaders.rs index 8c63a2306e132d..2442de389ae3db 100644 --- a/crates/bevy_asset/src/server/loaders.rs +++ b/crates/bevy_asset/src/server/loaders.rs @@ -5,10 +5,7 @@ use crate::{ use alloc::sync::Arc; use async_broadcast::RecvError; use bevy_tasks::IoTaskPool; -use bevy_utils::{ - tracing::{error, warn}, - HashMap, TypeIdMap, -}; +use bevy_utils::{tracing::warn, HashMap, TypeIdMap}; #[cfg(feature = "trace")] use bevy_utils::{ tracing::{info_span, instrument::Instrument}, diff --git a/crates/bevy_asset/src/server/mod.rs b/crates/bevy_asset/src/server/mod.rs index 316642a45a260f..6a2e4752190419 100644 --- a/crates/bevy_asset/src/server/mod.rs +++ b/crates/bevy_asset/src/server/mod.rs @@ -25,17 +25,13 @@ use bevy_utils::{ tracing::{error, info}, HashSet, }; -use core::{ - any::{Any, TypeId}, - future::Future, - panic::AssertUnwindSafe, -}; +use core::{any::TypeId, future::Future, panic::AssertUnwindSafe, task::Poll}; use crossbeam_channel::{Receiver, Sender}; use either::Either; use futures_lite::{FutureExt, StreamExt}; use info::*; use loaders::*; -use parking_lot::RwLock; +use parking_lot::{RwLock, RwLockWriteGuard}; use std::path::{Path, PathBuf}; use 
thiserror::Error; @@ -383,7 +379,7 @@ impl AssetServer { ); if should_load { - self.spawn_load_task(handle.clone().untyped(), path, &mut infos, guard); + self.spawn_load_task(handle.clone().untyped(), path, infos, guard); } handle @@ -407,7 +403,7 @@ impl AssetServer { ); if should_load { - self.spawn_load_task(handle.clone(), path, &mut infos, guard); + self.spawn_load_task(handle.clone(), path, infos, guard); } handle @@ -417,9 +413,13 @@ impl AssetServer { &self, handle: UntypedHandle, path: AssetPath<'static>, - infos: &mut AssetInfos, + infos: RwLockWriteGuard, guard: G, ) { + // drop the lock on `AssetInfos` before spawning a task that may block on it in single-threaded + #[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))] + drop(infos); + let owned_handle = handle.clone(); let server = self.clone(); let task = IoTaskPool::get().spawn(async move { @@ -433,7 +433,10 @@ impl AssetServer { }); #[cfg(not(any(target_arch = "wasm32", not(feature = "multi_threaded"))))] - infos.pending_tasks.insert(handle.id(), task); + { + let mut infos = infos; + infos.pending_tasks.insert(handle.id(), task); + } #[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))] task.detach(); @@ -469,6 +472,11 @@ impl AssetServer { HandleLoadingMode::Request, meta_transform, ); + + // drop the lock on `AssetInfos` before spawning a task that may block on it in single-threaded + #[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))] + drop(infos); + if !should_load { return handle; } @@ -778,6 +786,11 @@ impl AssetServer { let mut infos = self.data.infos.write(); let handle = infos.create_loading_handle_untyped(TypeId::of::(), core::any::type_name::()); + + // drop the lock on `AssetInfos` before spawning a task that may block on it in single-threaded + #[cfg(any(target_arch = "wasm32", not(feature = "multi_threaded")))] + drop(infos); + let id = handle.id(); let event_sender = self.data.asset_event_sender.clone(); @@ -1326,6 +1339,132 @@ impl AssetServer { }) }) } + + /// Returns a future that will suspend until the specified asset and its dependencies finish + /// loading. + /// + /// # Errors + /// + /// This will return an error if the asset or any of its dependencies fail to load, + /// or if the asset has not been queued up to be loaded. + pub async fn wait_for_asset( + &self, + // NOTE: We take a reference to a handle so we know it will outlive the future, + // which ensures the handle won't be dropped while waiting for the asset. + handle: &Handle, + ) -> Result<(), WaitForAssetError> { + self.wait_for_asset_id(handle.id().untyped()).await + } + + /// Returns a future that will suspend until the specified asset and its dependencies finish + /// loading. + /// + /// # Errors + /// + /// This will return an error if the asset or any of its dependencies fail to load, + /// or if the asset has not been queued up to be loaded. + pub async fn wait_for_asset_untyped( + &self, + // NOTE: We take a reference to a handle so we know it will outlive the future, + // which ensures the handle won't be dropped while waiting for the asset. + handle: &UntypedHandle, + ) -> Result<(), WaitForAssetError> { + self.wait_for_asset_id(handle.id()).await + } + + /// Returns a future that will suspend until the specified asset and its dependencies finish + /// loading. + /// + /// Note that since an asset ID does not count as a reference to the asset, + /// the future returned from this method will *not* keep the asset alive. 
+ /// This may lead to the asset unexpectedly being dropped while you are waiting for it to + /// finish loading. + /// + /// When calling this method, make sure a strong handle is stored elsewhere to prevent the + /// asset from being dropped. + /// If you have access to an asset's strong [`Handle`], you should prefer to call + /// [`AssetServer::wait_for_asset`] + /// or [`wait_for_asset_untyped`](Self::wait_for_asset_untyped) to ensure the asset finishes + /// loading. + /// + /// # Errors + /// + /// This will return an error if the asset or any of its dependencies fail to load, + /// or if the asset has not been queued up to be loaded. + pub async fn wait_for_asset_id( + &self, + id: impl Into, + ) -> Result<(), WaitForAssetError> { + let id = id.into(); + core::future::poll_fn(move |cx| self.wait_for_asset_id_poll_fn(cx, id)).await + } + + /// Used by [`wait_for_asset_id`](AssetServer::wait_for_asset_id) in [`poll_fn`](core::future::poll_fn). + fn wait_for_asset_id_poll_fn( + &self, + cx: &mut core::task::Context<'_>, + id: UntypedAssetId, + ) -> Poll> { + let infos = self.data.infos.read(); + + let Some(info) = infos.get(id) else { + return Poll::Ready(Err(WaitForAssetError::NotLoaded)); + }; + + match (&info.load_state, &info.rec_dep_load_state) { + (LoadState::Loaded, RecursiveDependencyLoadState::Loaded) => Poll::Ready(Ok(())), + // Return an error immediately if the asset is not in the process of loading + (LoadState::NotLoaded, _) => Poll::Ready(Err(WaitForAssetError::NotLoaded)), + // If the asset is loading, leave our waker behind + (LoadState::Loading, _) + | (_, RecursiveDependencyLoadState::Loading) + | (LoadState::Loaded, RecursiveDependencyLoadState::NotLoaded) => { + // Check if our waker is already there + let has_waker = info + .waiting_tasks + .iter() + .any(|waker| waker.will_wake(cx.waker())); + + if has_waker { + return Poll::Pending; + } + + let mut infos = { + // Must drop read-only guard to acquire write guard + drop(infos); + self.data.infos.write() + }; + + let Some(info) = infos.get_mut(id) else { + return Poll::Ready(Err(WaitForAssetError::NotLoaded)); + }; + + // If the load state changed while reacquiring the lock, immediately + // reawaken the task + let is_loading = matches!( + (&info.load_state, &info.rec_dep_load_state), + (LoadState::Loading, _) + | (_, RecursiveDependencyLoadState::Loading) + | (LoadState::Loaded, RecursiveDependencyLoadState::NotLoaded) + ); + + if !is_loading { + cx.waker().wake_by_ref(); + } else { + // Leave our waker behind + info.waiting_tasks.push(cx.waker().clone()); + } + + Poll::Pending + } + (LoadState::Failed(error), _) => { + Poll::Ready(Err(WaitForAssetError::Failed(error.clone()))) + } + (_, RecursiveDependencyLoadState::Failed(error)) => { + Poll::Ready(Err(WaitForAssetError::DependencyFailed(error.clone()))) + } + } + } } /// A system that manages internal [`AssetServer`] events, such as finalizing asset loads. @@ -1349,6 +1488,11 @@ pub fn handle_internal_asset_events(world: &mut World) { .get(&id.type_id()) .expect("Asset event sender should exist"); sender(world, id); + if let Some(info) = infos.get_mut(id) { + for waker in info.waiting_tasks.drain(..) 
{ + waker.wake(); + } + } } InternalAssetEvent::Failed { id, path, error } => { infos.process_asset_fail(id, error.clone()); @@ -1379,10 +1523,10 @@ pub fn handle_internal_asset_events(world: &mut World) { infos: &AssetInfos, paths_to_reload: &mut HashSet>, ) { - if let Some(dependants) = infos.loader_dependants.get(asset_path) { - for dependant in dependants { - paths_to_reload.insert(dependant.to_owned()); - queue_ancestors(dependant, infos, paths_to_reload); + if let Some(dependents) = infos.loader_dependents.get(asset_path) { + for dependent in dependents { + paths_to_reload.insert(dependent.to_owned()); + queue_ancestors(dependent, infos, paths_to_reload); } } } @@ -1400,7 +1544,7 @@ pub fn handle_internal_asset_events(world: &mut World) { } }; - let mut paths_to_reload = HashSet::new(); + let mut paths_to_reload = >::default(); let mut handle_event = |source: AssetSourceId<'static>, event: AssetSourceEvent| { match event { // TODO: if the asset was processed and the processed file was changed, the first modified event @@ -1472,44 +1616,87 @@ pub(crate) enum InternalAssetEvent { } /// The load state of an asset. -#[derive(Component, Clone, Debug, PartialEq, Eq)] +#[derive(Component, Clone, Debug)] pub enum LoadState { /// The asset has not started loading yet NotLoaded, + /// The asset is in the process of loading. Loading, + /// The asset has been loaded and has been added to the [`World`] Loaded, + /// The asset failed to load. The underlying [`AssetLoadError`] is /// referenced by [`Arc`] clones in all related [`DependencyLoadState`]s /// and [`RecursiveDependencyLoadState`]s in the asset's dependency tree. Failed(Arc), } +impl LoadState { + /// Returns `true` if this instance is [`LoadState::Loading`] + pub fn is_loading(&self) -> bool { + matches!(self, Self::Loading) + } + + /// Returns `true` if this instance is [`LoadState::Loaded`] + pub fn is_loaded(&self) -> bool { + matches!(self, Self::Loaded) + } + + /// Returns `true` if this instance is [`LoadState::Failed`] + pub fn is_failed(&self) -> bool { + matches!(self, Self::Failed(_)) + } +} + /// The load state of an asset's dependencies. -#[derive(Component, Clone, Debug, Eq, PartialEq)] +#[derive(Component, Clone, Debug)] pub enum DependencyLoadState { /// The asset has not started loading yet NotLoaded, + /// Dependencies are still loading Loading, + /// Dependencies have all loaded Loaded, + /// One or more dependencies have failed to load. The underlying [`AssetLoadError`] /// is referenced by [`Arc`] clones in all related [`LoadState`] and /// [`RecursiveDependencyLoadState`]s in the asset's dependency tree. Failed(Arc), } +impl DependencyLoadState { + /// Returns `true` if this instance is [`DependencyLoadState::Loading`] + pub fn is_loading(&self) -> bool { + matches!(self, Self::Loading) + } + + /// Returns `true` if this instance is [`DependencyLoadState::Loaded`] + pub fn is_loaded(&self) -> bool { + matches!(self, Self::Loaded) + } + + /// Returns `true` if this instance is [`DependencyLoadState::Failed`] + pub fn is_failed(&self) -> bool { + matches!(self, Self::Failed(_)) + } +} + /// The recursive load state of an asset's dependencies. 
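// Sketch (not part of this diff) of the new `is_loading`/`is_loaded`/`is_failed` helpers, which
// replace direct equality checks now that the load-state enums carry an error and no longer
// derive `PartialEq`; the `describe` helper is illustrative only.
use bevy_asset::{AssetServer, UntypedAssetId};

fn describe(asset_server: &AssetServer, id: UntypedAssetId) -> &'static str {
    match asset_server.get_load_states(id) {
        None => "unknown asset",
        Some((load, _deps, rec_deps)) => {
            if load.is_failed() || rec_deps.is_failed() {
                "failed"
            } else if load.is_loaded() && rec_deps.is_loaded() {
                "fully loaded"
            } else {
                "still loading"
            }
        }
    }
}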
-#[derive(Component, Clone, Debug, Eq, PartialEq)] +#[derive(Component, Clone, Debug)] pub enum RecursiveDependencyLoadState { /// The asset has not started loading yet NotLoaded, + /// Dependencies in this asset's dependency tree are still loading Loading, + /// Dependencies in this asset's dependency tree have all loaded Loaded, + /// One or more dependencies have failed to load in this asset's dependency /// tree. The underlying [`AssetLoadError`] is referenced by [`Arc`] clones /// in all related [`LoadState`]s and [`DependencyLoadState`]s in the asset's @@ -1517,8 +1704,25 @@ pub enum RecursiveDependencyLoadState { Failed(Arc), } +impl RecursiveDependencyLoadState { + /// Returns `true` if this instance is [`RecursiveDependencyLoadState::Loading`] + pub fn is_loading(&self) -> bool { + matches!(self, Self::Loading) + } + + /// Returns `true` if this instance is [`RecursiveDependencyLoadState::Loaded`] + pub fn is_loaded(&self) -> bool { + matches!(self, Self::Loaded) + } + + /// Returns `true` if this instance is [`RecursiveDependencyLoadState::Failed`] + pub fn is_failed(&self) -> bool { + matches!(self, Self::Failed(_)) + } +} + /// An error that occurs during an [`Asset`] load. -#[derive(Error, Debug, Clone, PartialEq, Eq)] +#[derive(Error, Debug, Clone)] pub enum AssetLoadError { #[error("Requested handle of type {requested:?} for asset '{path}' does not match actual asset type '{actual_asset_name}', which used loader '{loader_name}'")] RequestedHandleTypeMismatch { @@ -1554,8 +1758,10 @@ pub enum AssetLoadError { error: Box, }, #[error("Asset '{path}' is configured to be processed. It cannot be loaded directly.")] + #[from(ignore)] CannotLoadProcessedAsset { path: AssetPath<'static> }, #[error("Asset '{path}' is configured to be ignored. It cannot be loaded.")] + #[from(ignore)] CannotLoadIgnoredAsset { path: AssetPath<'static> }, #[error("Failed to load asset '{path}', asset loader '{loader_name}' panicked")] AssetLoaderPanic { @@ -1586,18 +1792,6 @@ pub struct AssetLoaderError { error: Arc, } -impl PartialEq for AssetLoaderError { - /// Equality comparison for `AssetLoaderError::error` is not full (only through `TypeId`) - #[inline] - fn eq(&self, other: &Self) -> bool { - self.path == other.path - && self.loader_name == other.loader_name - && self.error.type_id() == other.error.type_id() - } -} - -impl Eq for AssetLoaderError {} - impl AssetLoaderError { pub fn path(&self) -> &AssetPath<'static> { &self.path @@ -1610,19 +1804,9 @@ pub struct AddAsyncError { error: Arc, } -impl PartialEq for AddAsyncError { - /// Equality comparison is not full (only through `TypeId`) - #[inline] - fn eq(&self, other: &Self) -> bool { - self.error.type_id() == other.error.type_id() - } -} - -impl Eq for AddAsyncError {} - /// An error that occurs when an [`AssetLoader`] is not registered for a given extension. #[derive(Error, Debug, Clone, PartialEq, Eq)] -#[error("no `AssetLoader` found{}", format_missing_asset_ext(.extensions))] +#[error("no `AssetLoader` found{}", format_missing_asset_ext(extensions))] pub struct MissingAssetLoaderForExtensionError { extensions: Vec, } @@ -1664,3 +1848,14 @@ impl core::fmt::Debug for AssetServer { /// This is appended to asset sources when loading a [`LoadedUntypedAsset`]. This provides a unique /// source for a given [`AssetPath`]. const UNTYPED_SOURCE_SUFFIX: &str = "--untyped"; + +/// An error when attempting to wait asynchronously for an [`Asset`] to load. 
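// Sketch (not part of this diff) of the new async waiting API on `AssetServer`; the
// `load_and_wait` helper is an assumption for illustration. Holding the strong `Handle`
// across the `.await` keeps the asset alive while waiting.
use bevy_asset::{Asset, AssetServer, Handle, WaitForAssetError};

async fn load_and_wait<A: Asset>(
    asset_server: &AssetServer,
    path: &'static str,
) -> Result<Handle<A>, WaitForAssetError> {
    let handle: Handle<A> = asset_server.load(path);
    // Suspends until the asset and all of its dependencies are loaded, or fails with
    // `WaitForAssetError` if loading fails or was never started.
    asset_server.wait_for_asset(&handle).await?;
    Ok(handle)
}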
+#[derive(Error, Debug, Clone)] +pub enum WaitForAssetError { + #[error("tried to wait for an asset that is not being loaded")] + NotLoaded, + #[error(transparent)] + Failed(Arc), + #[error(transparent)] + DependencyFailed(Arc), +} diff --git a/crates/bevy_audio/Cargo.toml b/crates/bevy_audio/Cargo.toml index ff858e168b3d76..7df10a1bcbd57c 100644 --- a/crates/bevy_audio/Cargo.toml +++ b/crates/bevy_audio/Cargo.toml @@ -23,13 +23,13 @@ bevy_derive = { path = "../bevy_derive", version = "0.15.0-dev" } bevy_utils = { path = "../bevy_utils", version = "0.15.0-dev" } # other -rodio = { version = "0.19", default-features = false } +rodio = { version = "0.20", default-features = false } [target.'cfg(target_os = "android")'.dependencies] cpal = { version = "0.15", optional = true } [target.'cfg(target_arch = "wasm32")'.dependencies] -rodio = { version = "0.19", default-features = false, features = [ +rodio = { version = "0.20", default-features = false, features = [ "wasm-bindgen", ] } diff --git a/crates/bevy_audio/src/audio.rs b/crates/bevy_audio/src/audio.rs index 58dc0e49a1e6be..89bd186e1acbf7 100644 --- a/crates/bevy_audio/src/audio.rs +++ b/crates/bevy_audio/src/audio.rs @@ -36,6 +36,12 @@ impl Volume { #[derive(Debug, Clone, Copy, Reflect)] pub enum PlaybackMode { /// Play the sound once. Do nothing when it ends. + /// + /// Note: It is not possible to reuse an `AudioPlayer` after it has finished playing and + /// the underlying `AudioSink` or `SpatialAudioSink` has been drained. + /// + /// To replay a sound, the audio components provided by `AudioPlayer` must be removed and + /// added again. Once, /// Repeat the sound forever. Loop, @@ -77,13 +83,18 @@ pub struct PlaybackSettings { impl Default for PlaybackSettings { fn default() -> Self { - // TODO: what should the default be: ONCE/DESPAWN/REMOVE? Self::ONCE } } impl PlaybackSettings { /// Will play the associated audio source once. + /// + /// Note: It is not possible to reuse an `AudioPlayer` after it has finished playing and + /// the underlying `AudioSink` or `SpatialAudioSink` has been drained. + /// + /// To replay a sound, the audio components provided by `AudioPlayer` must be removed and + /// added again. pub const ONCE: PlaybackSettings = PlaybackSettings { mode: PlaybackMode::Once, volume: Volume(1.0), @@ -264,6 +275,17 @@ where } } +impl AudioPlayer { + /// Creates a new [`AudioPlayer`] with the given [`Handle`]. + /// + /// For convenience reasons, this hard-codes the [`AudioSource`] type. If you want to + /// initialize an [`AudioPlayer`] with a different type, just initialize it directly using normal + /// tuple struct syntax. + pub fn new(source: Handle) -> Self { + Self(source) + } +} + /// Bundle for playing a sound. /// /// Insert this bundle onto an entity to trigger a sound source to begin playing. 
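// Sketch (not part of this diff) of spawning a one-shot sound with the new `AudioPlayer::new`
// constructor; the system name and the "sounds/hit.ogg" path are placeholders. As noted in the
// docs above, a finished `AudioPlayer` cannot simply be restarted, so `PlaybackSettings::DESPAWN`
// is used here to clean the entity up once playback ends.
use bevy_asset::AssetServer;
use bevy_audio::{AudioPlayer, PlaybackSettings};
use bevy_ecs::prelude::{Commands, Res};

fn play_hit_sound(mut commands: Commands, asset_server: Res<AssetServer>) {
    commands.spawn((
        AudioPlayer::new(asset_server.load("sounds/hit.ogg")),
        PlaybackSettings::DESPAWN,
    ));
}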
diff --git a/crates/bevy_audio/src/audio_output.rs b/crates/bevy_audio/src/audio_output.rs index c595be70ca6090..bbb9c5b6821336 100644 --- a/crates/bevy_audio/src/audio_output.rs +++ b/crates/bevy_audio/src/audio_output.rs @@ -2,7 +2,7 @@ use crate::{ AudioPlayer, Decodable, DefaultSpatialScale, GlobalVolume, PlaybackMode, PlaybackSettings, SpatialAudioSink, SpatialListener, }; -use bevy_asset::{Asset, Assets, Handle}; +use bevy_asset::{Asset, Assets}; use bevy_ecs::{prelude::*, system::SystemParam}; use bevy_hierarchy::DespawnRecursiveExt; use bevy_math::Vec3; @@ -101,7 +101,7 @@ pub(crate) fn play_queued_audio_system( query_nonplaying: Query< ( Entity, - &Handle, + &AudioPlayer, &PlaybackSettings, Option<&GlobalTransform>, ), @@ -119,7 +119,7 @@ pub(crate) fn play_queued_audio_system( }; for (entity, source_handle, settings, maybe_emitter_transform) in &query_nonplaying { - let Some(audio_source) = audio_sources.get(source_handle) else { + let Some(audio_source) = audio_sources.get(&source_handle.0) else { continue; }; // audio data is available (has loaded), begin playback and insert sink component @@ -236,19 +236,19 @@ pub(crate) fn cleanup_finished_audio( mut commands: Commands, query_nonspatial_despawn: Query< (Entity, &AudioSink), - (With, With>), + (With, With>), >, query_spatial_despawn: Query< (Entity, &SpatialAudioSink), - (With, With>), + (With, With>), >, query_nonspatial_remove: Query< (Entity, &AudioSink), - (With, With>), + (With, With>), >, query_spatial_remove: Query< (Entity, &SpatialAudioSink), - (With, With>), + (With, With>), >, ) { for (entity, sink) in &query_nonspatial_despawn { diff --git a/crates/bevy_audio/src/lib.rs b/crates/bevy_audio/src/lib.rs index 5120a341a1e91d..1519987a4b1555 100644 --- a/crates/bevy_audio/src/lib.rs +++ b/crates/bevy_audio/src/lib.rs @@ -21,7 +21,7 @@ //! //! fn play_background_audio(asset_server: Res, mut commands: Commands) { //! commands.spawn(( -//! AudioPlayer::(asset_server.load("background_audio.ogg")), +//! AudioPlayer::new(asset_server.load("background_audio.ogg")), //! PlaybackSettings::LOOP, //! )); //! 
} diff --git a/crates/bevy_color/Cargo.toml b/crates/bevy_color/Cargo.toml index 27f0e5bdbe6037..cb534729501616 100644 --- a/crates/bevy_color/Cargo.toml +++ b/crates/bevy_color/Cargo.toml @@ -7,22 +7,33 @@ homepage = "https://bevyengine.org" repository = "https://github.com/bevyengine/bevy" license = "MIT OR Apache-2.0" keywords = ["bevy", "color"] -rust-version = "1.76.0" +rust-version = "1.82.0" [dependencies] -bevy_math = { path = "../bevy_math", version = "0.15.0-dev", default-features = false } +bevy_math = { path = "../bevy_math", version = "0.15.0-dev", default-features = false, features = [ + "curve", +] } bevy_reflect = { path = "../bevy_reflect", version = "0.15.0-dev", features = [ "bevy", ], optional = true } bytemuck = { version = "1", features = ["derive"] } -serde = { version = "1.0", features = ["derive"], optional = true } -thiserror = "1.0" -wgpu-types = { version = "22", default-features = false, optional = true } -encase = { version = "0.10", default-features = false } +serde = { version = "1.0", features = [ + "derive", +], default-features = false, optional = true } +thiserror = { version = "2", default-features = false } +derive_more = { version = "1", default-features = false, features = ["from"] } +wgpu-types = { version = "23", default-features = false, optional = true } +encase = { version = "0.10", default-features = false, optional = true } [features] -default = ["bevy_reflect"] -serialize = ["serde"] +default = ["std", "bevy_reflect", "encase"] +std = ["alloc", "bevy_math/std", "serde?/std"] +alloc = ["bevy_math/alloc", "serde?/alloc"] +serialize = ["serde", "bevy_math/serialize"] +bevy_reflect = ["dep:bevy_reflect", "std"] +wgpu-types = ["dep:wgpu-types", "std"] +encase = ["dep:encase", "std"] +libm = ["bevy_math/libm"] [lints] workspace = true diff --git a/crates/bevy_color/src/color.rs b/crates/bevy_color/src/color.rs index 92fa2b98fd5592..d2e4cb792187c5 100644 --- a/crates/bevy_color/src/color.rs +++ b/crates/bevy_color/src/color.rs @@ -4,6 +4,7 @@ use crate::{ }; #[cfg(feature = "bevy_reflect")] use bevy_reflect::prelude::*; +use derive_more::derive::From; /// An enumerated type that can represent any of the color types in this crate. /// @@ -40,7 +41,7 @@ use bevy_reflect::prelude::*; /// due to its perceptual uniformity and broad support for Bevy's color operations. /// To avoid the cost of repeated conversion, and ensure consistent results where that is desired, /// first convert this [`Color`] into your desired color space. -#[derive(Debug, Clone, Copy, PartialEq)] +#[derive(Debug, Clone, Copy, PartialEq, From)] #[cfg_attr(feature = "bevy_reflect", derive(Reflect), reflect(PartialEq, Default))] #[cfg_attr(feature = "serialize", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr( @@ -122,7 +123,7 @@ impl Color { } /// Reads an array of floats to creates a new [`Color`] object storing a [`Srgba`] color with an alpha of 1.0. - pub fn srgb_from_array(array: [f32; 3]) -> Self { + pub const fn srgb_from_array(array: [f32; 3]) -> Self { Self::Srgba(Srgba { red: array[0], green: array[1], @@ -142,7 +143,7 @@ impl Color { /// Creates a new [`Color`] object storing a [`Srgba`] color from [`u8`] values. /// /// A value of 0 is interpreted as 0.0, and a value of 255 is interpreted as 1.0. 
- pub fn srgba_u8(red: u8, green: u8, blue: u8, alpha: u8) -> Self { + pub const fn srgba_u8(red: u8, green: u8, blue: u8, alpha: u8) -> Self { Self::Srgba(Srgba { red: red as f32 / 255.0, green: green as f32 / 255.0, @@ -162,7 +163,7 @@ impl Color { /// Creates a new [`Color`] object storing a [`Srgba`] color from [`u8`] values with an alpha of 1.0. /// /// A value of 0 is interpreted as 0.0, and a value of 255 is interpreted as 1.0. - pub fn srgb_u8(red: u8, green: u8, blue: u8) -> Self { + pub const fn srgb_u8(red: u8, green: u8, blue: u8) -> Self { Self::Srgba(Srgba { red: red as f32 / 255.0, green: green as f32 / 255.0, @@ -426,66 +427,6 @@ impl Alpha for Color { } } -impl From for Color { - fn from(value: Srgba) -> Self { - Self::Srgba(value) - } -} - -impl From for Color { - fn from(value: LinearRgba) -> Self { - Self::LinearRgba(value) - } -} - -impl From for Color { - fn from(value: Hsla) -> Self { - Self::Hsla(value) - } -} - -impl From for Color { - fn from(value: Hsva) -> Self { - Self::Hsva(value) - } -} - -impl From for Color { - fn from(value: Hwba) -> Self { - Self::Hwba(value) - } -} - -impl From for Color { - fn from(value: Oklaba) -> Self { - Self::Oklaba(value) - } -} - -impl From for Color { - fn from(value: Oklcha) -> Self { - Self::Oklcha(value) - } -} - -impl From for Color { - fn from(value: Lcha) -> Self { - Self::Lcha(value) - } -} - -impl From for Color { - fn from(value: Laba) -> Self { - Self::Laba(value) - } -} - -impl From for Color { - fn from(value: Xyza) -> Self { - Self::Xyza(value) - } -} - impl From for Srgba { fn from(value: Color) -> Self { match value { diff --git a/crates/bevy_color/src/color_difference.rs b/crates/bevy_color/src/color_difference.rs index a2bdf10e14549a..b0a00d2bd0a0a1 100644 --- a/crates/bevy_color/src/color_difference.rs +++ b/crates/bevy_color/src/color_difference.rs @@ -1,11 +1,13 @@ //! Module for calculating distance between two colors in the same color space. +use bevy_math::ops; + /// Calculate the distance between this and another color as if they were coordinates /// in a Euclidean space. Alpha is not considered in the distance calculation. pub trait EuclideanDistance: Sized { /// Distance from `self` to `other`. fn distance(&self, other: &Self) -> f32 { - self.distance_squared(other).sqrt() + ops::sqrt(self.distance_squared(other)) } /// Distance squared from `self` to `other`. diff --git a/crates/bevy_color/src/color_gradient.rs b/crates/bevy_color/src/color_gradient.rs index bb6457e6fdc684..759b33bf93e773 100644 --- a/crates/bevy_color/src/color_gradient.rs +++ b/crates/bevy_color/src/color_gradient.rs @@ -1,4 +1,5 @@ use crate::Mix; +use alloc::vec::Vec; use bevy_math::curve::{ cores::{EvenCore, EvenCoreError}, Curve, Interval, @@ -54,13 +55,21 @@ impl Curve for ColorCurve where T: Mix + Clone, { + #[inline] fn domain(&self) -> Interval { self.core.domain() } - fn sample_unchecked(&self, t: f32) -> T { + #[inline] + fn sample_clamped(&self, t: f32) -> T { + // `EvenCore::sample_with` clamps the input implicitly. self.core.sample_with(t, T::mix) } + + #[inline] + fn sample_unchecked(&self, t: f32) -> T { + self.sample_clamped(t) + } } #[cfg(test)] diff --git a/crates/bevy_color/src/color_ops.rs b/crates/bevy_color/src/color_ops.rs index 4fda6eb4e43b29..235c8c8bf3ae15 100644 --- a/crates/bevy_color/src/color_ops.rs +++ b/crates/bevy_color/src/color_ops.rs @@ -1,4 +1,4 @@ -use bevy_math::{Vec3, Vec4}; +use bevy_math::{ops, Vec3, Vec4}; /// Methods for changing the luminance of a color. 
Note that these methods are not /// guaranteed to produce consistent results across color spaces, @@ -90,7 +90,7 @@ pub trait Hue: Sized { /// Return a new version of this color with the hue channel rotated by the given degrees. fn rotate_hue(&self, degrees: f32) -> Self { - let rotated_hue = (self.hue() + degrees).rem_euclid(360.); + let rotated_hue = ops::rem_euclid(self.hue() + degrees, 360.); self.with_hue(rotated_hue) } } @@ -131,8 +131,8 @@ pub trait ColorToPacked { /// takes the shortest path around the color wheel, and that the result is always between /// 0 and 360. pub(crate) fn lerp_hue(a: f32, b: f32, t: f32) -> f32 { - let diff = (b - a + 180.0).rem_euclid(360.) - 180.; - (a + diff * t).rem_euclid(360.0) + let diff = ops::rem_euclid(b - a + 180.0, 360.) - 180.; + ops::rem_euclid(a + diff * t, 360.) } #[cfg(test)] diff --git a/crates/bevy_color/src/hwba.rs b/crates/bevy_color/src/hwba.rs index 078f56cfb7c9a5..459b5d82dc4fd6 100644 --- a/crates/bevy_color/src/hwba.rs +++ b/crates/bevy_color/src/hwba.rs @@ -5,7 +5,7 @@ use crate::{ Alpha, ColorToComponents, Gray, Hue, Lcha, LinearRgba, Mix, Srgba, StandardColor, Xyza, }; -use bevy_math::{Vec3, Vec4}; +use bevy_math::{ops, Vec3, Vec4}; #[cfg(feature = "bevy_reflect")] use bevy_reflect::prelude::*; @@ -239,7 +239,7 @@ impl From for Srgba { let v = 1. - blackness; let h = (hue % 360.) / 60.; - let i = h.floor(); + let i = ops::floor(h); let f = h - i; let i = i as u8; diff --git a/crates/bevy_color/src/lib.rs b/crates/bevy_color/src/lib.rs index f91080e389acbd..4a4a9596d545d8 100644 --- a/crates/bevy_color/src/lib.rs +++ b/crates/bevy_color/src/lib.rs @@ -4,6 +4,7 @@ html_logo_url = "https://bevyengine.org/assets/icon.png", html_favicon_url = "https://bevyengine.org/assets/icon.png" )] +#![cfg_attr(not(feature = "std"), no_std)] //! Representations of colors in various color spaces. //! @@ -89,8 +90,12 @@ //! println!("Hsla: {:?}", hsla); //! 
``` +#[cfg(feature = "alloc")] +extern crate alloc; + mod color; pub mod color_difference; +#[cfg(feature = "alloc")] mod color_gradient; mod color_ops; mod color_range; @@ -121,6 +126,7 @@ pub mod prelude { } pub use color::*; +#[cfg(feature = "alloc")] pub use color_gradient::*; pub use color_ops::*; pub use color_range::*; diff --git a/crates/bevy_color/src/linear_rgba.rs b/crates/bevy_color/src/linear_rgba.rs index 3a9a0fd5d2641b..d1781bfc4192c7 100644 --- a/crates/bevy_color/src/linear_rgba.rs +++ b/crates/bevy_color/src/linear_rgba.rs @@ -2,7 +2,7 @@ use crate::{ color_difference::EuclideanDistance, impl_componentwise_vector_space, Alpha, ColorToComponents, ColorToPacked, Gray, Luminance, Mix, StandardColor, }; -use bevy_math::{Vec3, Vec4}; +use bevy_math::{ops, Vec3, Vec4}; #[cfg(feature = "bevy_reflect")] use bevy_reflect::prelude::*; use bytemuck::{Pod, Zeroable}; @@ -302,11 +302,11 @@ impl ColorToComponents for LinearRgba { impl ColorToPacked for LinearRgba { fn to_u8_array(self) -> [u8; 4] { [self.red, self.green, self.blue, self.alpha] - .map(|v| (v.clamp(0.0, 1.0) * 255.0).round() as u8) + .map(|v| ops::round(v.clamp(0.0, 1.0) * 255.0) as u8) } fn to_u8_array_no_alpha(self) -> [u8; 3] { - [self.red, self.green, self.blue].map(|v| (v.clamp(0.0, 1.0) * 255.0).round() as u8) + [self.red, self.green, self.blue].map(|v| ops::round(v.clamp(0.0, 1.0) * 255.0) as u8) } fn from_u8_array(color: [u8; 4]) -> Self { @@ -332,6 +332,7 @@ impl From for wgpu_types::Color { // [`LinearRgba`] is intended to be used with shaders // So it's the only color type that implements [`ShaderType`] to make it easier to use inside shaders +#[cfg(feature = "encase")] impl encase::ShaderType for LinearRgba { type ExtraMetadata = (); @@ -353,6 +354,7 @@ impl encase::ShaderType for LinearRgba { const UNIFORM_COMPAT_ASSERT: fn() = || {}; } +#[cfg(feature = "encase")] impl encase::private::WriteInto for LinearRgba { fn write_into(&self, writer: &mut encase::private::Writer) { for el in &[self.red, self.green, self.blue, self.alpha] { @@ -361,6 +363,7 @@ impl encase::private::WriteInto for LinearRgba { } } +#[cfg(feature = "encase")] impl encase::private::ReadFrom for LinearRgba { fn read_from( &mut self, @@ -380,6 +383,7 @@ impl encase::private::ReadFrom for LinearRgba { } } +#[cfg(feature = "encase")] impl encase::private::CreateFrom for LinearRgba { fn create_from(reader: &mut encase::private::Reader) -> Self where @@ -400,6 +404,7 @@ impl encase::private::CreateFrom for LinearRgba { } } +#[cfg(feature = "encase")] impl encase::ShaderSize for LinearRgba {} #[cfg(test)] diff --git a/crates/bevy_color/src/srgba.rs b/crates/bevy_color/src/srgba.rs index 53c66a09753967..8f4549df3b563c 100644 --- a/crates/bevy_color/src/srgba.rs +++ b/crates/bevy_color/src/srgba.rs @@ -2,6 +2,8 @@ use crate::{ color_difference::EuclideanDistance, impl_componentwise_vector_space, Alpha, ColorToComponents, ColorToPacked, Gray, LinearRgba, Luminance, Mix, StandardColor, Xyza, }; +#[cfg(feature = "alloc")] +use alloc::{format, string::String}; use bevy_math::{ops, Vec3, Vec4}; #[cfg(feature = "bevy_reflect")] use bevy_reflect::prelude::*; @@ -167,6 +169,7 @@ impl Srgba { } /// Convert this color to CSS-style hexadecimal notation. 
+ #[cfg(feature = "alloc")] pub fn to_hex(&self) -> String { let [r, g, b, a] = self.to_u8_array(); match a { @@ -366,11 +369,11 @@ impl ColorToComponents for Srgba { impl ColorToPacked for Srgba { fn to_u8_array(self) -> [u8; 4] { [self.red, self.green, self.blue, self.alpha] - .map(|v| (v.clamp(0.0, 1.0) * 255.0).round() as u8) + .map(|v| ops::round(v.clamp(0.0, 1.0) * 255.0) as u8) } fn to_u8_array_no_alpha(self) -> [u8; 3] { - [self.red, self.green, self.blue].map(|v| (v.clamp(0.0, 1.0) * 255.0).round() as u8) + [self.red, self.green, self.blue].map(|v| ops::round(v.clamp(0.0, 1.0) * 255.0) as u8) } fn from_u8_array(color: [u8; 4]) -> Self { diff --git a/crates/bevy_core/Cargo.toml b/crates/bevy_core/Cargo.toml index aaac27b0020ac2..92dd9336560526 100644 --- a/crates/bevy_core/Cargo.toml +++ b/crates/bevy_core/Cargo.toml @@ -20,7 +20,6 @@ bevy_utils = { path = "../bevy_utils", version = "0.15.0-dev" } # other serde = { version = "1.0", optional = true } -uuid = "1.0" [features] default = ["bevy_reflect"] diff --git a/crates/bevy_core/src/name.rs b/crates/bevy_core/src/name.rs index 8c00762199dfae..70e7a81cef4f00 100644 --- a/crates/bevy_core/src/name.rs +++ b/crates/bevy_core/src/name.rs @@ -7,9 +7,9 @@ use alloc::borrow::Cow; use bevy_reflect::std_traits::ReflectDefault; #[cfg(feature = "bevy_reflect")] use bevy_reflect::Reflect; -use bevy_utils::AHasher; +use bevy_utils::FixedHasher; use core::{ - hash::{Hash, Hasher}, + hash::{BuildHasher, Hash, Hasher}, ops::Deref, }; @@ -80,9 +80,7 @@ impl Name { } fn update_hash(&mut self) { - let mut hasher = AHasher::default(); - self.name.hash(&mut hasher); - self.hash = hasher.finish(); + self.hash = FixedHasher.hash_one(&self.name); } } diff --git a/crates/bevy_core/src/task_pool_options.rs b/crates/bevy_core/src/task_pool_options.rs index 276902fb499daf..cdb0418a353389 100644 --- a/crates/bevy_core/src/task_pool_options.rs +++ b/crates/bevy_core/src/task_pool_options.rs @@ -1,9 +1,12 @@ use bevy_tasks::{AsyncComputeTaskPool, ComputeTaskPool, IoTaskPool, TaskPoolBuilder}; use bevy_utils::tracing::trace; +use alloc::sync::Arc; +use core::fmt::Debug; + /// Defines a simple way to determine how many threads to use given the number of remaining cores /// and number of total cores -#[derive(Clone, Debug)] +#[derive(Clone)] pub struct TaskPoolThreadAssignmentPolicy { /// Force using at least this many threads pub min_threads: usize, @@ -12,6 +15,22 @@ pub struct TaskPoolThreadAssignmentPolicy { /// Target using this percentage of total cores, clamped by `min_threads` and `max_threads`. It is /// permitted to use 1.0 to try to use all remaining threads pub percent: f32, + /// Callback that is invoked once for every created thread as it starts. + /// This configuration will be ignored under wasm platform. + pub on_thread_spawn: Option>, + /// Callback that is invoked once for every created thread as it terminates + /// This configuration will be ignored under wasm platform. 
+ pub on_thread_destroy: Option>, +} + +impl Debug for TaskPoolThreadAssignmentPolicy { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("TaskPoolThreadAssignmentPolicy") + .field("min_threads", &self.min_threads) + .field("max_threads", &self.max_threads) + .field("percent", &self.percent) + .finish() + } } impl TaskPoolThreadAssignmentPolicy { @@ -61,6 +80,8 @@ impl Default for TaskPoolOptions { min_threads: 1, max_threads: 4, percent: 0.25, + on_thread_spawn: None, + on_thread_destroy: None, }, // Use 25% of cores for async compute, at least 1, no more than 4 @@ -68,6 +89,8 @@ impl Default for TaskPoolOptions { min_threads: 1, max_threads: 4, percent: 0.25, + on_thread_spawn: None, + on_thread_destroy: None, }, // Use all remaining cores for compute (at least 1) @@ -75,6 +98,8 @@ impl Default for TaskPoolOptions { min_threads: 1, max_threads: usize::MAX, percent: 1.0, // This 1.0 here means "whatever is left over" + on_thread_spawn: None, + on_thread_destroy: None, }, } } @@ -108,10 +133,21 @@ impl TaskPoolOptions { remaining_threads = remaining_threads.saturating_sub(io_threads); IoTaskPool::get_or_init(|| { - TaskPoolBuilder::default() + let mut builder = TaskPoolBuilder::default() .num_threads(io_threads) - .thread_name("IO Task Pool".to_string()) - .build() + .thread_name("IO Task Pool".to_string()); + + #[cfg(not(target_arch = "wasm32"))] + { + if let Some(f) = self.io.on_thread_spawn.clone() { + builder = builder.on_thread_spawn(move || f()); + } + if let Some(f) = self.io.on_thread_destroy.clone() { + builder = builder.on_thread_destroy(move || f()); + } + } + + builder.build() }); } @@ -125,10 +161,21 @@ impl TaskPoolOptions { remaining_threads = remaining_threads.saturating_sub(async_compute_threads); AsyncComputeTaskPool::get_or_init(|| { - TaskPoolBuilder::default() + let mut builder = TaskPoolBuilder::default() .num_threads(async_compute_threads) - .thread_name("Async Compute Task Pool".to_string()) - .build() + .thread_name("Async Compute Task Pool".to_string()); + + #[cfg(not(target_arch = "wasm32"))] + { + if let Some(f) = self.async_compute.on_thread_spawn.clone() { + builder = builder.on_thread_spawn(move || f()); + } + if let Some(f) = self.async_compute.on_thread_destroy.clone() { + builder = builder.on_thread_destroy(move || f()); + } + } + + builder.build() }); } @@ -142,10 +189,21 @@ impl TaskPoolOptions { trace!("Compute Threads: {}", compute_threads); ComputeTaskPool::get_or_init(|| { - TaskPoolBuilder::default() + let mut builder = TaskPoolBuilder::default() .num_threads(compute_threads) - .thread_name("Compute Task Pool".to_string()) - .build() + .thread_name("Compute Task Pool".to_string()); + + #[cfg(not(target_arch = "wasm32"))] + { + if let Some(f) = self.compute.on_thread_spawn.clone() { + builder = builder.on_thread_spawn(move || f()); + } + if let Some(f) = self.compute.on_thread_destroy.clone() { + builder = builder.on_thread_destroy(move || f()); + } + } + + builder.build() }); } } diff --git a/crates/bevy_core_pipeline/Cargo.toml b/crates/bevy_core_pipeline/Cargo.toml index 9db01932f402fe..6298abb35d759c 100644 --- a/crates/bevy_core_pipeline/Cargo.toml +++ b/crates/bevy_core_pipeline/Cargo.toml @@ -13,12 +13,12 @@ license = "MIT OR Apache-2.0" keywords = ["bevy"] [features] -dds = ["bevy_render/dds"] +dds = ["bevy_render/dds", "bevy_image/dds"] trace = [] webgl = [] webgpu = [] -tonemapping_luts = ["bevy_render/ktx2", "bevy_render/zstd"] -smaa_luts = ["bevy_render/ktx2", "bevy_render/zstd"] +tonemapping_luts = 
["bevy_render/ktx2", "bevy_image/ktx2", "bevy_image/zstd"] +smaa_luts = ["bevy_render/ktx2", "bevy_image/ktx2", "bevy_image/zstd"] [dependencies] # bevy @@ -28,18 +28,20 @@ bevy_core = { path = "../bevy_core", version = "0.15.0-dev" } bevy_color = { path = "../bevy_color", version = "0.15.0-dev" } bevy_derive = { path = "../bevy_derive", version = "0.15.0-dev" } bevy_ecs = { path = "../bevy_ecs", version = "0.15.0-dev" } +bevy_image = { path = "../bevy_image", version = "0.15.0-dev" } bevy_reflect = { path = "../bevy_reflect", version = "0.15.0-dev" } bevy_render = { path = "../bevy_render", version = "0.15.0-dev" } bevy_transform = { path = "../bevy_transform", version = "0.15.0-dev" } bevy_math = { path = "../bevy_math", version = "0.15.0-dev" } bevy_utils = { path = "../bevy_utils", version = "0.15.0-dev" } +bevy_window = { path = "../bevy_window", version = "0.15.0-dev" } serde = { version = "1", features = ["derive"] } bitflags = "2.3" radsort = "0.1" nonmax = "0.5" smallvec = "1" -thiserror = "1.0" +thiserror = { version = "2", default-features = false } [lints] workspace = true diff --git a/crates/bevy_core_pipeline/src/auto_exposure/buffers.rs b/crates/bevy_core_pipeline/src/auto_exposure/buffers.rs index 4991a0925bee93..84b4011d2c557e 100644 --- a/crates/bevy_core_pipeline/src/auto_exposure/buffers.rs +++ b/crates/bevy_core_pipeline/src/auto_exposure/buffers.rs @@ -2,7 +2,7 @@ use bevy_ecs::prelude::*; use bevy_render::{ render_resource::{StorageBuffer, UniformBuffer}, renderer::{RenderDevice, RenderQueue}, - world_sync::RenderEntity, + sync_world::RenderEntity, Extract, }; use bevy_utils::{Entry, HashMap}; @@ -27,13 +27,13 @@ pub(super) struct ExtractedStateBuffers { pub(super) fn extract_buffers( mut commands: Commands, - changed: Extract>>, + changed: Extract>>, mut removed: Extract>, ) { commands.insert_resource(ExtractedStateBuffers { changed: changed .iter() - .map(|(entity, settings)| (entity.id(), settings.clone())) + .map(|(entity, settings)| (entity, settings.clone())) .collect(), removed: removed.read().collect(), }); diff --git a/crates/bevy_core_pipeline/src/auto_exposure/compensation_curve.rs b/crates/bevy_core_pipeline/src/auto_exposure/compensation_curve.rs index 67dcead57a834b..25ec27cee4df25 100644 --- a/crates/bevy_core_pipeline/src/auto_exposure/compensation_curve.rs +++ b/crates/bevy_core_pipeline/src/auto_exposure/compensation_curve.rs @@ -191,6 +191,7 @@ impl RenderAsset for GpuAutoExposureCompensationCurve { fn prepare_asset( source: Self::SourceAsset, + _: AssetId, (render_device, render_queue): &mut SystemParamItem, ) -> Result> { let texture = render_device.create_texture_with_data( diff --git a/crates/bevy_core_pipeline/src/auto_exposure/pipeline.rs b/crates/bevy_core_pipeline/src/auto_exposure/pipeline.rs index 410b7deb6d47e0..87d6abd8cf8c5c 100644 --- a/crates/bevy_core_pipeline/src/auto_exposure/pipeline.rs +++ b/crates/bevy_core_pipeline/src/auto_exposure/pipeline.rs @@ -3,11 +3,11 @@ use super::compensation_curve::{ }; use bevy_asset::prelude::*; use bevy_ecs::prelude::*; +use bevy_image::Image; use bevy_render::{ globals::GlobalsUniform, render_resource::{binding_types::*, *}, renderer::RenderDevice, - texture::Image, view::ViewUniform, }; use core::num::NonZero; @@ -89,6 +89,7 @@ impl SpecializedComputePipeline for AutoExposurePipeline { AutoExposurePass::Average => "compute_average".into(), }, push_constant_ranges: vec![], + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/auto_exposure/settings.rs 
b/crates/bevy_core_pipeline/src/auto_exposure/settings.rs index 066e8d3c8867bb..91bdf836eebee8 100644 --- a/crates/bevy_core_pipeline/src/auto_exposure/settings.rs +++ b/crates/bevy_core_pipeline/src/auto_exposure/settings.rs @@ -3,8 +3,9 @@ use core::ops::RangeInclusive; use super::compensation_curve::AutoExposureCompensationCurve; use bevy_asset::Handle; use bevy_ecs::{prelude::Component, reflect::ReflectComponent}; +use bevy_image::Image; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; -use bevy_render::{extract_component::ExtractComponent, texture::Image}; +use bevy_render::extract_component::ExtractComponent; use bevy_utils::default; /// Component that enables auto exposure for an HDR-enabled 2d or 3d camera. diff --git a/crates/bevy_core_pipeline/src/blit/mod.rs b/crates/bevy_core_pipeline/src/blit/mod.rs index d11a99cbb329d3..96c0394f3034ae 100644 --- a/crates/bevy_core_pipeline/src/blit/mod.rs +++ b/crates/bevy_core_pipeline/src/blit/mod.rs @@ -98,6 +98,7 @@ impl SpecializedRenderPipeline for BlitPipeline { ..Default::default() }, push_constant_ranges: Vec::new(), + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/bloom/downsampling_pipeline.rs b/crates/bevy_core_pipeline/src/bloom/downsampling_pipeline.rs index 28d55d360a95fb..e3efe5cad8946f 100644 --- a/crates/bevy_core_pipeline/src/bloom/downsampling_pipeline.rs +++ b/crates/bevy_core_pipeline/src/bloom/downsampling_pipeline.rs @@ -127,6 +127,7 @@ impl SpecializedRenderPipeline for BloomDownsamplingPipeline { depth_stencil: None, multisample: MultisampleState::default(), push_constant_ranges: Vec::new(), + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/bloom/mod.rs b/crates/bevy_core_pipeline/src/bloom/mod.rs index ff642c1f2f9af7..bfd7ee22dbd105 100644 --- a/crates/bevy_core_pipeline/src/bloom/mod.rs +++ b/crates/bevy_core_pipeline/src/bloom/mod.rs @@ -39,7 +39,7 @@ use upsampling_pipeline::{ const BLOOM_SHADER_HANDLE: Handle = Handle::weak_from_u128(929599476923908); -const BLOOM_TEXTURE_FORMAT: TextureFormat = TextureFormat::Rg11b10Float; +const BLOOM_TEXTURE_FORMAT: TextureFormat = TextureFormat::Rg11b10Ufloat; pub struct BloomPlugin; diff --git a/crates/bevy_core_pipeline/src/bloom/settings.rs b/crates/bevy_core_pipeline/src/bloom/settings.rs index 3fc35dc4d51056..effa135677f3bf 100644 --- a/crates/bevy_core_pipeline/src/bloom/settings.rs +++ b/crates/bevy_core_pipeline/src/bloom/settings.rs @@ -226,7 +226,9 @@ impl ExtractComponent for Bloom { camera.is_active, camera.hdr, ) { - (Some(URect { min: origin, .. }), Some(size), Some(target_size), true, true) => { + (Some(URect { min: origin, .. 
}), Some(size), Some(target_size), true, true) + if size.x != 0 && size.y != 0 => + { let threshold = bloom.prefilter.threshold; let threshold_softness = bloom.prefilter.threshold_softness; let knee = threshold * threshold_softness.clamp(0.0, 1.0); diff --git a/crates/bevy_core_pipeline/src/bloom/upsampling_pipeline.rs b/crates/bevy_core_pipeline/src/bloom/upsampling_pipeline.rs index 91a23310ef81ca..b63a3eb633485d 100644 --- a/crates/bevy_core_pipeline/src/bloom/upsampling_pipeline.rs +++ b/crates/bevy_core_pipeline/src/bloom/upsampling_pipeline.rs @@ -124,6 +124,7 @@ impl SpecializedRenderPipeline for BloomUpsamplingPipeline { depth_stencil: None, multisample: MultisampleState::default(), push_constant_ranges: Vec::new(), + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/contrast_adaptive_sharpening/mod.rs b/crates/bevy_core_pipeline/src/contrast_adaptive_sharpening/mod.rs index 71c9cc2bb2a617..fbc3ecfec3f756 100644 --- a/crates/bevy_core_pipeline/src/contrast_adaptive_sharpening/mod.rs +++ b/crates/bevy_core_pipeline/src/contrast_adaptive_sharpening/mod.rs @@ -6,6 +6,7 @@ use crate::{ use bevy_app::prelude::*; use bevy_asset::{load_internal_asset, Handle}; use bevy_ecs::{prelude::*, query::QueryItem}; +use bevy_image::BevyDefault as _; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; use bevy_render::{ extract_component::{ExtractComponent, ExtractComponentPlugin, UniformComponentPlugin}, @@ -16,7 +17,6 @@ use bevy_render::{ *, }, renderer::RenderDevice, - texture::BevyDefault, view::{ExtractedView, ViewTarget}, Render, RenderApp, RenderSet, }; @@ -233,6 +233,7 @@ impl SpecializedRenderPipeline for CasPipeline { depth_stencil: None, multisample: MultisampleState::default(), push_constant_ranges: Vec::new(), + zero_initialize_workgroup_memory: false, } } } @@ -242,14 +243,22 @@ fn prepare_cas_pipelines( pipeline_cache: Res, mut pipelines: ResMut>, sharpening_pipeline: Res, - views: Query<(Entity, &ExtractedView, &DenoiseCas), With>, + views: Query< + (Entity, &ExtractedView, &DenoiseCas), + Or<(Added, Changed)>, + >, + mut removals: RemovedComponents, ) { - for (entity, view, cas) in &views { + for entity in removals.read() { + commands.entity(entity).remove::(); + } + + for (entity, view, denoise_cas) in &views { let pipeline_id = pipelines.specialize( &pipeline_cache, &sharpening_pipeline, CasPipelineKey { - denoise: cas.0, + denoise: denoise_cas.0, texture_format: if view.hdr { ViewTarget::TEXTURE_FORMAT_HDR } else { diff --git a/crates/bevy_core_pipeline/src/contrast_adaptive_sharpening/robust_contrast_adaptive_sharpening.wgsl b/crates/bevy_core_pipeline/src/contrast_adaptive_sharpening/robust_contrast_adaptive_sharpening.wgsl index 252d97c9d6c3e8..03b29976e7ed7d 100644 --- a/crates/bevy_core_pipeline/src/contrast_adaptive_sharpening/robust_contrast_adaptive_sharpening.wgsl +++ b/crates/bevy_core_pipeline/src/contrast_adaptive_sharpening/robust_contrast_adaptive_sharpening.wgsl @@ -65,7 +65,7 @@ fn fragment(in: FullscreenVertexOutput) -> @location(0) vec4 { let b = textureSample(screenTexture, samp, in.uv, vec2(0, -1)).rgb; let d = textureSample(screenTexture, samp, in.uv, vec2(-1, 0)).rgb; // We need the alpha value of the pixel we're working on for the output - let e = textureSample(screenTexture, samp, in.uv).rgbw; + let e = textureSample(screenTexture, samp, in.uv).rgba; let f = textureSample(screenTexture, samp, in.uv, vec2(1, 0)).rgb; let h = textureSample(screenTexture, samp, in.uv, vec2(0, 1)).rgb; // Min and max of ring. 
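The `prepare_cas_pipelines` hunk above moves the view query behind an `Or<(Added<..>, Changed<..>)>` filter and pairs it with `RemovedComponents`, so the cached pipeline component is only rebuilt when the sharpening settings actually change and is cleaned up when they are removed. A generic sketch of that prepare-system pattern in `bevy_ecs` terms (the `Sharpening` and `PreparedPipeline` components and the system name are hypothetical placeholders, not the real render-world types):

```rust
use bevy_ecs::prelude::*;

/// Hypothetical per-view settings (stands in for the CAS/denoise settings).
#[derive(Component)]
struct Sharpening {
    denoise: bool,
}

/// Hypothetical cached result (stands in for the specialized pipeline ID).
#[derive(Component)]
struct PreparedPipeline {
    denoise: bool,
}

/// Re-prepare only for views whose settings are new or changed, and drop the
/// cached component for views whose settings were removed.
fn prepare_sharpening(
    mut commands: Commands,
    views: Query<(Entity, &Sharpening), Or<(Added<Sharpening>, Changed<Sharpening>)>>,
    mut removals: RemovedComponents<Sharpening>,
) {
    for entity in removals.read() {
        // Same cleanup shape as the diff: remove the cached component so the
        // render world does not hold onto stale state.
        commands.entity(entity).remove::<PreparedPipeline>();
    }
    for (entity, sharpening) in &views {
        commands.entity(entity).insert(PreparedPipeline {
            denoise: sharpening.denoise,
        });
    }
}
```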
diff --git a/crates/bevy_core_pipeline/src/core_2d/camera_2d.rs b/crates/bevy_core_pipeline/src/core_2d/camera_2d.rs index 03ddf5c0739154..9f8073e3f51dfd 100644 --- a/crates/bevy_core_pipeline/src/core_2d/camera_2d.rs +++ b/crates/bevy_core_pipeline/src/core_2d/camera_2d.rs @@ -1,10 +1,12 @@ +#![expect(deprecated)] + use crate::{ core_2d::graph::Core2d, tonemapping::{DebandDither, Tonemapping}, }; use bevy_ecs::prelude::*; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; -use bevy_render::world_sync::SyncToRenderWorld; +use bevy_render::sync_world::SyncToRenderWorld; use bevy_render::{ camera::{ Camera, CameraMainTextureUsages, CameraProjection, CameraRenderGraph, @@ -17,12 +19,25 @@ use bevy_render::{ }; use bevy_transform::prelude::{GlobalTransform, Transform}; +/// A 2D camera component. Enables the 2D render graph for a [`Camera`]. #[derive(Component, Default, Reflect, Clone, ExtractComponent)] #[extract_component_filter(With)] #[reflect(Component, Default)] +#[require( + Camera, + DebandDither, + CameraRenderGraph(|| CameraRenderGraph::new(Core2d)), + OrthographicProjection(OrthographicProjection::default_2d), + Frustum(|| OrthographicProjection::default_2d().compute_frustum(&GlobalTransform::from(Transform::default()))), + Tonemapping(|| Tonemapping::None), +)] pub struct Camera2d; #[derive(Bundle, Clone)] +#[deprecated( + since = "0.15.0", + note = "Use the `Camera2d` component instead. Inserting it will now also insert the other components required by it automatically." +)] pub struct Camera2dBundle { pub camera: Camera, pub camera_render_graph: CameraRenderGraph, diff --git a/crates/bevy_core_pipeline/src/core_2d/mod.rs b/crates/bevy_core_pipeline/src/core_2d/mod.rs index 9f07f19bf8a81e..d57134aa3ec07c 100644 --- a/crates/bevy_core_pipeline/src/core_2d/mod.rs +++ b/crates/bevy_core_pipeline/src/core_2d/mod.rs @@ -33,11 +33,15 @@ pub mod graph { use core::ops::Range; use bevy_asset::UntypedAssetId; +use bevy_render::{ + batching::gpu_preprocessing::GpuPreprocessingMode, render_phase::PhaseItemBinKey, +}; use bevy_utils::HashMap; pub use camera_2d::*; pub use main_opaque_pass_2d_node::*; pub use main_transparent_pass_2d_node::*; +use crate::{tonemapping::TonemappingNode, upscaling::UpscalingNode}; use bevy_app::{App, Plugin}; use bevy_ecs::{entity::EntityHashSet, prelude::*}; use bevy_math::FloatOrd; @@ -55,14 +59,12 @@ use bevy_render::{ TextureFormat, TextureUsages, }, renderer::RenderDevice, + sync_world::{MainEntity, RenderEntity}, texture::TextureCache, view::{Msaa, ViewDepthTexture}, - world_sync::RenderEntity, Extract, ExtractSchedule, Render, RenderApp, RenderSet, }; -use crate::{tonemapping::TonemappingNode, upscaling::UpscalingNode}; - use self::graph::{Core2d, Node2d}; pub const CORE_2D_DEPTH_FORMAT: TextureFormat = TextureFormat::Depth32Float; @@ -129,7 +131,7 @@ pub struct Opaque2d { pub key: Opaque2dBinKey, /// An entity from which data will be fetched, including the mesh if /// applicable. - pub representative_entity: Entity, + pub representative_entity: (Entity, MainEntity), /// The ranges of instances. 
pub batch_range: Range, /// An extra index, which is either a dynamic offset or an index in the @@ -153,10 +155,22 @@ pub struct Opaque2dBinKey { pub material_bind_group_id: Option, } +impl PhaseItemBinKey for Opaque2dBinKey { + type BatchSetKey = (); + + fn get_batch_set_key(&self) -> Option { + None + } +} + impl PhaseItem for Opaque2d { #[inline] fn entity(&self) -> Entity { - self.representative_entity + self.representative_entity.0 + } + + fn main_entity(&self) -> MainEntity { + self.representative_entity.1 } #[inline] @@ -175,7 +189,7 @@ impl PhaseItem for Opaque2d { } fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } fn batch_range_and_extra_index_mut(&mut self) -> (&mut Range, &mut PhaseItemExtraIndex) { @@ -188,7 +202,7 @@ impl BinnedPhaseItem for Opaque2d { fn new( key: Self::BinKey, - representative_entity: Entity, + representative_entity: (Entity, MainEntity), batch_range: Range, extra_index: PhaseItemExtraIndex, ) -> Self { @@ -214,7 +228,7 @@ pub struct AlphaMask2d { pub key: AlphaMask2dBinKey, /// An entity from which data will be fetched, including the mesh if /// applicable. - pub representative_entity: Entity, + pub representative_entity: (Entity, MainEntity), /// The ranges of instances. pub batch_range: Range, /// An extra index, which is either a dynamic offset or an index in the @@ -241,7 +255,12 @@ pub struct AlphaMask2dBinKey { impl PhaseItem for AlphaMask2d { #[inline] fn entity(&self) -> Entity { - self.representative_entity + self.representative_entity.0 + } + + #[inline] + fn main_entity(&self) -> MainEntity { + self.representative_entity.1 } #[inline] @@ -260,7 +279,7 @@ impl PhaseItem for AlphaMask2d { } fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } fn batch_range_and_extra_index_mut(&mut self) -> (&mut Range, &mut PhaseItemExtraIndex) { @@ -273,7 +292,7 @@ impl BinnedPhaseItem for AlphaMask2d { fn new( key: Self::BinKey, - representative_entity: Entity, + representative_entity: (Entity, MainEntity), batch_range: Range, extra_index: PhaseItemExtraIndex, ) -> Self { @@ -286,6 +305,14 @@ impl BinnedPhaseItem for AlphaMask2d { } } +impl PhaseItemBinKey for AlphaMask2dBinKey { + type BatchSetKey = (); + + fn get_batch_set_key(&self) -> Option { + None + } +} + impl CachedRenderPipelinePhaseItem for AlphaMask2d { #[inline] fn cached_pipeline(&self) -> CachedRenderPipelineId { @@ -296,7 +323,7 @@ impl CachedRenderPipelinePhaseItem for AlphaMask2d { /// Transparent 2D [`SortedPhaseItem`]s. 
pub struct Transparent2d { pub sort_key: FloatOrd, - pub entity: Entity, + pub entity: (Entity, MainEntity), pub pipeline: CachedRenderPipelineId, pub draw_function: DrawFunctionId, pub batch_range: Range, @@ -306,7 +333,12 @@ pub struct Transparent2d { impl PhaseItem for Transparent2d { #[inline] fn entity(&self) -> Entity { - self.entity + self.entity.0 + } + + #[inline] + fn main_entity(&self) -> MainEntity { + self.entity.1 } #[inline] @@ -326,7 +358,7 @@ impl PhaseItem for Transparent2d { #[inline] fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } #[inline] @@ -361,7 +393,7 @@ pub fn extract_core_2d_camera_phases( mut transparent_2d_phases: ResMut>, mut opaque_2d_phases: ResMut>, mut alpha_mask_2d_phases: ResMut>, - cameras_2d: Extract>>, + cameras_2d: Extract>>, mut live_entities: Local, ) { live_entities.clear(); @@ -370,10 +402,9 @@ pub fn extract_core_2d_camera_phases( if !camera.is_active { continue; } - let entity = entity.id(); transparent_2d_phases.insert_or_clear(entity); - opaque_2d_phases.insert_or_clear(entity); - alpha_mask_2d_phases.insert_or_clear(entity); + opaque_2d_phases.insert_or_clear(entity, GpuPreprocessingMode::None); + alpha_mask_2d_phases.insert_or_clear(entity, GpuPreprocessingMode::None); live_entities.insert(entity); } @@ -392,7 +423,7 @@ pub fn prepare_core_2d_depth_textures( opaque_2d_phases: Res>, views_2d: Query<(Entity, &ExtractedCamera, &Msaa), (With,)>, ) { - let mut textures = HashMap::default(); + let mut textures = >::default(); for (view, camera, msaa) in &views_2d { if !opaque_2d_phases.contains_key(&view) || !transparent_2d_phases.contains_key(&view) { continue; diff --git a/crates/bevy_core_pipeline/src/core_3d/camera_3d.rs b/crates/bevy_core_pipeline/src/core_3d/camera_3d.rs index 454892a30684c0..2053b968828172 100644 --- a/crates/bevy_core_pipeline/src/core_3d/camera_3d.rs +++ b/crates/bevy_core_pipeline/src/core_3d/camera_3d.rs @@ -1,3 +1,5 @@ +#![expect(deprecated)] + use crate::{ core_3d::graph::Core3d, tonemapping::{DebandDither, Tonemapping}, @@ -9,18 +11,28 @@ use bevy_render::{ extract_component::ExtractComponent, primitives::Frustum, render_resource::{LoadOp, TextureUsages}, + sync_world::SyncToRenderWorld, view::{ColorGrading, Msaa, VisibleEntities}, - world_sync::SyncToRenderWorld, }; use bevy_transform::prelude::{GlobalTransform, Transform}; use serde::{Deserialize, Serialize}; -/// Configuration for the "main 3d render graph". -/// The camera coordinate space is right-handed x-right, y-up, z-back. +/// A 3D camera component. Enables the main 3D render graph for a [`Camera`]. +/// +/// The camera coordinate space is right-handed X-right, Y-up, Z-back. /// This means "forward" is -Z. #[derive(Component, Reflect, Clone, ExtractComponent)] #[extract_component_filter(With)] #[reflect(Component, Default)] +#[require( + Camera, + DebandDither(|| DebandDither::Enabled), + CameraRenderGraph(|| CameraRenderGraph::new(Core3d)), + Projection, + Tonemapping, + ColorGrading, + Exposure +)] pub struct Camera3d { /// The depth clear operation to perform for the main 3d pass. pub depth_load_op: Camera3dDepthLoadOp, @@ -30,7 +42,7 @@ pub struct Camera3d { /// /// Roughly corresponds to how many “layers of transparency” are rendered for screen space /// specular transmissive objects. Each step requires making one additional - /// texture copy, so it's recommended to keep this number to a resonably low value. Defaults to `1`. 
+ /// texture copy, so it's recommended to keep this number to a reasonably low value. Defaults to `1`. /// /// ### Notes /// @@ -139,6 +151,10 @@ pub enum ScreenSpaceTransmissionQuality { /// The camera coordinate space is right-handed x-right, y-up, z-back. /// This means "forward" is -Z. #[derive(Bundle, Clone)] +#[deprecated( + since = "0.15.0", + note = "Use the `Camera3d` component instead. Inserting it will now also insert the other components required by it automatically." +)] pub struct Camera3dBundle { pub camera: Camera, pub camera_render_graph: CameraRenderGraph, diff --git a/crates/bevy_core_pipeline/src/core_3d/mod.rs b/crates/bevy_core_pipeline/src/core_3d/mod.rs index 71f1f031979d51..2fb0cfac437b19 100644 --- a/crates/bevy_core_pipeline/src/core_3d/mod.rs +++ b/crates/bevy_core_pipeline/src/core_3d/mod.rs @@ -65,14 +65,21 @@ pub const DEPTH_TEXTURE_SAMPLING_SUPPORTED: bool = true; use core::ops::Range; -use bevy_asset::{AssetId, UntypedAssetId}; -use bevy_color::LinearRgba; +use bevy_render::{ + batching::gpu_preprocessing::{GpuPreprocessingMode, GpuPreprocessingSupport}, + mesh::allocator::SlabId, + render_phase::PhaseItemBinKey, + view::GpuCulling, +}; pub use camera_3d::*; pub use main_opaque_pass_3d_node::*; pub use main_transparent_pass_3d_node::*; use bevy_app::{App, Plugin, PostUpdate}; +use bevy_asset::{AssetId, UntypedAssetId}; +use bevy_color::LinearRgba; use bevy_ecs::{entity::EntityHashSet, prelude::*}; +use bevy_image::{BevyDefault, Image}; use bevy_math::FloatOrd; use bevy_render::{ camera::{Camera, ExtractedCamera}, @@ -85,13 +92,13 @@ use bevy_render::{ ViewSortedRenderPhases, }, render_resource::{ - BindGroupId, CachedRenderPipelineId, Extent3d, FilterMode, Sampler, SamplerDescriptor, - Texture, TextureDescriptor, TextureDimension, TextureFormat, TextureUsages, TextureView, + CachedRenderPipelineId, Extent3d, FilterMode, Sampler, SamplerDescriptor, Texture, + TextureDescriptor, TextureDimension, TextureFormat, TextureUsages, TextureView, }, renderer::RenderDevice, - texture::{BevyDefault, ColorAttachment, Image, TextureCache}, + sync_world::{MainEntity, RenderEntity}, + texture::{ColorAttachment, TextureCache}, view::{ExtractedView, ViewDepthTexture, ViewTarget}, - world_sync::RenderEntity, Extract, ExtractSchedule, Render, RenderApp, RenderSet, }; use bevy_utils::{tracing::warn, HashMap}; @@ -214,7 +221,7 @@ pub struct Opaque3d { pub key: Opaque3dBinKey, /// An entity from which data will be fetched, including the mesh if /// applicable. - pub representative_entity: Entity, + pub representative_entity: (Entity, MainEntity), /// The ranges of instances. pub batch_range: Range, /// An extra index, which is either a dynamic offset or an index in the @@ -222,39 +229,79 @@ pub struct Opaque3d { pub extra_index: PhaseItemExtraIndex, } -/// Data that must be identical in order to batch phase items together. +/// Information that must be identical in order to place opaque meshes in the +/// same *batch set*. +/// +/// A batch set is a set of batches that can be multi-drawn together, if +/// multi-draw is in use. #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Opaque3dBinKey { +pub struct Opaque3dBatchSetKey { /// The identifier of the render pipeline. pub pipeline: CachedRenderPipelineId, /// The function used to draw. pub draw_function: DrawFunctionId, + /// The ID of a bind group specific to the material instance. + /// + /// In the case of PBR, this is the `MaterialBindGroupIndex`. 
+ pub material_bind_group_index: Option, + + /// The ID of the slab of GPU memory that contains vertex data. + /// + /// For non-mesh items, you can fill this with 0 if your items can be + /// multi-drawn, or with a unique value if they can't. + pub vertex_slab: SlabId, + + /// The ID of the slab of GPU memory that contains index data, if present. + /// + /// For non-mesh items, you can safely fill this with `None`. + pub index_slab: Option, + + /// The lightmap, if present. + pub lightmap_image: Option>, +} + +/// Data that must be identical in order to *batch* phase items together. +/// +/// Note that a *batch set* (if multi-draw is in use) contains multiple batches. +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Opaque3dBinKey { + /// The key of the *batch set*. + /// + /// As batches belong to a batch set, meshes in a batch must obviously be + /// able to be placed in a single batch set. + pub batch_set_key: Opaque3dBatchSetKey, + /// The asset that this phase item is associated with. /// /// Normally, this is the ID of the mesh, but for non-mesh items it might be /// the ID of another type of asset. pub asset_id: UntypedAssetId, +} - /// The ID of a bind group specific to the material. - /// - /// In the case of PBR, this is the `MaterialBindGroupId`. - pub material_bind_group_id: Option, +impl PhaseItemBinKey for Opaque3dBinKey { + type BatchSetKey = Opaque3dBatchSetKey; - /// The lightmap, if present. - pub lightmap_image: Option>, + fn get_batch_set_key(&self) -> Option { + Some(self.batch_set_key.clone()) + } } impl PhaseItem for Opaque3d { #[inline] fn entity(&self) -> Entity { - self.representative_entity + self.representative_entity.0 + } + + #[inline] + fn main_entity(&self) -> MainEntity { + self.representative_entity.1 } #[inline] fn draw_function(&self) -> DrawFunctionId { - self.key.draw_function + self.key.batch_set_key.draw_function } #[inline] @@ -268,7 +315,7 @@ impl PhaseItem for Opaque3d { } fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } fn batch_range_and_extra_index_mut(&mut self) -> (&mut Range, &mut PhaseItemExtraIndex) { @@ -282,7 +329,7 @@ impl BinnedPhaseItem for Opaque3d { #[inline] fn new( key: Self::BinKey, - representative_entity: Entity, + representative_entity: (Entity, MainEntity), batch_range: Range, extra_index: PhaseItemExtraIndex, ) -> Self { @@ -298,13 +345,13 @@ impl BinnedPhaseItem for Opaque3d { impl CachedRenderPipelinePhaseItem for Opaque3d { #[inline] fn cached_pipeline(&self) -> CachedRenderPipelineId { - self.key.pipeline + self.key.batch_set_key.pipeline } } pub struct AlphaMask3d { pub key: OpaqueNoLightmap3dBinKey, - pub representative_entity: Entity, + pub representative_entity: (Entity, MainEntity), pub batch_range: Range, pub extra_index: PhaseItemExtraIndex, } @@ -312,12 +359,16 @@ pub struct AlphaMask3d { impl PhaseItem for AlphaMask3d { #[inline] fn entity(&self) -> Entity { - self.representative_entity + self.representative_entity.0 + } + + fn main_entity(&self) -> MainEntity { + self.representative_entity.1 } #[inline] fn draw_function(&self) -> DrawFunctionId { - self.key.draw_function + self.key.batch_set_key.draw_function } #[inline] @@ -332,7 +383,7 @@ impl PhaseItem for AlphaMask3d { #[inline] fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } #[inline] @@ -347,7 +398,7 @@ impl BinnedPhaseItem for AlphaMask3d { #[inline] fn new( key: Self::BinKey, - representative_entity: Entity, + representative_entity: (Entity, 
MainEntity), batch_range: Range, extra_index: PhaseItemExtraIndex, ) -> Self { @@ -363,14 +414,14 @@ impl BinnedPhaseItem for AlphaMask3d { impl CachedRenderPipelinePhaseItem for AlphaMask3d { #[inline] fn cached_pipeline(&self) -> CachedRenderPipelineId { - self.key.pipeline + self.key.batch_set_key.pipeline } } pub struct Transmissive3d { pub distance: f32, pub pipeline: CachedRenderPipelineId, - pub entity: Entity, + pub entity: (Entity, MainEntity), pub draw_function: DrawFunctionId, pub batch_range: Range, pub extra_index: PhaseItemExtraIndex, @@ -390,7 +441,12 @@ impl PhaseItem for Transmissive3d { #[inline] fn entity(&self) -> Entity { - self.entity + self.entity.0 + } + + #[inline] + fn main_entity(&self) -> MainEntity { + self.entity.1 } #[inline] @@ -410,7 +466,7 @@ impl PhaseItem for Transmissive3d { #[inline] fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } #[inline] @@ -444,7 +500,7 @@ impl CachedRenderPipelinePhaseItem for Transmissive3d { pub struct Transparent3d { pub distance: f32, pub pipeline: CachedRenderPipelineId, - pub entity: Entity, + pub entity: (Entity, MainEntity), pub draw_function: DrawFunctionId, pub batch_range: Range, pub extra_index: PhaseItemExtraIndex, @@ -453,7 +509,11 @@ pub struct Transparent3d { impl PhaseItem for Transparent3d { #[inline] fn entity(&self) -> Entity { - self.entity + self.entity.0 + } + + fn main_entity(&self) -> MainEntity { + self.entity.1 } #[inline] @@ -473,7 +533,7 @@ impl PhaseItem for Transparent3d { #[inline] fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } #[inline] @@ -509,19 +569,27 @@ pub fn extract_core_3d_camera_phases( mut alpha_mask_3d_phases: ResMut>, mut transmissive_3d_phases: ResMut>, mut transparent_3d_phases: ResMut>, - cameras_3d: Extract>>, + cameras_3d: Extract), With>>, mut live_entities: Local, + gpu_preprocessing_support: Res, ) { live_entities.clear(); - for (render_entity, camera) in &cameras_3d { + for (entity, camera, has_gpu_culling) in &cameras_3d { if !camera.is_active { continue; } - let entity = render_entity.id(); - opaque_3d_phases.insert_or_clear(entity); - alpha_mask_3d_phases.insert_or_clear(entity); + // If GPU culling is in use, use it (and indirect mode); otherwise, just + // preprocess the meshes. 
+ let gpu_preprocessing_mode = gpu_preprocessing_support.min(if has_gpu_culling { + GpuPreprocessingMode::Culling + } else { + GpuPreprocessingMode::PreprocessingOnly + }); + + opaque_3d_phases.insert_or_clear(entity, gpu_preprocessing_mode); + alpha_mask_3d_phases.insert_or_clear(entity, gpu_preprocessing_mode); transmissive_3d_phases.insert_or_clear(entity); transparent_3d_phases.insert_or_clear(entity); @@ -535,6 +603,8 @@ pub fn extract_core_3d_camera_phases( } // Extract the render phases for the prepass + +#[allow(clippy::too_many_arguments)] pub fn extract_camera_prepass_phase( mut commands: Commands, mut opaque_3d_prepass_phases: ResMut>, @@ -544,8 +614,9 @@ pub fn extract_camera_prepass_phase( cameras_3d: Extract< Query< ( - &RenderEntity, + RenderEntity, &Camera, + Has, Has, Has, Has, @@ -555,12 +626,14 @@ pub fn extract_camera_prepass_phase( >, >, mut live_entities: Local, + gpu_preprocessing_support: Res, ) { live_entities.clear(); for ( - render_entity, + entity, camera, + gpu_culling, depth_prepass, normal_prepass, motion_vector_prepass, @@ -571,18 +644,25 @@ pub fn extract_camera_prepass_phase( continue; } - let entity = render_entity.id(); + // If GPU culling is in use, use it (and indirect mode); otherwise, just + // preprocess the meshes. + let gpu_preprocessing_mode = gpu_preprocessing_support.min(if gpu_culling { + GpuPreprocessingMode::Culling + } else { + GpuPreprocessingMode::PreprocessingOnly + }); + if depth_prepass || normal_prepass || motion_vector_prepass { - opaque_3d_prepass_phases.insert_or_clear(entity); - alpha_mask_3d_prepass_phases.insert_or_clear(entity); + opaque_3d_prepass_phases.insert_or_clear(entity, gpu_preprocessing_mode); + alpha_mask_3d_prepass_phases.insert_or_clear(entity, gpu_preprocessing_mode); } else { opaque_3d_prepass_phases.remove(&entity); alpha_mask_3d_prepass_phases.remove(&entity); } if deferred_prepass { - opaque_3d_deferred_phases.insert_or_clear(entity); - alpha_mask_3d_deferred_phases.insert_or_clear(entity); + opaque_3d_deferred_phases.insert_or_clear(entity, gpu_preprocessing_mode); + alpha_mask_3d_deferred_phases.insert_or_clear(entity, gpu_preprocessing_mode); } else { opaque_3d_deferred_phases.remove(&entity); alpha_mask_3d_deferred_phases.remove(&entity); @@ -590,7 +670,8 @@ pub fn extract_camera_prepass_phase( live_entities.insert(entity); commands - .get_or_spawn(entity) + .get_entity(entity) + .expect("Camera entity wasn't synced.") .insert_if(DepthPrepass, || depth_prepass) .insert_if(NormalPrepass, || normal_prepass) .insert_if(MotionVectorPrepass, || motion_vector_prepass) @@ -620,7 +701,7 @@ pub fn prepare_core_3d_depth_textures( &Msaa, )>, ) { - let mut render_target_usage = HashMap::default(); + let mut render_target_usage = >::default(); for (view, camera, depth_prepass, camera_3d, _msaa) in &views_3d { if !opaque_3d_phases.contains_key(&view) || !alpha_mask_3d_phases.contains_key(&view) @@ -642,7 +723,7 @@ pub fn prepare_core_3d_depth_textures( .or_insert_with(|| usage); } - let mut textures = HashMap::default(); + let mut textures = >::default(); for (entity, camera, _, camera_3d, msaa) in &views_3d { let Some(physical_target_size) = camera.physical_target_size else { continue; @@ -705,7 +786,7 @@ pub fn prepare_core_3d_transmission_textures( transparent_3d_phases: Res>, views_3d: Query<(Entity, &ExtractedCamera, &Camera3d, &ExtractedView)>, ) { - let mut textures = HashMap::default(); + let mut textures = >::default(); for (entity, camera, camera_3d, view) in &views_3d { if 
!opaque_3d_phases.contains_key(&entity) || !alpha_mask_3d_phases.contains_key(&entity) @@ -813,11 +894,11 @@ pub fn prepare_prepass_textures( Has, )>, ) { - let mut depth_textures = HashMap::default(); - let mut normal_textures = HashMap::default(); - let mut deferred_textures = HashMap::default(); - let mut deferred_lighting_id_textures = HashMap::default(); - let mut motion_vectors_textures = HashMap::default(); + let mut depth_textures = >::default(); + let mut normal_textures = >::default(); + let mut deferred_textures = >::default(); + let mut deferred_lighting_id_textures = >::default(); + let mut motion_vectors_textures = >::default(); for ( entity, camera, diff --git a/crates/bevy_core_pipeline/src/deferred/copy_lighting_id.rs b/crates/bevy_core_pipeline/src/deferred/copy_lighting_id.rs index 4f5462d1f52bad..f645d22092bfa8 100644 --- a/crates/bevy_core_pipeline/src/deferred/copy_lighting_id.rs +++ b/crates/bevy_core_pipeline/src/deferred/copy_lighting_id.rs @@ -160,6 +160,7 @@ impl FromWorld for CopyDeferredLightingIdPipeline { }), multisample: MultisampleState::default(), push_constant_ranges: vec![], + zero_initialize_workgroup_memory: false, }); Self { diff --git a/crates/bevy_core_pipeline/src/deferred/mod.rs b/crates/bevy_core_pipeline/src/deferred/mod.rs index ef20bf7ee59d69..1ddc66a285c202 100644 --- a/crates/bevy_core_pipeline/src/deferred/mod.rs +++ b/crates/bevy_core_pipeline/src/deferred/mod.rs @@ -3,7 +3,9 @@ pub mod node; use core::ops::Range; +use crate::prepass::OpaqueNoLightmap3dBinKey; use bevy_ecs::prelude::*; +use bevy_render::sync_world::MainEntity; use bevy_render::{ render_phase::{ BinnedPhaseItem, CachedRenderPipelinePhaseItem, DrawFunctionId, PhaseItem, @@ -12,8 +14,6 @@ use bevy_render::{ render_resource::{CachedRenderPipelineId, TextureFormat}, }; -use crate::prepass::OpaqueNoLightmap3dBinKey; - pub const DEFERRED_PREPASS_FORMAT: TextureFormat = TextureFormat::Rgba32Uint; pub const DEFERRED_LIGHTING_PASS_ID_FORMAT: TextureFormat = TextureFormat::R8Uint; pub const DEFERRED_LIGHTING_PASS_ID_DEPTH_FORMAT: TextureFormat = TextureFormat::Depth16Unorm; @@ -26,7 +26,7 @@ pub const DEFERRED_LIGHTING_PASS_ID_DEPTH_FORMAT: TextureFormat = TextureFormat: #[derive(PartialEq, Eq, Hash)] pub struct Opaque3dDeferred { pub key: OpaqueNoLightmap3dBinKey, - pub representative_entity: Entity, + pub representative_entity: (Entity, MainEntity), pub batch_range: Range, pub extra_index: PhaseItemExtraIndex, } @@ -34,12 +34,16 @@ pub struct Opaque3dDeferred { impl PhaseItem for Opaque3dDeferred { #[inline] fn entity(&self) -> Entity { - self.representative_entity + self.representative_entity.0 + } + + fn main_entity(&self) -> MainEntity { + self.representative_entity.1 } #[inline] fn draw_function(&self) -> DrawFunctionId { - self.key.draw_function + self.key.batch_set_key.draw_function } #[inline] @@ -54,7 +58,7 @@ impl PhaseItem for Opaque3dDeferred { #[inline] fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } #[inline] @@ -69,7 +73,7 @@ impl BinnedPhaseItem for Opaque3dDeferred { #[inline] fn new( key: Self::BinKey, - representative_entity: Entity, + representative_entity: (Entity, MainEntity), batch_range: Range, extra_index: PhaseItemExtraIndex, ) -> Self { @@ -85,7 +89,7 @@ impl BinnedPhaseItem for Opaque3dDeferred { impl CachedRenderPipelinePhaseItem for Opaque3dDeferred { #[inline] fn cached_pipeline(&self) -> CachedRenderPipelineId { - self.key.pipeline + self.key.batch_set_key.pipeline } } @@ -96,7 +100,7 @@ impl 
CachedRenderPipelinePhaseItem for Opaque3dDeferred { /// Used to render all meshes with a material with an alpha mask. pub struct AlphaMask3dDeferred { pub key: OpaqueNoLightmap3dBinKey, - pub representative_entity: Entity, + pub representative_entity: (Entity, MainEntity), pub batch_range: Range, pub extra_index: PhaseItemExtraIndex, } @@ -104,12 +108,17 @@ pub struct AlphaMask3dDeferred { impl PhaseItem for AlphaMask3dDeferred { #[inline] fn entity(&self) -> Entity { - self.representative_entity + self.representative_entity.0 + } + + #[inline] + fn main_entity(&self) -> MainEntity { + self.representative_entity.1 } #[inline] fn draw_function(&self) -> DrawFunctionId { - self.key.draw_function + self.key.batch_set_key.draw_function } #[inline] @@ -124,7 +133,7 @@ impl PhaseItem for AlphaMask3dDeferred { #[inline] fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } #[inline] @@ -138,7 +147,7 @@ impl BinnedPhaseItem for AlphaMask3dDeferred { fn new( key: Self::BinKey, - representative_entity: Entity, + representative_entity: (Entity, MainEntity), batch_range: Range, extra_index: PhaseItemExtraIndex, ) -> Self { @@ -154,6 +163,6 @@ impl BinnedPhaseItem for AlphaMask3dDeferred { impl CachedRenderPipelinePhaseItem for AlphaMask3dDeferred { #[inline] fn cached_pipeline(&self) -> CachedRenderPipelineId { - self.key.pipeline + self.key.batch_set_key.pipeline } } diff --git a/crates/bevy_core_pipeline/src/dof/mod.rs b/crates/bevy_core_pipeline/src/dof/mod.rs index 100b7f20ffc4d0..06cbbe3e9d312a 100644 --- a/crates/bevy_core_pipeline/src/dof/mod.rs +++ b/crates/bevy_core_pipeline/src/dof/mod.rs @@ -26,6 +26,7 @@ use bevy_ecs::{ system::{lifetimeless::Read, Commands, Query, Res, ResMut, Resource}, world::{FromWorld, World}, }; +use bevy_image::BevyDefault as _; use bevy_math::ops; use bevy_reflect::{prelude::ReflectDefault, Reflect}; use bevy_render::{ @@ -46,12 +47,13 @@ use bevy_render::{ TextureDescriptor, TextureDimension, TextureFormat, TextureSampleType, TextureUsages, }, renderer::{RenderContext, RenderDevice}, - texture::{BevyDefault, CachedTexture, TextureCache}, + sync_component::SyncComponentPlugin, + sync_world::RenderEntity, + texture::{CachedTexture, TextureCache}, view::{ prepare_view_targets, ExtractedView, Msaa, ViewDepthTexture, ViewTarget, ViewUniform, ViewUniformOffset, ViewUniforms, }, - world_sync::RenderEntity, Extract, ExtractSchedule, Render, RenderApp, RenderSet, }; use bevy_utils::{info_once, prelude::default, warn_once}; @@ -211,6 +213,8 @@ impl Plugin for DepthOfFieldPlugin { app.register_type::(); app.add_plugins(UniformComponentPlugin::::default()); + app.add_plugins(SyncComponentPlugin::::default()); + let Some(render_app) = app.get_sub_app_mut(RenderApp) else { return; }; @@ -803,6 +807,7 @@ impl SpecializedRenderPipeline for DepthOfFieldPipeline { }, targets, }), + zero_initialize_workgroup_memory: false, } } } @@ -810,7 +815,7 @@ impl SpecializedRenderPipeline for DepthOfFieldPipeline { /// Extracts all [`DepthOfField`] components into the render world. 
fn extract_depth_of_field_settings( mut commands: Commands, - mut query: Extract>, + mut query: Extract>, ) { if !DEPTH_TEXTURE_SAMPLING_SUPPORTED { info_once!( @@ -820,9 +825,21 @@ fn extract_depth_of_field_settings( } for (entity, depth_of_field, projection) in query.iter_mut() { - let entity = entity.id(); + let mut entity_commands = commands + .get_entity(entity) + .expect("Depth of field entity wasn't synced."); + // Depth of field is nonsensical without a perspective projection. let Projection::Perspective(ref perspective_projection) = *projection else { + // TODO: needs better strategy for cleaning up + entity_commands.remove::<( + DepthOfField, + DepthOfFieldUniform, + // components added in prepare systems (because `DepthOfFieldNode` does not query extracted components) + DepthOfFieldPipelines, + AuxiliaryDepthOfFieldTexture, + ViewDepthOfFieldBindGroupLayouts, + )>(); continue; }; @@ -830,7 +847,7 @@ fn extract_depth_of_field_settings( calculate_focal_length(depth_of_field.sensor_height, perspective_projection.fov); // Convert `DepthOfField` to `DepthOfFieldUniform`. - commands.get_or_spawn(entity).insert(( + entity_commands.insert(( *depth_of_field, DepthOfFieldUniform { focal_distance: depth_of_field.focal_distance, diff --git a/crates/bevy_core_pipeline/src/fxaa/mod.rs b/crates/bevy_core_pipeline/src/fxaa/mod.rs index a448444bafc4ba..547b59762442f2 100644 --- a/crates/bevy_core_pipeline/src/fxaa/mod.rs +++ b/crates/bevy_core_pipeline/src/fxaa/mod.rs @@ -6,6 +6,7 @@ use crate::{ use bevy_app::prelude::*; use bevy_asset::{load_internal_asset, Handle}; use bevy_ecs::prelude::*; +use bevy_image::BevyDefault as _; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; use bevy_render::{ extract_component::{ExtractComponent, ExtractComponentPlugin}, @@ -16,7 +17,6 @@ use bevy_render::{ *, }, renderer::RenderDevice, - texture::BevyDefault, view::{ExtractedView, ViewTarget}, Render, RenderApp, RenderSet, }; @@ -196,6 +196,7 @@ impl SpecializedRenderPipeline for FxaaPipeline { depth_stencil: None, multisample: MultisampleState::default(), push_constant_ranges: Vec::new(), + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/lib.rs b/crates/bevy_core_pipeline/src/lib.rs index 0ce36763d6e660..731b50a7576eab 100644 --- a/crates/bevy_core_pipeline/src/lib.rs +++ b/crates/bevy_core_pipeline/src/lib.rs @@ -19,6 +19,7 @@ pub mod fullscreen_vertex_shader; pub mod fxaa; pub mod motion_blur; pub mod msaa_writeback; +pub mod oit; pub mod post_process; pub mod prepass; mod skybox; @@ -45,6 +46,7 @@ pub mod experimental { /// The core pipeline prelude. /// /// This includes the most common types in this crate, re-exported for your convenience. 
+#[expect(deprecated)] pub mod prelude { #[doc(hidden)] pub use crate::{ @@ -74,6 +76,7 @@ use crate::{ use bevy_app::{App, Plugin}; use bevy_asset::load_internal_asset; use bevy_render::prelude::Shader; +use oit::OrderIndependentTransparencyPlugin; #[derive(Default)] pub struct CorePipelinePlugin; @@ -106,6 +109,7 @@ impl Plugin for CorePipelinePlugin { DepthOfFieldPlugin, SmaaPlugin, PostProcessingPlugin, + OrderIndependentTransparencyPlugin, )); } } diff --git a/crates/bevy_core_pipeline/src/motion_blur/mod.rs b/crates/bevy_core_pipeline/src/motion_blur/mod.rs index c5a4a9f212e5a3..c6eb8524ca3efb 100644 --- a/crates/bevy_core_pipeline/src/motion_blur/mod.rs +++ b/crates/bevy_core_pipeline/src/motion_blur/mod.rs @@ -11,7 +11,10 @@ use crate::{ use bevy_app::{App, Plugin}; use bevy_asset::{load_internal_asset, Handle}; use bevy_ecs::{ - bundle::Bundle, component::Component, query::With, reflect::ReflectComponent, + bundle::Bundle, + component::{require, Component}, + query::With, + reflect::ReflectComponent, schedule::IntoSystemConfigs, }; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; @@ -59,11 +62,11 @@ pub struct MotionBlurBundle { /// camera. /// /// ``` -/// # use bevy_core_pipeline::{core_3d::Camera3dBundle, motion_blur::MotionBlur}; +/// # use bevy_core_pipeline::{core_3d::Camera3d, motion_blur::MotionBlur}; /// # use bevy_ecs::prelude::*; /// # fn test(mut commands: Commands) { /// commands.spawn(( -/// Camera3dBundle::default(), +/// Camera3d::default(), /// MotionBlur::default(), /// )); /// # } diff --git a/crates/bevy_core_pipeline/src/motion_blur/motion_blur.wgsl b/crates/bevy_core_pipeline/src/motion_blur/motion_blur.wgsl index 346b4cdf8ffba4..147d3b8c5e6237 100644 --- a/crates/bevy_core_pipeline/src/motion_blur/motion_blur.wgsl +++ b/crates/bevy_core_pipeline/src/motion_blur/motion_blur.wgsl @@ -66,7 +66,7 @@ fn fragment( // This means that for a frame time of 20ms, the shutter is only open for 10ms. // // Using a shutter angle larger than 1.0 is non-physical, objects would need to move further - // than they physically travelled during a frame, which is not possible. Note: we allow values + // than they physically traveled during a frame, which is not possible. Note: we allow values // larger than 1.0 because it may be desired for artistic reasons. let exposure_vector = shutter_angle * this_motion_vector; diff --git a/crates/bevy_core_pipeline/src/motion_blur/pipeline.rs b/crates/bevy_core_pipeline/src/motion_blur/pipeline.rs index cff26a9e22f170..8109beeb4eb3a9 100644 --- a/crates/bevy_core_pipeline/src/motion_blur/pipeline.rs +++ b/crates/bevy_core_pipeline/src/motion_blur/pipeline.rs @@ -5,6 +5,7 @@ use bevy_ecs::{ system::{Commands, Query, Res, ResMut, Resource}, world::FromWorld, }; +use bevy_image::BevyDefault as _; use bevy_render::{ globals::GlobalsUniform, render_resource::{ @@ -19,7 +20,6 @@ use bevy_render::{ TextureFormat, TextureSampleType, }, renderer::RenderDevice, - texture::BevyDefault, view::{ExtractedView, Msaa, ViewTarget}, }; @@ -141,6 +141,7 @@ impl SpecializedRenderPipeline for MotionBlurPipeline { depth_stencil: None, multisample: MultisampleState::default(), push_constant_ranges: vec![], + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/oit/mod.rs b/crates/bevy_core_pipeline/src/oit/mod.rs new file mode 100644 index 00000000000000..14e8b8d4e36e3d --- /dev/null +++ b/crates/bevy_core_pipeline/src/oit/mod.rs @@ -0,0 +1,315 @@ +//! Order Independent Transparency (OIT) for 3d rendering. 
See [`OrderIndependentTransparencyPlugin`] for more details. + +use bevy_app::prelude::*; +use bevy_asset::{load_internal_asset, Handle}; +use bevy_ecs::{component::*, prelude::*}; +use bevy_math::UVec2; +use bevy_reflect::Reflect; +use bevy_render::{ + camera::{Camera, ExtractedCamera}, + extract_component::{ExtractComponent, ExtractComponentPlugin}, + render_graph::{RenderGraphApp, ViewNodeRunner}, + render_resource::{ + BufferUsages, BufferVec, DynamicUniformBuffer, Shader, ShaderType, TextureUsages, + }, + renderer::{RenderDevice, RenderQueue}, + view::Msaa, + Render, RenderApp, RenderSet, +}; +use bevy_utils::{ + tracing::{trace, warn}, + HashSet, Instant, +}; +use bevy_window::PrimaryWindow; +use resolve::{ + node::{OitResolveNode, OitResolvePass}, + OitResolvePlugin, +}; + +use crate::core_3d::{ + graph::{Core3d, Node3d}, + Camera3d, +}; + +/// Module that defines the necessary systems to resolve the OIT buffer and render it to the screen. +pub mod resolve; + +/// Shader handle for the shader that draws the transparent meshes to the OIT layers buffer. +pub const OIT_DRAW_SHADER_HANDLE: Handle = Handle::weak_from_u128(4042527984320512); + +/// Used to identify which camera will use OIT to render transparent meshes +/// and to configure OIT. +// TODO consider supporting multiple OIT techniques like WBOIT, Moment Based OIT, +// depth peeling, stochastic transparency, ray tracing etc. +// This should probably be done by adding an enum to this component. +// We use the same struct to pass on the settings to the drawing shader. +#[derive(Clone, Copy, ExtractComponent, Reflect, ShaderType)] +pub struct OrderIndependentTransparencySettings { + /// Controls how many layers will be used to compute the blending. + /// The more layers you use the more memory it will use but it will also give better results. + /// 8 is generally recommended, going above 32 is probably not worth it in the vast majority of cases + pub layer_count: i32, + /// Threshold for which fragments will be added to the blending layers. + /// This can be tweaked to optimize quality / layers count. Higher values will + /// allow lower number of layers and a better performance, compromising quality. + pub alpha_threshold: f32, +} + +impl Default for OrderIndependentTransparencySettings { + fn default() -> Self { + Self { + layer_count: 8, + alpha_threshold: 0.0, + } + } +} + +// OrderIndependentTransparencySettings is also a Component. We explicitly implement the trait so +// we can hook on_add to issue a warning in case `layer_count` is seemingly too high. +impl Component for OrderIndependentTransparencySettings { + const STORAGE_TYPE: StorageType = StorageType::SparseSet; + type Mutability = Mutable; + + fn register_component_hooks(hooks: &mut ComponentHooks) { + hooks.on_add(|world, entity, _| { + if let Some(value) = world.get::(entity) { + if value.layer_count > 32 { + warn!("OrderIndependentTransparencySettings layer_count set to {} might be too high.", value.layer_count); + } + } + }); + } +} + +/// A plugin that adds support for Order Independent Transparency (OIT). +/// This can correctly render some scenes that would otherwise have artifacts due to alpha blending, but uses more memory. +/// +/// To enable OIT for a camera you need to add the [`OrderIndependentTransparencySettings`] component to it. +/// +/// If you want to use OIT for your custom material you need to call `oit_draw(position, color)` in your fragment shader. +/// You also need to make sure that your fragment shader doesn't output any colors. 
+/// +/// # Implementation details +/// This implementation uses 2 passes. +/// +/// The first pass writes the depth and color of all the fragments to a big buffer. +/// The buffer contains N layers for each pixel, where N can be set with [`OrderIndependentTransparencySettings::layer_count`]. +/// This pass is essentially a forward pass. +/// +/// The second pass is a single fullscreen triangle pass that sorts all the fragments then blends them together +/// and outputs the result to the screen. +pub struct OrderIndependentTransparencyPlugin; +impl Plugin for OrderIndependentTransparencyPlugin { + fn build(&self, app: &mut App) { + load_internal_asset!( + app, + OIT_DRAW_SHADER_HANDLE, + "oit_draw.wgsl", + Shader::from_wgsl + ); + + app.add_plugins(( + ExtractComponentPlugin::::default(), + OitResolvePlugin, + )) + .add_systems(Update, check_msaa) + .add_systems(Last, configure_depth_texture_usages) + .register_type::(); + + let Some(render_app) = app.get_sub_app_mut(RenderApp) else { + return; + }; + + render_app.add_systems( + Render, + prepare_oit_buffers.in_set(RenderSet::PrepareResources), + ); + + render_app + .add_render_graph_node::>(Core3d, OitResolvePass) + .add_render_graph_edges( + Core3d, + ( + Node3d::MainTransparentPass, + OitResolvePass, + Node3d::EndMainPass, + ), + ); + } + + fn finish(&self, app: &mut App) { + let Some(render_app) = app.get_sub_app_mut(RenderApp) else { + return; + }; + + render_app.init_resource::(); + } +} + +// WARN This should only happen for cameras with the [`OrderIndependentTransparencySettings`] component +// but when multiple cameras are present on the same window +// bevy reuses the same depth texture so we need to set this on all cameras with the same render target. +fn configure_depth_texture_usages( + p: Query>, + cameras: Query<(&Camera, Has)>, + mut new_cameras: Query<(&mut Camera3d, &Camera), Added>, +) { + if new_cameras.is_empty() { + return; + } + + // Find all the render target that potentially uses OIT + let primary_window = p.get_single().ok(); + let mut render_target_has_oit = >::default(); + for (camera, has_oit) in &cameras { + if has_oit { + render_target_has_oit.insert(camera.target.normalize(primary_window)); + } + } + + // Update the depth texture usage for cameras with a render target that has OIT + for (mut camera_3d, camera) in &mut new_cameras { + if render_target_has_oit.contains(&camera.target.normalize(primary_window)) { + let mut usages = TextureUsages::from(camera_3d.depth_texture_usages); + usages |= TextureUsages::RENDER_ATTACHMENT | TextureUsages::TEXTURE_BINDING; + camera_3d.depth_texture_usages = usages.into(); + } + } +} + +fn check_msaa(cameras: Query<&Msaa, With>) { + for msaa in &cameras { + if msaa.samples() > 1 { + panic!("MSAA is not supported when using OrderIndependentTransparency"); + } + } +} + +/// Holds the buffers that contain the data of all OIT layers. +/// We use one big buffer for the entire app. Each camera will reuse it so it will +/// always be the size of the biggest OIT enabled camera. +#[derive(Resource)] +pub struct OitBuffers { + /// The OIT layers containing depth and color for each fragments. + /// This is essentially used as a 3d array where xy is the screen coordinate and z is + /// the list of fragments rendered with OIT. + pub layers: BufferVec, + /// Buffer containing the index of the last layer that was written for each fragment. 
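As the plugin documentation above notes, OIT is opted into per camera by adding the `OrderIndependentTransparencySettings` component. The following is a minimal sketch of that setup, not taken from this patch; it assumes the usual `bevy` re-export path (`bevy::core_pipeline::oit`) and that `Msaa` is a per-camera component, as the `check_msaa` query above implies.

use bevy::core_pipeline::oit::OrderIndependentTransparencySettings;
use bevy::prelude::*;

fn setup(mut commands: Commands) {
    commands.spawn((
        Camera3d::default(),
        // `check_msaa` panics if MSAA is enabled on an OIT camera, so turn it off here.
        Msaa::Off,
        // The defaults shown above: 8 layers, no alpha cutoff.
        OrderIndependentTransparencySettings {
            layer_count: 8,
            alpha_threshold: 0.0,
        },
    ));
}

Note that `layer_count` drives memory use: the `layers` buffer sized by `prepare_oit_buffers` holds `width * height * layer_count` entries of `vec2<u32>`, so a 1920x1080 target with 8 layers needs roughly 127 MiB.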
+ pub layer_ids: BufferVec, + pub settings: DynamicUniformBuffer, +} + +impl FromWorld for OitBuffers { + fn from_world(world: &mut World) -> Self { + let render_device = world.resource::(); + let render_queue = world.resource::(); + + // initialize buffers with something so there's a valid binding + + let mut layers = BufferVec::new(BufferUsages::COPY_DST | BufferUsages::STORAGE); + layers.set_label(Some("oit_layers")); + layers.reserve(1, render_device); + layers.write_buffer(render_device, render_queue); + + let mut layer_ids = BufferVec::new(BufferUsages::COPY_DST | BufferUsages::STORAGE); + layer_ids.set_label(Some("oit_layer_ids")); + layer_ids.reserve(1, render_device); + layer_ids.write_buffer(render_device, render_queue); + + let mut settings = DynamicUniformBuffer::default(); + settings.set_label(Some("oit_settings")); + + Self { + layers, + layer_ids, + settings, + } + } +} + +#[derive(Component)] +pub struct OrderIndependentTransparencySettingsOffset { + pub offset: u32, +} + +/// This creates or resizes the oit buffers for each camera. +/// It will always create one big buffer that's as big as the biggest buffer needed. +/// Cameras with smaller viewports or less layers will simply use the big buffer and ignore the rest. +#[allow(clippy::type_complexity)] +pub fn prepare_oit_buffers( + mut commands: Commands, + render_device: Res, + render_queue: Res, + cameras: Query< + (&ExtractedCamera, &OrderIndependentTransparencySettings), + ( + Changed, + Changed, + ), + >, + camera_oit_uniforms: Query<(Entity, &OrderIndependentTransparencySettings)>, + mut buffers: ResMut, +) { + // Get the max buffer size for any OIT enabled camera + let mut max_layer_ids_size = usize::MIN; + let mut max_layers_size = usize::MIN; + for (camera, settings) in &cameras { + let Some(size) = camera.physical_target_size else { + continue; + }; + + let layer_count = settings.layer_count as usize; + let size = (size.x * size.y) as usize; + max_layer_ids_size = max_layer_ids_size.max(size); + max_layers_size = max_layers_size.max(size * layer_count); + } + + // Create or update the layers buffer based on the max size + if buffers.layers.capacity() < max_layers_size { + let start = Instant::now(); + buffers.layers.reserve(max_layers_size, &render_device); + let remaining = max_layers_size - buffers.layers.capacity(); + for _ in 0..remaining { + buffers.layers.push(UVec2::ZERO); + } + buffers.layers.write_buffer(&render_device, &render_queue); + trace!( + "OIT layers buffer updated in {:.01}ms with total size {} MiB", + start.elapsed().as_millis(), + buffers.layers.capacity() * size_of::() / 1024 / 1024, + ); + } + + // Create or update the layer_ids buffer based on the max size + if buffers.layer_ids.capacity() < max_layer_ids_size { + let start = Instant::now(); + buffers + .layer_ids + .reserve(max_layer_ids_size, &render_device); + let remaining = max_layer_ids_size - buffers.layer_ids.capacity(); + for _ in 0..remaining { + buffers.layer_ids.push(0); + } + buffers + .layer_ids + .write_buffer(&render_device, &render_queue); + trace!( + "OIT layer ids buffer updated in {:.01}ms with total size {} MiB", + start.elapsed().as_millis(), + buffers.layer_ids.capacity() * size_of::() / 1024 / 1024, + ); + } + + if let Some(mut writer) = buffers.settings.get_writer( + camera_oit_uniforms.iter().len(), + &render_device, + &render_queue, + ) { + for (entity, settings) in &camera_oit_uniforms { + let offset = writer.write(settings); + commands + .entity(entity) + .insert(OrderIndependentTransparencySettingsOffset { 
offset }); + } + } +} diff --git a/crates/bevy_core_pipeline/src/oit/oit_draw.wgsl b/crates/bevy_core_pipeline/src/oit/oit_draw.wgsl new file mode 100644 index 00000000000000..8e4c88ba2d64cc --- /dev/null +++ b/crates/bevy_core_pipeline/src/oit/oit_draw.wgsl @@ -0,0 +1,48 @@ +#define_import_path bevy_core_pipeline::oit + +#import bevy_pbr::mesh_view_bindings::{view, oit_layers, oit_layer_ids, oit_settings} + +#ifdef OIT_ENABLED +// Add the fragment to the oit buffer +fn oit_draw(position: vec4f, color: vec4f) { + // Don't add fully transparent fragments to the list + // because we don't want to have to sort them in the resolve pass + if color.a < oit_settings.alpha_threshold { + return; + } + // get the index of the current fragment relative to the screen size + let screen_index = i32(floor(position.x) + floor(position.y) * view.viewport.z); + // get the size of the buffer. + // It's always the size of the screen + let buffer_size = i32(view.viewport.z * view.viewport.w); + + // gets the layer index of the current fragment + var layer_id = atomicAdd(&oit_layer_ids[screen_index], 1); + // exit early if we've reached the maximum amount of fragments per layer + if layer_id >= oit_settings.layers_count { + // force to store the oit_layers_count to make sure we don't + // accidentally increase the index above the maximum value + atomicStore(&oit_layer_ids[screen_index], oit_settings.layers_count); + // TODO for tail blending we should return the color here + return; + } + + // get the layer_index from the screen + let layer_index = screen_index + layer_id * buffer_size; + let rgb9e5_color = bevy_pbr::rgb9e5::vec3_to_rgb9e5_(color.rgb); + let depth_alpha = pack_24bit_depth_8bit_alpha(position.z, color.a); + oit_layers[layer_index] = vec2(rgb9e5_color, depth_alpha); +} +#endif // OIT_ENABLED + +fn pack_24bit_depth_8bit_alpha(depth: f32, alpha: f32) -> u32 { + let depth_bits = u32(saturate(depth) * f32(0xFFFFFFu) + 0.5); + let alpha_bits = u32(saturate(alpha) * f32(0xFFu) + 0.5); + return (depth_bits & 0xFFFFFFu) | ((alpha_bits & 0xFFu) << 24u); +} + +fn unpack_24bit_depth_8bit_alpha(packed: u32) -> vec2 { + let depth_bits = packed & 0xFFFFFFu; + let alpha_bits = (packed >> 24u) & 0xFFu; + return vec2(f32(depth_bits) / f32(0xFFFFFFu), f32(alpha_bits) / f32(0xFFu)); +} diff --git a/crates/bevy_core_pipeline/src/oit/resolve/mod.rs b/crates/bevy_core_pipeline/src/oit/resolve/mod.rs new file mode 100644 index 00000000000000..101f7b1ed941e3 --- /dev/null +++ b/crates/bevy_core_pipeline/src/oit/resolve/mod.rs @@ -0,0 +1,234 @@ +use crate::{ + fullscreen_vertex_shader::fullscreen_shader_vertex_state, + oit::OrderIndependentTransparencySettings, +}; +use bevy_app::Plugin; +use bevy_asset::{load_internal_asset, Handle}; +use bevy_derive::Deref; +use bevy_ecs::{ + entity::{EntityHashMap, EntityHashSet}, + prelude::*, +}; +use bevy_image::BevyDefault as _; +use bevy_render::{ + render_resource::{ + binding_types::{storage_buffer_sized, texture_depth_2d, uniform_buffer}, + BindGroup, BindGroupEntries, BindGroupLayout, BindGroupLayoutEntries, BlendComponent, + BlendState, CachedRenderPipelineId, ColorTargetState, ColorWrites, DownlevelFlags, + FragmentState, MultisampleState, PipelineCache, PrimitiveState, RenderPipelineDescriptor, + Shader, ShaderDefVal, ShaderStages, TextureFormat, + }, + renderer::{RenderAdapter, RenderDevice}, + view::{ExtractedView, ViewTarget, ViewUniform, ViewUniforms}, + Render, RenderApp, RenderSet, +}; +use bevy_utils::tracing::warn; + +use super::OitBuffers; + +/// Shader handle for 
the shader that sorts the OIT layers, blends the colors based on depth and renders them to the screen. +pub const OIT_RESOLVE_SHADER_HANDLE: Handle = Handle::weak_from_u128(7698420424769536); + +/// Contains the render node used to run the resolve pass. +pub mod node; + +/// Plugin needed to resolve the Order Independent Transparency (OIT) buffer to the screen. +pub struct OitResolvePlugin; +impl Plugin for OitResolvePlugin { + fn build(&self, app: &mut bevy_app::App) { + load_internal_asset!( + app, + OIT_RESOLVE_SHADER_HANDLE, + "oit_resolve.wgsl", + Shader::from_wgsl + ); + } + + fn finish(&self, app: &mut bevy_app::App) { + let Some(render_app) = app.get_sub_app_mut(RenderApp) else { + return; + }; + + if !render_app + .world() + .resource::() + .get_downlevel_capabilities() + .flags + .contains(DownlevelFlags::FRAGMENT_WRITABLE_STORAGE) + { + warn!("OrderIndependentTransparencyPlugin not loaded. GPU lacks support: DownlevelFlags::FRAGMENT_WRITABLE_STORAGE."); + return; + } + + render_app + .add_systems( + Render, + ( + queue_oit_resolve_pipeline.in_set(RenderSet::Queue), + prepare_oit_resolve_bind_group.in_set(RenderSet::PrepareBindGroups), + ), + ) + .init_resource::(); + } +} + +/// Bind group for the OIT resolve pass. +#[derive(Resource, Deref)] +pub struct OitResolveBindGroup(pub BindGroup); + +/// Bind group layouts used for the OIT resolve pass. +#[derive(Resource)] +pub struct OitResolvePipeline { + /// View bind group layout. + pub view_bind_group_layout: BindGroupLayout, + /// Depth bind group layout. + pub oit_depth_bind_group_layout: BindGroupLayout, +} + +impl FromWorld for OitResolvePipeline { + fn from_world(world: &mut World) -> Self { + let render_device = world.resource::(); + + let view_bind_group_layout = render_device.create_bind_group_layout( + "oit_resolve_bind_group_layout", + &BindGroupLayoutEntries::sequential( + ShaderStages::FRAGMENT, + ( + uniform_buffer::(true), + // layers + storage_buffer_sized(false, None), + // layer ids + storage_buffer_sized(false, None), + ), + ), + ); + + let oit_depth_bind_group_layout = render_device.create_bind_group_layout( + "oit_depth_bind_group_layout", + &BindGroupLayoutEntries::single(ShaderStages::FRAGMENT, texture_depth_2d()), + ); + OitResolvePipeline { + view_bind_group_layout, + oit_depth_bind_group_layout, + } + } +} + +#[derive(Component, Deref, Clone, Copy)] +pub struct OitResolvePipelineId(pub CachedRenderPipelineId); + +/// This key is used to cache the pipeline id and to specialize the render pipeline descriptor. +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub struct OitResolvePipelineKey { + hdr: bool, + layer_count: i32, +} + +#[allow(clippy::too_many_arguments)] +pub fn queue_oit_resolve_pipeline( + mut commands: Commands, + pipeline_cache: Res, + resolve_pipeline: Res, + views: Query< + ( + Entity, + &ExtractedView, + &OrderIndependentTransparencySettings, + ), + With, + >, + // Store the key with the id to make the clean up logic easier. + // This also means it will always replace the entry if the key changes so nothing to clean up. 
+ mut cached_pipeline_id: Local>, +) { + let mut current_view_entities = EntityHashSet::default(); + for (e, view, oit_settings) in &views { + current_view_entities.insert(e); + let key = OitResolvePipelineKey { + hdr: view.hdr, + layer_count: oit_settings.layer_count, + }; + + if let Some((cached_key, id)) = cached_pipeline_id.get(&e) { + if *cached_key == key { + commands.entity(e).insert(OitResolvePipelineId(*id)); + continue; + } + } + + let desc = specialize_oit_resolve_pipeline(key, &resolve_pipeline); + + let pipeline_id = pipeline_cache.queue_render_pipeline(desc); + commands.entity(e).insert(OitResolvePipelineId(pipeline_id)); + cached_pipeline_id.insert(e, (key, pipeline_id)); + } + + // Clear cache for views that don't exist anymore. + for e in cached_pipeline_id.keys().copied().collect::>() { + if !current_view_entities.contains(&e) { + cached_pipeline_id.remove(&e); + } + } +} + +fn specialize_oit_resolve_pipeline( + key: OitResolvePipelineKey, + resolve_pipeline: &OitResolvePipeline, +) -> RenderPipelineDescriptor { + let format = if key.hdr { + ViewTarget::TEXTURE_FORMAT_HDR + } else { + TextureFormat::bevy_default() + }; + + RenderPipelineDescriptor { + label: Some("oit_resolve_pipeline".into()), + layout: vec![ + resolve_pipeline.view_bind_group_layout.clone(), + resolve_pipeline.oit_depth_bind_group_layout.clone(), + ], + fragment: Some(FragmentState { + entry_point: "fragment".into(), + shader: OIT_RESOLVE_SHADER_HANDLE, + shader_defs: vec![ShaderDefVal::UInt( + "LAYER_COUNT".into(), + key.layer_count as u32, + )], + targets: vec![Some(ColorTargetState { + format, + blend: Some(BlendState { + color: BlendComponent::OVER, + alpha: BlendComponent::OVER, + }), + write_mask: ColorWrites::ALL, + })], + }), + vertex: fullscreen_shader_vertex_state(), + primitive: PrimitiveState::default(), + depth_stencil: None, + multisample: MultisampleState::default(), + push_constant_ranges: vec![], + zero_initialize_workgroup_memory: false, + } +} + +pub fn prepare_oit_resolve_bind_group( + mut commands: Commands, + resolve_pipeline: Res, + render_device: Res, + view_uniforms: Res, + buffers: Res, +) { + if let (Some(binding), Some(layers_binding), Some(layer_ids_binding)) = ( + view_uniforms.uniforms.binding(), + buffers.layers.binding(), + buffers.layer_ids.binding(), + ) { + let bind_group = render_device.create_bind_group( + "oit_resolve_bind_group", + &resolve_pipeline.view_bind_group_layout, + &BindGroupEntries::sequential((binding.clone(), layers_binding, layer_ids_binding)), + ); + commands.insert_resource(OitResolveBindGroup(bind_group)); + } +} diff --git a/crates/bevy_core_pipeline/src/oit/resolve/node.rs b/crates/bevy_core_pipeline/src/oit/resolve/node.rs new file mode 100644 index 00000000000000..14d42235f12a99 --- /dev/null +++ b/crates/bevy_core_pipeline/src/oit/resolve/node.rs @@ -0,0 +1,78 @@ +use bevy_ecs::{prelude::*, query::QueryItem}; +use bevy_render::{ + camera::ExtractedCamera, + render_graph::{NodeRunError, RenderGraphContext, RenderLabel, ViewNode}, + render_resource::{BindGroupEntries, PipelineCache, RenderPassDescriptor}, + renderer::RenderContext, + view::{ViewDepthTexture, ViewTarget, ViewUniformOffset}, +}; + +use super::{OitResolveBindGroup, OitResolvePipeline, OitResolvePipelineId}; + +/// Render label for the OIT resolve pass. +#[derive(RenderLabel, Debug, Clone, Hash, PartialEq, Eq)] +pub struct OitResolvePass; + +/// The node that executes the OIT resolve pass. 
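Both the draw shader above (`oit_draw.wgsl`) and the resolve shader below rely on the same flat addressing, packing, and blending arithmetic. The following plain-Rust sketch mirrors those WGSL helpers; the names are illustrative and are not part of the Bevy API.

/// Flat index of a pixel in the OIT buffers: x + y * viewport_width.
fn screen_index(x: u32, y: u32, viewport_width: u32) -> usize {
    (x + y * viewport_width) as usize
}

/// Index of `layer_id` for a pixel: each layer is a full-screen slice,
/// so consecutive layers are `buffer_size` (= width * height) entries apart.
fn layer_index(screen_index: usize, layer_id: usize, buffer_size: usize) -> usize {
    screen_index + layer_id * buffer_size
}

/// Pack a [0, 1] depth into the low 24 bits and a [0, 1] alpha into the high
/// 8 bits, as `pack_24bit_depth_8bit_alpha` does in the draw shader.
fn pack_depth_alpha(depth: f32, alpha: f32) -> u32 {
    let depth_bits = (depth.clamp(0.0, 1.0) * 0xFF_FFFF as f32 + 0.5) as u32;
    let alpha_bits = (alpha.clamp(0.0, 1.0) * 0xFF as f32 + 0.5) as u32;
    (depth_bits & 0xFF_FFFF) | ((alpha_bits & 0xFF) << 24)
}

fn unpack_depth_alpha(packed: u32) -> (f32, f32) {
    let depth = (packed & 0xFF_FFFF) as f32 / 0xFF_FFFF as f32;
    let alpha = ((packed >> 24) & 0xFF) as f32 / 0xFF as f32;
    (depth, alpha)
}

/// The premultiplied-alpha OVER operator used by the resolve shader's `blend`.
fn blend_over(dst: [f32; 4], src: [f32; 4]) -> [f32; 4] {
    [
        dst[0] + (1.0 - dst[3]) * src[0],
        dst[1] + (1.0 - dst[3]) * src[1],
        dst[2] + (1.0 - dst[3]) * src[2],
        dst[3] + (1.0 - dst[3]) * src[3],
    ]
}

fn main() {
    let (width, height) = (1920u32, 1080u32);
    let buffer_size = (width * height) as usize;

    // Layer 3 of pixel (100, 200) on a 1920-wide target.
    let si = screen_index(100, 200, width);
    assert_eq!(layer_index(si, 3, buffer_size), 6_604_900);

    // Depth/alpha round-trips through the packed u32 with the expected precision.
    let (d, a) = unpack_depth_alpha(pack_depth_alpha(0.5, 0.25));
    assert!((d - 0.5).abs() < 1e-6 && (a - 0.25).abs() < 0.01);

    // Compositing a fully opaque premultiplied fragment under a half-covered one.
    let out = blend_over([0.5, 0.5, 0.5, 0.5], [1.0, 0.0, 0.0, 1.0]);
    assert!((out[3] - 1.0).abs() < 1e-6);
}

Packing depth and alpha into a single u32 keeps each OIT entry at two 32-bit words, matching the `vec2<u32>` layout written by the draw shader: one word for the RGB9E5 color and one for depth plus alpha.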
+#[derive(Default)] +pub struct OitResolveNode; +impl ViewNode for OitResolveNode { + type ViewQuery = ( + &'static ExtractedCamera, + &'static ViewTarget, + &'static ViewUniformOffset, + &'static OitResolvePipelineId, + &'static ViewDepthTexture, + ); + + fn run( + &self, + _graph: &mut RenderGraphContext, + render_context: &mut RenderContext, + (camera, view_target, view_uniform, oit_resolve_pipeline_id, depth): QueryItem< + Self::ViewQuery, + >, + world: &World, + ) -> Result<(), NodeRunError> { + let Some(resolve_pipeline) = world.get_resource::() else { + return Ok(()); + }; + + // resolve oit + // sorts the layers and renders the final blended color to the screen + { + let pipeline_cache = world.resource::(); + let bind_group = world.resource::(); + let Some(pipeline) = pipeline_cache.get_render_pipeline(oit_resolve_pipeline_id.0) + else { + return Ok(()); + }; + + let depth_bind_group = render_context.render_device().create_bind_group( + "oit_resolve_depth_bind_group", + &resolve_pipeline.oit_depth_bind_group_layout, + &BindGroupEntries::single(depth.view()), + ); + + let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor { + label: Some("oit_resolve_pass"), + color_attachments: &[Some(view_target.get_color_attachment())], + depth_stencil_attachment: None, + timestamp_writes: None, + occlusion_query_set: None, + }); + + if let Some(viewport) = camera.viewport.as_ref() { + render_pass.set_camera_viewport(viewport); + } + + render_pass.set_render_pipeline(pipeline); + render_pass.set_bind_group(0, bind_group, &[view_uniform.offset]); + render_pass.set_bind_group(1, &depth_bind_group, &[]); + + render_pass.draw(0..3, 0..1); + } + + Ok(()) + } +} diff --git a/crates/bevy_core_pipeline/src/oit/resolve/oit_resolve.wgsl b/crates/bevy_core_pipeline/src/oit/resolve/oit_resolve.wgsl new file mode 100644 index 00000000000000..41ebb457ad5349 --- /dev/null +++ b/crates/bevy_core_pipeline/src/oit/resolve/oit_resolve.wgsl @@ -0,0 +1,117 @@ +#import bevy_render::view::View + +@group(0) @binding(0) var view: View; +@group(0) @binding(1) var layers: array>; +@group(0) @binding(2) var layer_ids: array>; + +@group(1) @binding(0) var depth: texture_depth_2d; + +struct OitFragment { + color: vec3, + alpha: f32, + depth: f32, +} +// Contains all the colors and depth for this specific fragment +var fragment_list: array; + +struct FullscreenVertexOutput { + @builtin(position) position: vec4, + @location(0) uv: vec2, +}; + +@fragment +fn fragment(in: FullscreenVertexOutput) -> @location(0) vec4 { + let buffer_size = i32(view.viewport.z * view.viewport.w); + let screen_index = i32(floor(in.position.x) + floor(in.position.y) * view.viewport.z); + + let counter = atomicLoad(&layer_ids[screen_index]); + if counter == 0 { + reset_indices(screen_index); + + // https://github.com/gfx-rs/wgpu/issues/4416 + if true { + discard; + } + return vec4(0.0); + } else { + // Load depth for manual depth testing. + // This is necessary because early z doesn't seem to trigger in the transparent pass. + // This should be done during the draw pass so those fragments simply don't exist in the list, + // but this requires a bigger refactor + let d = textureLoad(depth, vec2(in.position.xy), 0); + let result = sort(screen_index, buffer_size, d); + reset_indices(screen_index); + + return result.color; + } +} + +// Resets all indices to 0. 
+// This means we don't have to clear the entire layers buffer +fn reset_indices(screen_index: i32) { + atomicStore(&layer_ids[screen_index], 0); + layers[screen_index] = vec2(0u); +} + +struct SortResult { + color: vec4f, + depth: f32, +} + +fn sort(screen_index: i32, buffer_size: i32, opaque_depth: f32) -> SortResult { + var counter = atomicLoad(&layer_ids[screen_index]); + + // fill list + for (var i = 0; i < counter; i += 1) { + let fragment = layers[screen_index + buffer_size * i]; + // unpack color/alpha/depth + let color = bevy_pbr::rgb9e5::rgb9e5_to_vec3_(fragment.x); + let depth_alpha = bevy_core_pipeline::oit::unpack_24bit_depth_8bit_alpha(fragment.y); + fragment_list[i].color = color; + fragment_list[i].alpha = depth_alpha.y; + fragment_list[i].depth = depth_alpha.x; + } + + // bubble sort the list based on the depth + for (var i = counter; i >= 0; i -= 1) { + for (var j = 0; j < i; j += 1) { + if fragment_list[j].depth < fragment_list[j + 1].depth { + // swap + let temp = fragment_list[j + 1]; + fragment_list[j + 1] = fragment_list[j]; + fragment_list[j] = temp; + } + } + } + + // resolve blend + var final_color = vec4(0.0); + for (var i = 0; i <= counter; i += 1) { + // depth testing + // This needs to happen here because we can only stop iterating if the fragment is + // occluded by something opaque and the fragments need to be sorted first + if fragment_list[i].depth < opaque_depth { + break; + } + let color = fragment_list[i].color; + let alpha = fragment_list[i].alpha; + var base_color = vec4(color.rgb * alpha, alpha); + final_color = blend(final_color, base_color); + if final_color.a == 1.0 { + break; + } + } + var result: SortResult; + result.color = final_color; + result.depth = fragment_list[0].depth; + + return result; +} + +// OVER operator using premultiplied alpha +// see: https://en.wikipedia.org/wiki/Alpha_compositing +fn blend(color_a: vec4, color_b: vec4) -> vec4 { + let final_color = color_a.rgb + (1.0 - color_a.a) * color_b.rgb; + let alpha = color_a.a + (1.0 - color_a.a) * color_b.a; + return vec4(final_color.rgb, alpha); +} diff --git a/crates/bevy_core_pipeline/src/post_process/mod.rs b/crates/bevy_core_pipeline/src/post_process/mod.rs index 79c41f990b33ce..a633134b276d2e 100644 --- a/crates/bevy_core_pipeline/src/post_process/mod.rs +++ b/crates/bevy_core_pipeline/src/post_process/mod.rs @@ -14,6 +14,7 @@ use bevy_ecs::{ system::{lifetimeless::Read, Commands, Query, Res, ResMut, Resource}, world::{FromWorld, World}, }; +use bevy_image::{BevyDefault, Image}; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; use bevy_render::{ camera::Camera, @@ -32,7 +33,7 @@ use bevy_render::{ TextureDimension, TextureFormat, TextureSampleType, }, renderer::{RenderContext, RenderDevice, RenderQueue}, - texture::{BevyDefault, GpuImage, Image}, + texture::GpuImage, view::{ExtractedView, ViewTarget}, Render, RenderApp, RenderSet, }; @@ -344,6 +345,7 @@ impl SpecializedRenderPipeline for PostProcessingPipeline { depth_stencil: None, multisample: default(), push_constant_ranges: vec![], + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/prepass/mod.rs b/crates/bevy_core_pipeline/src/prepass/mod.rs index 4829c76ee7653b..78bac66df0f0f2 100644 --- a/crates/bevy_core_pipeline/src/prepass/mod.rs +++ b/crates/bevy_core_pipeline/src/prepass/mod.rs @@ -29,24 +29,25 @@ pub mod node; use core::ops::Range; +use crate::deferred::{DEFERRED_LIGHTING_PASS_ID_FORMAT, DEFERRED_PREPASS_FORMAT}; use bevy_asset::UntypedAssetId; use 
bevy_ecs::prelude::*; use bevy_math::Mat4; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; +use bevy_render::render_phase::PhaseItemBinKey; +use bevy_render::sync_world::MainEntity; use bevy_render::{ render_phase::{ BinnedPhaseItem, CachedRenderPipelinePhaseItem, DrawFunctionId, PhaseItem, PhaseItemExtraIndex, }, render_resource::{ - BindGroupId, CachedRenderPipelineId, ColorTargetState, ColorWrites, DynamicUniformBuffer, - Extent3d, ShaderType, TextureFormat, TextureView, + CachedRenderPipelineId, ColorTargetState, ColorWrites, DynamicUniformBuffer, Extent3d, + ShaderType, TextureFormat, TextureView, }, texture::ColorAttachment, }; -use crate::deferred::{DEFERRED_LIGHTING_PASS_ID_FORMAT, DEFERRED_PREPASS_FORMAT}; - pub const NORMAL_PREPASS_FORMAT: TextureFormat = TextureFormat::Rgb10a2Unorm; pub const MOTION_VECTOR_PREPASS_FORMAT: TextureFormat = TextureFormat::Rg16Float; @@ -143,40 +144,65 @@ pub struct Opaque3dPrepass { /// An entity from which Bevy fetches data common to all instances in this /// batch, such as the mesh. - pub representative_entity: Entity, - + pub representative_entity: (Entity, MainEntity), pub batch_range: Range, pub extra_index: PhaseItemExtraIndex, } -// TODO: Try interning these. -/// The data used to bin each opaque 3D object in the prepass and deferred pass. +/// Information that must be identical in order to place opaque meshes in the +/// same *batch set* in the prepass and deferred pass. +/// +/// A batch set is a set of batches that can be multi-drawn together, if +/// multi-draw is in use. #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct OpaqueNoLightmap3dBinKey { +pub struct OpaqueNoLightmap3dBatchSetKey { /// The ID of the GPU pipeline. pub pipeline: CachedRenderPipelineId, /// The function used to draw the mesh. pub draw_function: DrawFunctionId, + /// The ID of a bind group specific to the material. + /// + /// In the case of PBR, this is the `MaterialBindGroupIndex`. + pub material_bind_group_index: Option, +} + +// TODO: Try interning these. +/// The data used to bin each opaque 3D object in the prepass and deferred pass. +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct OpaqueNoLightmap3dBinKey { + /// The key of the *batch set*. + /// + /// As batches belong to a batch set, meshes in a batch must obviously be + /// able to be placed in a single batch set. + pub batch_set_key: OpaqueNoLightmap3dBatchSetKey, + /// The ID of the asset. pub asset_id: UntypedAssetId, +} - /// The ID of a bind group specific to the material. - /// - /// In the case of PBR, this is the `MaterialBindGroupId`. 
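The batch set / bin hierarchy introduced above can be illustrated with a small self-contained toy. This is not the Bevy API; the integer ids are stand-ins for `CachedRenderPipelineId`, `DrawFunctionId`, the material bind group index, and `UntypedAssetId`. The idea it sketches is the one described in the doc comments: items whose batch set keys are identical may be multi-drawn together, and within one batch set the asset id splits items into bins.

use std::collections::BTreeMap;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct BatchSetKey {
    pipeline: u32,
    draw_function: u32,
    material_bind_group_index: Option<u32>,
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct BinKey {
    batch_set_key: BatchSetKey,
    asset_id: u64,
}

fn main() {
    // (entity, asset_id, batch set key) for a handful of meshes.
    let meshes = [
        (10u32, 1u64, BatchSetKey { pipeline: 7, draw_function: 1, material_bind_group_index: Some(0) }),
        (11, 1, BatchSetKey { pipeline: 7, draw_function: 1, material_bind_group_index: Some(0) }),
        (12, 2, BatchSetKey { pipeline: 7, draw_function: 1, material_bind_group_index: Some(0) }),
        (13, 3, BatchSetKey { pipeline: 9, draw_function: 1, material_bind_group_index: None }),
    ];

    // Group by batch set key first, then by bin key, roughly how a binned
    // render phase organizes its items.
    let mut batch_sets: BTreeMap<BatchSetKey, BTreeMap<BinKey, Vec<u32>>> = BTreeMap::new();
    for (entity, asset_id, batch_set_key) in meshes {
        let bin_key = BinKey { batch_set_key, asset_id };
        batch_sets
            .entry(batch_set_key)
            .or_default()
            .entry(bin_key)
            .or_default()
            .push(entity);
    }

    // Two batch sets: the first holds two bins (assets 1 and 2) whose three
    // instances could share one multi-draw; the last mesh lands in its own set.
    assert_eq!(batch_sets.len(), 2);
    assert_eq!(batch_sets.values().next().unwrap().len(), 2);
}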
- pub material_bind_group_id: Option, +impl PhaseItemBinKey for OpaqueNoLightmap3dBinKey { + type BatchSetKey = OpaqueNoLightmap3dBatchSetKey; + + fn get_batch_set_key(&self) -> Option { + Some(self.batch_set_key.clone()) + } } impl PhaseItem for Opaque3dPrepass { #[inline] fn entity(&self) -> Entity { - self.representative_entity + self.representative_entity.0 + } + + fn main_entity(&self) -> MainEntity { + self.representative_entity.1 } #[inline] fn draw_function(&self) -> DrawFunctionId { - self.key.draw_function + self.key.batch_set_key.draw_function } #[inline] @@ -191,7 +217,7 @@ impl PhaseItem for Opaque3dPrepass { #[inline] fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } #[inline] @@ -206,7 +232,7 @@ impl BinnedPhaseItem for Opaque3dPrepass { #[inline] fn new( key: Self::BinKey, - representative_entity: Entity, + representative_entity: (Entity, MainEntity), batch_range: Range, extra_index: PhaseItemExtraIndex, ) -> Self { @@ -222,7 +248,7 @@ impl BinnedPhaseItem for Opaque3dPrepass { impl CachedRenderPipelinePhaseItem for Opaque3dPrepass { #[inline] fn cached_pipeline(&self) -> CachedRenderPipelineId { - self.key.pipeline + self.key.batch_set_key.pipeline } } @@ -233,7 +259,7 @@ impl CachedRenderPipelinePhaseItem for Opaque3dPrepass { /// Used to render all meshes with a material with an alpha mask. pub struct AlphaMask3dPrepass { pub key: OpaqueNoLightmap3dBinKey, - pub representative_entity: Entity, + pub representative_entity: (Entity, MainEntity), pub batch_range: Range, pub extra_index: PhaseItemExtraIndex, } @@ -241,12 +267,16 @@ pub struct AlphaMask3dPrepass { impl PhaseItem for AlphaMask3dPrepass { #[inline] fn entity(&self) -> Entity { - self.representative_entity + self.representative_entity.0 + } + + fn main_entity(&self) -> MainEntity { + self.representative_entity.1 } #[inline] fn draw_function(&self) -> DrawFunctionId { - self.key.draw_function + self.key.batch_set_key.draw_function } #[inline] @@ -261,7 +291,7 @@ impl PhaseItem for AlphaMask3dPrepass { #[inline] fn extra_index(&self) -> PhaseItemExtraIndex { - self.extra_index + self.extra_index.clone() } #[inline] @@ -276,7 +306,7 @@ impl BinnedPhaseItem for AlphaMask3dPrepass { #[inline] fn new( key: Self::BinKey, - representative_entity: Entity, + representative_entity: (Entity, MainEntity), batch_range: Range, extra_index: PhaseItemExtraIndex, ) -> Self { @@ -292,7 +322,7 @@ impl BinnedPhaseItem for AlphaMask3dPrepass { impl CachedRenderPipelinePhaseItem for AlphaMask3dPrepass { #[inline] fn cached_pipeline(&self) -> CachedRenderPipelineId { - self.key.pipeline + self.key.batch_set_key.pipeline } } diff --git a/crates/bevy_core_pipeline/src/skybox/mod.rs b/crates/bevy_core_pipeline/src/skybox/mod.rs index 59cfa908863c21..5ca7c3fce2d268 100644 --- a/crates/bevy_core_pipeline/src/skybox/mod.rs +++ b/crates/bevy_core_pipeline/src/skybox/mod.rs @@ -6,6 +6,7 @@ use bevy_ecs::{ schedule::IntoSystemConfigs, system::{Commands, Query, Res, ResMut, Resource}, }; +use bevy_image::{BevyDefault, Image}; use bevy_math::{Mat4, Quat}; use bevy_render::{ camera::Exposure, @@ -19,7 +20,7 @@ use bevy_render::{ *, }, renderer::RenderDevice, - texture::{BevyDefault, GpuImage, Image}, + texture::GpuImage, view::{ExtractedView, Msaa, ViewTarget, ViewUniform, ViewUniforms}, Render, RenderApp, RenderSet, }; @@ -233,6 +234,7 @@ impl SpecializedRenderPipeline for SkyboxPipeline { write_mask: ColorWrites::ALL, })], }), + zero_initialize_workgroup_memory: false, } } } diff --git 
a/crates/bevy_core_pipeline/src/skybox/prepass.rs b/crates/bevy_core_pipeline/src/skybox/prepass.rs index fb8df89b4b6023..c51e707808e93b 100644 --- a/crates/bevy_core_pipeline/src/skybox/prepass.rs +++ b/crates/bevy_core_pipeline/src/skybox/prepass.rs @@ -105,6 +105,7 @@ impl SpecializedRenderPipeline for SkyboxPrepassPipeline { entry_point: "fragment".into(), targets: prepass_target_descriptors(key.normal_prepass, true, false), }), + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/smaa/mod.rs b/crates/bevy_core_pipeline/src/smaa/mod.rs index a41a77c806844d..7471cdb09ab769 100644 --- a/crates/bevy_core_pipeline/src/smaa/mod.rs +++ b/crates/bevy_core_pipeline/src/smaa/mod.rs @@ -29,7 +29,12 @@ //! * Compatibility with SSAA and MSAA. //! //! [SMAA]: https://www.iryoku.com/smaa/ - +#[cfg(not(feature = "smaa_luts"))] +use crate::tonemapping::lut_placeholder; +use crate::{ + core_2d::graph::{Core2d, Node2d}, + core_3d::graph::{Core3d, Node3d}, +}; use bevy_app::{App, Plugin}; #[cfg(feature = "smaa_luts")] use bevy_asset::load_internal_binary_asset; @@ -44,6 +49,7 @@ use bevy_ecs::{ system::{lifetimeless::Read, Commands, Query, Res, ResMut, Resource}, world::{FromWorld, World}, }; +use bevy_image::{BevyDefault, Image}; use bevy_math::{vec4, Vec4}; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; use bevy_render::{ @@ -67,24 +73,12 @@ use bevy_render::{ VertexState, }, renderer::{RenderContext, RenderDevice, RenderQueue}, - texture::{BevyDefault, CachedTexture, GpuImage, Image, TextureCache}, + texture::{CachedTexture, GpuImage, TextureCache}, view::{ExtractedView, ViewTarget}, Render, RenderApp, RenderSet, }; -#[cfg(feature = "smaa_luts")] -use bevy_render::{ - render_asset::RenderAssetUsages, - texture::{CompressedImageFormats, ImageFormat, ImageSampler, ImageType}, -}; use bevy_utils::prelude::default; -#[cfg(not(feature = "smaa_luts"))] -use crate::tonemapping::lut_placeholder; -use crate::{ - core_2d::graph::{Core2d, Node2d}, - core_3d::graph::{Core3d, Node3d}, -}; - /// The handle of the `smaa.wgsl` shader. const SMAA_SHADER_HANDLE: Handle = Handle::weak_from_u128(12247928498010601081); /// The handle of the area LUT, a KTX2 format texture that SMAA uses internally. 
@@ -306,11 +300,11 @@ impl Plugin for SmaaPlugin { #[cfg(all(debug_assertions, feature = "dds"))] "SMAAAreaLUT".to_owned(), bytes, - ImageType::Format(ImageFormat::Ktx2), - CompressedImageFormats::NONE, + bevy_image::ImageType::Format(bevy_image::ImageFormat::Ktx2), + bevy_image::CompressedImageFormats::NONE, false, - ImageSampler::Default, - RenderAssetUsages::RENDER_WORLD, + bevy_image::ImageSampler::Default, + bevy_asset::RenderAssetUsages::RENDER_WORLD, ) .expect("Failed to load SMAA area LUT") ); @@ -324,11 +318,11 @@ impl Plugin for SmaaPlugin { #[cfg(all(debug_assertions, feature = "dds"))] "SMAASearchLUT".to_owned(), bytes, - ImageType::Format(ImageFormat::Ktx2), - CompressedImageFormats::NONE, + bevy_image::ImageType::Format(bevy_image::ImageFormat::Ktx2), + bevy_image::CompressedImageFormats::NONE, false, - ImageSampler::Default, - RenderAssetUsages::RENDER_WORLD, + bevy_image::ImageSampler::Default, + bevy_asset::RenderAssetUsages::RENDER_WORLD, ) .expect("Failed to load SMAA search LUT") ); @@ -512,6 +506,7 @@ impl SpecializedRenderPipeline for SmaaEdgeDetectionPipeline { bias: default(), }), multisample: MultisampleState::default(), + zero_initialize_workgroup_memory: false, } } } @@ -571,6 +566,7 @@ impl SpecializedRenderPipeline for SmaaBlendingWeightCalculationPipeline { bias: default(), }), multisample: MultisampleState::default(), + zero_initialize_workgroup_memory: false, } } } @@ -607,6 +603,7 @@ impl SpecializedRenderPipeline for SmaaNeighborhoodBlendingPipeline { primitive: PrimitiveState::default(), depth_stencil: None, multisample: MultisampleState::default(), + zero_initialize_workgroup_memory: false, } } } diff --git a/crates/bevy_core_pipeline/src/taa/mod.rs b/crates/bevy_core_pipeline/src/taa/mod.rs index f3895d1e26241a..67422b4a7221a1 100644 --- a/crates/bevy_core_pipeline/src/taa/mod.rs +++ b/crates/bevy_core_pipeline/src/taa/mod.rs @@ -10,12 +10,13 @@ use bevy_app::{App, Plugin}; use bevy_asset::{load_internal_asset, Handle}; use bevy_core::FrameCount; use bevy_ecs::{ - prelude::{Bundle, Component, Entity, ReflectComponent}, + prelude::{require, Bundle, Component, Entity, ReflectComponent}, query::{QueryItem, With}, schedule::IntoSystemConfigs, system::{Commands, Query, Res, ResMut, Resource}, world::{FromWorld, World}, }; +use bevy_image::BevyDefault as _; use bevy_math::vec2; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; use bevy_render::{ @@ -32,9 +33,10 @@ use bevy_render::{ TextureDimension, TextureFormat, TextureSampleType, TextureUsages, }, renderer::{RenderContext, RenderDevice}, - texture::{BevyDefault, CachedTexture, TextureCache}, + sync_component::SyncComponentPlugin, + sync_world::RenderEntity, + texture::{CachedTexture, TextureCache}, view::{ExtractedView, Msaa, ViewTarget}, - world_sync::RenderEntity, ExtractSchedule, MainWorld, Render, RenderApp, RenderSet, }; use bevy_utils::tracing::warn; @@ -52,6 +54,8 @@ impl Plugin for TemporalAntiAliasPlugin { app.register_type::(); + app.add_plugins(SyncComponentPlugin::::default()); + let Some(render_app) = app.get_sub_app_mut(RenderApp) else { return; }; @@ -352,13 +356,14 @@ impl SpecializedRenderPipeline for TaaPipeline { depth_stencil: None, multisample: MultisampleState::default(), push_constant_ranges: Vec::new(), + zero_initialize_workgroup_memory: false, } } } fn extract_taa_settings(mut commands: Commands, mut main_world: ResMut) { let mut cameras_3d = main_world.query_filtered::<( - &RenderEntity, + RenderEntity, &Camera, &Projection, &mut TemporalAntiAliasing, @@ -373,11 +378,20 
@@ fn extract_taa_settings(mut commands: Commands, mut main_world: ResMut(); } } } diff --git a/crates/bevy_core_pipeline/src/tonemapping/mod.rs b/crates/bevy_core_pipeline/src/tonemapping/mod.rs index 7614b50153a40e..c6fb3217253f93 100644 --- a/crates/bevy_core_pipeline/src/tonemapping/mod.rs +++ b/crates/bevy_core_pipeline/src/tonemapping/mod.rs @@ -2,6 +2,7 @@ use crate::fullscreen_vertex_shader::fullscreen_shader_vertex_state; use bevy_app::prelude::*; use bevy_asset::{load_internal_asset, Assets, Handle}; use bevy_ecs::prelude::*; +use bevy_image::{CompressedImageFormats, Image, ImageSampler, ImageType}; use bevy_reflect::{std_traits::ReflectDefault, Reflect}; use bevy_render::{ camera::Camera, @@ -13,7 +14,7 @@ use bevy_render::{ *, }, renderer::RenderDevice, - texture::{CompressedImageFormats, FallbackImage, GpuImage, Image, ImageSampler, ImageType}, + texture::{FallbackImage, GpuImage}, view::{ExtractedView, ViewTarget, ViewUniform}, Render, RenderApp, RenderSet, }; @@ -263,7 +264,7 @@ impl SpecializedRenderPipeline for TonemappingPipeline { error!( "AgX tonemapping requires the `tonemapping_luts` feature. Either enable the `tonemapping_luts` feature for bevy in `Cargo.toml` (recommended), - or use a different `Tonemapping` method in your `Camera2dBundle`/`Camera3dBundle`." + or use a different `Tonemapping` method for your `Camera2d`/`Camera3d`." ); shader_defs.push("TONEMAP_METHOD_AGX".into()); } @@ -275,7 +276,7 @@ impl SpecializedRenderPipeline for TonemappingPipeline { error!( "TonyMcMapFace tonemapping requires the `tonemapping_luts` feature. Either enable the `tonemapping_luts` feature for bevy in `Cargo.toml` (recommended), - or use a different `Tonemapping` method in your `Camera2dBundle`/`Camera3dBundle`." + or use a different `Tonemapping` method for your `Camera2d`/`Camera3d`." ); shader_defs.push("TONEMAP_METHOD_TONY_MC_MAPFACE".into()); } @@ -284,7 +285,7 @@ impl SpecializedRenderPipeline for TonemappingPipeline { error!( "BlenderFilmic tonemapping requires the `tonemapping_luts` feature. Either enable the `tonemapping_luts` feature for bevy in `Cargo.toml` (recommended), - or use a different `Tonemapping` method in your `Camera2dBundle`/`Camera3dBundle`." + or use a different `Tonemapping` method for your `Camera2d`/`Camera3d`." 
); shader_defs.push("TONEMAP_METHOD_BLENDER_FILMIC".into()); } @@ -307,6 +308,7 @@ impl SpecializedRenderPipeline for TonemappingPipeline { depth_stencil: None, multisample: MultisampleState::default(), push_constant_ranges: Vec::new(), + zero_initialize_workgroup_memory: false, } } } @@ -432,14 +434,14 @@ pub fn get_lut_bind_group_layout_entries() -> [BindGroupLayoutEntryBuilder; 2] { // allow(dead_code) so it doesn't complain when the tonemapping_luts feature is disabled #[allow(dead_code)] fn setup_tonemapping_lut_image(bytes: &[u8], image_type: ImageType) -> Image { - let image_sampler = ImageSampler::Descriptor(bevy_render::texture::ImageSamplerDescriptor { + let image_sampler = ImageSampler::Descriptor(bevy_image::ImageSamplerDescriptor { label: Some("Tonemapping LUT sampler".to_string()), - address_mode_u: bevy_render::texture::ImageAddressMode::ClampToEdge, - address_mode_v: bevy_render::texture::ImageAddressMode::ClampToEdge, - address_mode_w: bevy_render::texture::ImageAddressMode::ClampToEdge, - mag_filter: bevy_render::texture::ImageFilterMode::Linear, - min_filter: bevy_render::texture::ImageFilterMode::Linear, - mipmap_filter: bevy_render::texture::ImageFilterMode::Linear, + address_mode_u: bevy_image::ImageAddressMode::ClampToEdge, + address_mode_v: bevy_image::ImageAddressMode::ClampToEdge, + address_mode_w: bevy_image::ImageAddressMode::ClampToEdge, + mag_filter: bevy_image::ImageFilterMode::Linear, + min_filter: bevy_image::ImageFilterMode::Linear, + mipmap_filter: bevy_image::ImageFilterMode::Linear, ..default() }); Image::from_buffer( diff --git a/crates/bevy_core_pipeline/src/upscaling/mod.rs b/crates/bevy_core_pipeline/src/upscaling/mod.rs index 42f6744bfb32c5..52369fca59abcd 100644 --- a/crates/bevy_core_pipeline/src/upscaling/mod.rs +++ b/crates/bevy_core_pipeline/src/upscaling/mod.rs @@ -43,31 +43,35 @@ fn prepare_view_upscaling_pipelines( blit_pipeline: Res, view_targets: Query<(Entity, &ViewTarget, Option<&ExtractedCamera>)>, ) { - let mut output_textures = HashSet::new(); + let mut output_textures = >::default(); for (entity, view_target, camera) in view_targets.iter() { let out_texture_id = view_target.out_texture().id(); - let blend_state = if let Some(ExtractedCamera { - output_mode: CameraOutputMode::Write { blend_state, .. }, - .. - }) = camera - { - match *blend_state { - None => { - // If we've already seen this output for a camera and it doesn't have a output blend - // mode configured, default to alpha blend so that we don't accidentally overwrite - // the output texture - if output_textures.contains(&out_texture_id) { - Some(BlendState::ALPHA_BLENDING) - } else { - None + let blend_state = if let Some(extracted_camera) = camera { + match extracted_camera.output_mode { + CameraOutputMode::Skip => None, + CameraOutputMode::Write { blend_state, .. 
} => { + let already_seen = output_textures.contains(&out_texture_id); + output_textures.insert(out_texture_id); + + match blend_state { + None => { + // If we've already seen this output for a camera and it doesn't have a output blend + // mode configured, default to alpha blend so that we don't accidentally overwrite + // the output texture + if already_seen { + Some(BlendState::ALPHA_BLENDING) + } else { + None + } + } + _ => blend_state, } } - _ => *blend_state, } } else { + output_textures.insert(out_texture_id); None }; - output_textures.insert(out_texture_id); let key = BlitPipelineKey { texture_format: view_target.out_texture_format(), diff --git a/crates/bevy_dev_tools/Cargo.toml b/crates/bevy_dev_tools/Cargo.toml index b06def4b3fd4e0..1d426853cee80a 100644 --- a/crates/bevy_dev_tools/Cargo.toml +++ b/crates/bevy_dev_tools/Cargo.toml @@ -9,35 +9,23 @@ license = "MIT OR Apache-2.0" keywords = ["bevy"] [features] -default = ["bevy_ui_debug"] bevy_ci_testing = ["serde", "ron"] -bevy_ui_debug = [] [dependencies] # bevy bevy_app = { path = "../bevy_app", version = "0.15.0-dev" } bevy_asset = { path = "../bevy_asset", version = "0.15.0-dev" } bevy_color = { path = "../bevy_color", version = "0.15.0-dev" } -bevy_core = { path = "../bevy_core", version = "0.15.0-dev" } -bevy_core_pipeline = { path = "../bevy_core_pipeline", version = "0.15.0-dev" } bevy_diagnostic = { path = "../bevy_diagnostic", version = "0.15.0-dev" } bevy_ecs = { path = "../bevy_ecs", version = "0.15.0-dev" } -bevy_gizmos = { path = "../bevy_gizmos", version = "0.15.0-dev", features = [ - "bevy_render", -] } bevy_hierarchy = { path = "../bevy_hierarchy", version = "0.15.0-dev" } bevy_input = { path = "../bevy_input", version = "0.15.0-dev" } -bevy_math = { path = "../bevy_math", version = "0.15.0-dev" } -bevy_reflect = { path = "../bevy_reflect", version = "0.15.0-dev" } bevy_render = { path = "../bevy_render", version = "0.15.0-dev" } bevy_time = { path = "../bevy_time", version = "0.15.0-dev" } -bevy_transform = { path = "../bevy_transform", version = "0.15.0-dev" } -bevy_ui = { path = "../bevy_ui", version = "0.15.0-dev", features = [ - "bevy_text", -] } +bevy_text = { path = "../bevy_text", version = "0.15.0-dev" } +bevy_ui = { path = "../bevy_ui", version = "0.15.0-dev" } bevy_utils = { path = "../bevy_utils", version = "0.15.0-dev" } bevy_window = { path = "../bevy_window", version = "0.15.0-dev" } -bevy_text = { path = "../bevy_text", version = "0.15.0-dev" } bevy_state = { path = "../bevy_state", version = "0.15.0-dev" } # other diff --git a/crates/bevy_dev_tools/src/ci_testing/mod.rs b/crates/bevy_dev_tools/src/ci_testing/mod.rs index 5c85aeff483403..9f31db21408925 100644 --- a/crates/bevy_dev_tools/src/ci_testing/mod.rs +++ b/crates/bevy_dev_tools/src/ci_testing/mod.rs @@ -17,7 +17,7 @@ use core::time::Duration; /// (`ci_testing_config.ron` by default) and executes its specified actions. For a reference of the /// allowed configuration, see [`CiTestingConfig`]. /// -/// This plugin is included within `DefaultPlugins`, `HeadlessPlugins` and `MinimalPlugins` +/// This plugin is included within `DefaultPlugins` and `MinimalPlugins` /// when the `bevy_ci_testing` feature is enabled. /// It is recommended to only used this plugin during testing (manual or /// automatic), and disable it during regular development and for production builds. 
@@ -56,7 +56,8 @@ impl Plugin for CiTestingPlugin { systems::send_events .before(trigger_screenshots) .before(bevy_window::close_when_requested) - .in_set(SendEvents), + .in_set(SendEvents) + .ambiguous_with_all(), ); // The offending system does not exist in the wasm32 target. diff --git a/crates/bevy_dev_tools/src/fps_overlay.rs b/crates/bevy_dev_tools/src/fps_overlay.rs index 97d0c3989a79de..f970fc8f434c85 100644 --- a/crates/bevy_dev_tools/src/fps_overlay.rs +++ b/crates/bevy_dev_tools/src/fps_overlay.rs @@ -7,16 +7,17 @@ use bevy_diagnostic::{DiagnosticsStore, FrameTimeDiagnosticsPlugin}; use bevy_ecs::{ change_detection::DetectChangesMut, component::Component, + entity::Entity, query::With, schedule::{common_conditions::resource_changed, IntoSystemConfigs}, system::{Commands, Query, Res, Resource}, }; use bevy_hierarchy::{BuildChildren, ChildBuild}; use bevy_render::view::Visibility; -use bevy_text::{Font, Text, TextSection, TextStyle}; +use bevy_text::{Font, TextColor, TextFont, TextSpan}; use bevy_ui::{ - node_bundles::{NodeBundle, TextBundle}, - GlobalZIndex, PositionType, Style, + widget::{Text, TextUiWriter}, + GlobalZIndex, Node, PositionType, }; use bevy_utils::default; @@ -60,7 +61,9 @@ impl Plugin for FpsOverlayPlugin { #[derive(Resource, Clone)] pub struct FpsOverlayConfig { /// Configuration of text in the overlay. - pub text_config: TextStyle, + pub text_config: TextFont, + /// Color of text in the overlay. + pub text_color: Color, /// Displays the FPS overlay if true. pub enabled: bool, } @@ -68,11 +71,12 @@ pub struct FpsOverlayConfig { impl Default for FpsOverlayConfig { fn default() -> Self { FpsOverlayConfig { - text_config: TextStyle { + text_config: TextFont { font: Handle::::default(), font_size: 32.0, - color: Color::WHITE, + ..default() }, + text_color: Color::WHITE, enabled: true, } } @@ -84,33 +88,34 @@ struct FpsText; fn setup(mut commands: Commands, overlay_config: Res) { commands .spawn(( - NodeBundle { - style: Style { - // We need to make sure the overlay doesn't affect the position of other UI nodes - position_type: PositionType::Absolute, - ..default() - }, - // Render overlay on top of everything + Node { + // We need to make sure the overlay doesn't affect the position of other UI nodes + position_type: PositionType::Absolute, ..default() }, + // Render overlay on top of everything GlobalZIndex(FPS_OVERLAY_ZINDEX), )) - .with_children(|c| { - c.spawn(( - TextBundle::from_sections([ - TextSection::new("FPS: ", overlay_config.text_config.clone()), - TextSection::from_style(overlay_config.text_config.clone()), - ]), + .with_children(|p| { + p.spawn(( + Text::new("FPS: "), + overlay_config.text_config.clone(), + TextColor(overlay_config.text_color), FpsText, - )); + )) + .with_child((TextSpan::default(), overlay_config.text_config.clone())); }); } -fn update_text(diagnostic: Res, mut query: Query<&mut Text, With>) { - for mut text in &mut query { +fn update_text( + diagnostic: Res, + query: Query>, + mut writer: TextUiWriter, +) { + for entity in &query { if let Some(fps) = diagnostic.get(&FrameTimeDiagnosticsPlugin::FPS) { if let Some(value) = fps.smoothed() { - text.sections[1].value = format!("{value:.2}"); + *writer.text(entity, 1) = format!("{value:.2}"); } } } @@ -118,12 +123,14 @@ fn update_text(diagnostic: Res, mut query: Query<&mut Text, Wi fn customize_text( overlay_config: Res, - mut query: Query<&mut Text, With>, + query: Query>, + mut writer: TextUiWriter, ) { - for mut text in &mut query { - for section in text.sections.iter_mut() { - 
section.style = overlay_config.text_config.clone(); - } + for entity in &query { + writer.for_each_font(entity, |mut font| { + *font = overlay_config.text_config.clone(); + }); + writer.for_each_color(entity, |mut color| color.0 = overlay_config.text_color); } } diff --git a/crates/bevy_dev_tools/src/lib.rs b/crates/bevy_dev_tools/src/lib.rs index d5563a0bd946a8..b49604e6c885d1 100644 --- a/crates/bevy_dev_tools/src/lib.rs +++ b/crates/bevy_dev_tools/src/lib.rs @@ -15,9 +15,6 @@ pub mod ci_testing; pub mod fps_overlay; -#[cfg(feature = "bevy_ui_debug")] -pub mod ui_debug_overlay; - pub mod states; /// Enables developer tools in an [`App`]. This plugin is added automatically with `bevy_dev_tools` diff --git a/crates/bevy_dev_tools/src/ui_debug_overlay/inset.rs b/crates/bevy_dev_tools/src/ui_debug_overlay/inset.rs deleted file mode 100644 index 92522f6fbe68f8..00000000000000 --- a/crates/bevy_dev_tools/src/ui_debug_overlay/inset.rs +++ /dev/null @@ -1,192 +0,0 @@ -use bevy_color::Color; -use bevy_gizmos::{config::GizmoConfigGroup, prelude::Gizmos}; -use bevy_math::{Vec2, Vec2Swizzles}; -use bevy_reflect::Reflect; -use bevy_transform::prelude::GlobalTransform; -use bevy_utils::HashMap; - -use super::{CameraQuery, LayoutRect}; - -// Function used here so we don't need to redraw lines that are fairly close to each other. -fn approx_eq(compared: f32, other: f32) -> bool { - (compared - other).abs() < 0.001 -} - -fn rect_border_axis(rect: LayoutRect) -> (f32, f32, f32, f32) { - let pos = rect.pos; - let size = rect.size; - let offset = pos + size; - (pos.x, offset.x, pos.y, offset.y) -} - -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug)] -enum Dir { - Start, - End, -} -impl Dir { - const fn increments(self) -> i64 { - match self { - Dir::Start => 1, - Dir::End => -1, - } - } -} -impl From for Dir { - fn from(value: i64) -> Self { - if value.is_positive() { - Dir::Start - } else { - Dir::End - } - } -} -/// Collection of axis aligned "lines" (actually just their coordinate on -/// a given axis). -#[derive(Debug, Clone)] -struct DrawnLines { - lines: HashMap, - width: f32, -} -#[allow(clippy::cast_precision_loss, clippy::cast_possible_truncation)] -impl DrawnLines { - fn new(width: f32) -> Self { - DrawnLines { - lines: HashMap::new(), - width, - } - } - /// Return `value` offset by as many `increment`s as necessary to make it - /// not overlap with already drawn lines. - fn inset(&self, value: f32) -> f32 { - let scaled = value / self.width; - let fract = scaled.fract(); - let mut on_grid = scaled.floor() as i64; - for _ in 0..10 { - let Some(dir) = self.lines.get(&on_grid) else { - break; - }; - // TODO(clean): This fixes a panic, but I'm not sure how valid this is - let Some(added) = on_grid.checked_add(dir.increments()) else { - break; - }; - on_grid = added; - } - ((on_grid as f32) + fract) * self.width - } - /// Remove a line from the collection of drawn lines. - /// - /// Typically, we only care for pre-existing lines when drawing the children - /// of a container, nothing more. So we remove it after we are done with - /// the children. - fn remove(&mut self, value: f32, increment: i64) { - let mut on_grid = (value / self.width).floor() as i64; - loop { - // TODO(clean): This fixes a panic, but I'm not sure how valid this is - let Some(next_cell) = on_grid.checked_add(increment) else { - return; - }; - if !self.lines.contains_key(&next_cell) { - self.lines.remove(&on_grid); - return; - } - on_grid = next_cell; - } - } - /// Add a line from the collection of drawn lines. 
- fn add(&mut self, value: f32, increment: i64) { - let mut on_grid = (value / self.width).floor() as i64; - loop { - let old_value = self.lines.insert(on_grid, increment.into()); - if old_value.is_none() { - return; - } - // TODO(clean): This fixes a panic, but I'm not sure how valid this is - let Some(added) = on_grid.checked_add(increment) else { - return; - }; - on_grid = added; - } - } -} - -#[derive(GizmoConfigGroup, Reflect, Default)] -pub struct UiGizmosDebug; - -pub(super) struct InsetGizmo<'w, 's> { - draw: Gizmos<'w, 's, UiGizmosDebug>, - cam: CameraQuery<'w, 's>, - known_y: DrawnLines, - known_x: DrawnLines, -} -impl<'w, 's> InsetGizmo<'w, 's> { - pub(super) fn new( - draw: Gizmos<'w, 's, UiGizmosDebug>, - cam: CameraQuery<'w, 's>, - line_width: f32, - ) -> Self { - InsetGizmo { - draw, - cam, - known_y: DrawnLines::new(line_width), - known_x: DrawnLines::new(line_width), - } - } - fn relative(&self, mut position: Vec2) -> Vec2 { - let zero = GlobalTransform::IDENTITY; - let Ok(cam) = self.cam.get_single() else { - return Vec2::ZERO; - }; - if let Ok(new_position) = cam.world_to_viewport(&zero, position.extend(0.)) { - position = new_position; - }; - position.xy() - } - fn line_2d(&mut self, mut start: Vec2, mut end: Vec2, color: Color) { - if approx_eq(start.x, end.x) { - start.x = self.known_x.inset(start.x); - end.x = start.x; - } else if approx_eq(start.y, end.y) { - start.y = self.known_y.inset(start.y); - end.y = start.y; - } - let (start, end) = (self.relative(start), self.relative(end)); - self.draw.line_2d(start, end, color); - } - pub(super) fn set_scope(&mut self, rect: LayoutRect) { - let (left, right, top, bottom) = rect_border_axis(rect); - self.known_x.add(left, 1); - self.known_x.add(right, -1); - self.known_y.add(top, 1); - self.known_y.add(bottom, -1); - } - pub(super) fn clear_scope(&mut self, rect: LayoutRect) { - let (left, right, top, bottom) = rect_border_axis(rect); - self.known_x.remove(left, 1); - self.known_x.remove(right, -1); - self.known_y.remove(top, 1); - self.known_y.remove(bottom, -1); - } - pub(super) fn rect_2d(&mut self, rect: LayoutRect, color: Color) { - let (left, right, top, bottom) = rect_border_axis(rect); - if approx_eq(left, right) { - self.line_2d(Vec2::new(left, top), Vec2::new(left, bottom), color); - } else if approx_eq(top, bottom) { - self.line_2d(Vec2::new(left, top), Vec2::new(right, top), color); - } else { - let inset_x = |v| self.known_x.inset(v); - let inset_y = |v| self.known_y.inset(v); - let (left, right) = (inset_x(left), inset_x(right)); - let (top, bottom) = (inset_y(top), inset_y(bottom)); - let strip = [ - Vec2::new(left, top), - Vec2::new(left, bottom), - Vec2::new(right, bottom), - Vec2::new(right, top), - Vec2::new(left, top), - ]; - self.draw - .linestrip_2d(strip.map(|v| self.relative(v)), color); - } - } -} diff --git a/crates/bevy_dev_tools/src/ui_debug_overlay/mod.rs b/crates/bevy_dev_tools/src/ui_debug_overlay/mod.rs deleted file mode 100644 index d84143943c9732..00000000000000 --- a/crates/bevy_dev_tools/src/ui_debug_overlay/mod.rs +++ /dev/null @@ -1,280 +0,0 @@ -//! A visual representation of UI node sizes. 
-use core::any::{Any, TypeId}; - -use bevy_app::{App, Plugin, PostUpdate}; -use bevy_color::Hsla; -use bevy_core::Name; -use bevy_core_pipeline::core_2d::Camera2dBundle; -use bevy_ecs::{prelude::*, system::SystemParam}; -use bevy_gizmos::{config::GizmoConfigStore, prelude::Gizmos, AppGizmoBuilder}; -use bevy_hierarchy::{Children, Parent}; -use bevy_math::{Vec2, Vec3Swizzles}; -use bevy_render::{ - camera::RenderTarget, - prelude::*, - view::{RenderLayers, VisibilitySystems}, -}; -use bevy_transform::{prelude::GlobalTransform, TransformSystem}; -use bevy_ui::{DefaultUiCamera, Display, Node, Style, TargetCamera, UiScale}; -use bevy_utils::{default, warn_once}; -use bevy_window::{PrimaryWindow, Window, WindowRef}; - -use inset::InsetGizmo; - -use self::inset::UiGizmosDebug; - -mod inset; - -/// The [`Camera::order`] index used by the layout debug camera. -pub const LAYOUT_DEBUG_CAMERA_ORDER: isize = 255; -/// The [`RenderLayers`] used by the debug gizmos and the debug camera. -pub const LAYOUT_DEBUG_LAYERS: RenderLayers = RenderLayers::layer(16); - -#[derive(Clone, Copy)] -struct LayoutRect { - pos: Vec2, - size: Vec2, -} - -impl LayoutRect { - fn new(trans: &GlobalTransform, node: &Node, scale: f32) -> Self { - let mut this = Self { - pos: trans.translation().xy() * scale, - size: node.size() * scale, - }; - this.pos -= this.size / 2.; - this - } -} - -#[derive(Component, Debug, Clone, Default)] -struct DebugOverlayCamera; - -/// The debug overlay options. -#[derive(Resource, Clone, Default)] -pub struct UiDebugOptions { - /// Whether the overlay is enabled. - pub enabled: bool, - layout_gizmos_camera: Option, -} -impl UiDebugOptions { - /// This will toggle the enabled field, setting it to false if true and true if false. - pub fn toggle(&mut self) { - self.enabled = !self.enabled; - } -} - -/// The system responsible to change the [`Camera`] config based on changes in [`UiDebugOptions`] and [`GizmoConfig`](bevy_gizmos::prelude::GizmoConfig). -fn update_debug_camera( - mut gizmo_config: ResMut, - mut options: ResMut, - mut cmds: Commands, - mut debug_cams: Query<&mut Camera, With>, -) { - if !options.is_changed() && !gizmo_config.is_changed() { - return; - } - if !options.enabled { - let Some(cam) = options.layout_gizmos_camera else { - return; - }; - let Ok(mut cam) = debug_cams.get_mut(cam) else { - return; - }; - cam.is_active = false; - if let Some((config, _)) = gizmo_config.get_config_mut_dyn(&TypeId::of::()) { - config.enabled = false; - } - } else { - let spawn_cam = || { - cmds.spawn(( - Camera2dBundle { - projection: OrthographicProjection { - far: 1000.0, - viewport_origin: Vec2::new(0.0, 0.0), - ..OrthographicProjection::default_3d() - }, - camera: Camera { - order: LAYOUT_DEBUG_CAMERA_ORDER, - clear_color: ClearColorConfig::None, - ..default() - }, - ..default() - }, - LAYOUT_DEBUG_LAYERS.clone(), - DebugOverlayCamera, - Name::new("Layout Debug Camera"), - )) - .id() - }; - if let Some((config, _)) = gizmo_config.get_config_mut_dyn(&TypeId::of::()) { - config.enabled = true; - config.render_layers = LAYOUT_DEBUG_LAYERS.clone(); - } - let cam = *options.layout_gizmos_camera.get_or_insert_with(spawn_cam); - let Ok(mut cam) = debug_cams.get_mut(cam) else { - return; - }; - cam.is_active = true; - } -} - -/// The function that goes over every children of given [`Entity`], skipping the not visible ones and drawing the gizmos outlines. 
-fn outline_nodes(outline: &OutlineParam, draw: &mut InsetGizmo, this_entity: Entity, scale: f32) { - let Ok(to_iter) = outline.children.get(this_entity) else { - return; - }; - - for (entity, trans, node, style, children) in outline.nodes.iter_many(to_iter) { - if style.is_none() || style.is_some_and(|s| matches!(s.display, Display::None)) { - continue; - } - - if let Ok(view_visibility) = outline.view_visibility.get(entity) { - if !view_visibility.get() { - continue; - } - } - let rect = LayoutRect::new(trans, node, scale); - outline_node(entity, rect, draw); - if children.is_some() { - outline_nodes(outline, draw, entity, scale); - } - draw.clear_scope(rect); - } -} - -type NodesQuery = ( - Entity, - &'static GlobalTransform, - &'static Node, - Option<&'static Style>, - Option<&'static Children>, -); - -#[derive(SystemParam)] -struct OutlineParam<'w, 's> { - gizmo_config: Res<'w, GizmoConfigStore>, - children: Query<'w, 's, &'static Children>, - nodes: Query<'w, 's, NodesQuery>, - view_visibility: Query<'w, 's, &'static ViewVisibility>, - ui_scale: Res<'w, UiScale>, -} - -type CameraQuery<'w, 's> = Query<'w, 's, &'static Camera, With>; - -#[derive(SystemParam)] -struct CameraParam<'w, 's> { - debug_camera: Query<'w, 's, &'static Camera, With>, - cameras: Query<'w, 's, &'static Camera, Without>, - primary_window: Query<'w, 's, &'static Window, With>, - default_ui_camera: DefaultUiCamera<'w, 's>, -} - -/// system responsible for drawing the gizmos lines around all the node roots, iterating recursively through all visible children. -fn outline_roots( - outline: OutlineParam, - draw: Gizmos, - cam: CameraParam, - roots: Query< - ( - Entity, - &GlobalTransform, - &Node, - Option<&ViewVisibility>, - Option<&TargetCamera>, - ), - Without, - >, - window: Query<&Window, With>, - nonprimary_windows: Query<&Window, Without>, - options: Res, -) { - if !options.enabled { - return; - } - if !nonprimary_windows.is_empty() { - warn_once!( - "The layout debug view only uses the primary window scale, \ - you might notice gaps between container lines" - ); - } - let window_scale = window.get_single().map_or(1., Window::scale_factor); - let scale_factor = outline.ui_scale.0; - - // We let the line be defined by the window scale alone - let line_width = outline - .gizmo_config - .get_config_dyn(&UiGizmosDebug.type_id()) - .map_or(2., |(config, _)| config.line_width) - / window_scale; - let mut draw = InsetGizmo::new(draw, cam.debug_camera, line_width); - for (entity, trans, node, view_visibility, maybe_target_camera) in &roots { - if let Some(view_visibility) = view_visibility { - // If the entity isn't visible, we will not draw any lines. - if !view_visibility.get() { - continue; - } - } - // We skip ui in other windows that are not the primary one - if let Some(camera_entity) = maybe_target_camera - .map(|target| target.0) - .or(cam.default_ui_camera.get()) - { - let Ok(camera) = cam.cameras.get(camera_entity) else { - // The camera wasn't found. Either the Camera don't exist or the Camera is the debug Camera, that we want to skip and warn - warn_once!("Camera {:?} wasn't found for debug overlay", camera_entity); - continue; - }; - match camera.target { - RenderTarget::Window(window_ref) => { - if let WindowRef::Entity(window_entity) = window_ref { - if cam.primary_window.get(window_entity).is_err() { - // This window isn't the primary, so we skip this root. - continue; - } - } - } - // Hard to know the results of this, better skip this target. 
- _ => continue, - } - } - - let rect = LayoutRect::new(trans, node, scale_factor); - outline_node(entity, rect, &mut draw); - outline_nodes(&outline, &mut draw, entity, scale_factor); - } -} - -/// Function responsible for drawing the gizmos lines around the given Entity -fn outline_node(entity: Entity, rect: LayoutRect, draw: &mut InsetGizmo) { - let color = Hsla::sequential_dispersed(entity.index()); - - draw.rect_2d(rect, color.into()); - draw.set_scope(rect); -} - -/// The debug overlay plugin. -/// -/// This spawns a new camera with a low order, and draws gizmo. -/// -/// Note that due to limitation with [`bevy_gizmos`], multiple windows with this feature -/// enabled isn't supported and the lines are only drawn in the [`PrimaryWindow`] -pub struct DebugUiPlugin; -impl Plugin for DebugUiPlugin { - fn build(&self, app: &mut App) { - app.init_resource::() - .init_gizmo_group::() - .add_systems( - PostUpdate, - ( - update_debug_camera, - outline_roots - .after(TransformSystem::TransformPropagate) - // This needs to run before VisibilityPropagate so it can relies on ViewVisibility - .before(VisibilitySystems::VisibilityPropagate), - ) - .chain(), - ); - } -} diff --git a/crates/bevy_diagnostic/Cargo.toml b/crates/bevy_diagnostic/Cargo.toml index 972493e22d1b1d..cc9d7f1019ee08 100644 --- a/crates/bevy_diagnostic/Cargo.toml +++ b/crates/bevy_diagnostic/Cargo.toml @@ -27,14 +27,14 @@ const-fnv1a-hash = "1.1.0" # macOS [target.'cfg(all(target_os="macos"))'.dependencies] # Some features of sysinfo are not supported by apple. This will disable those features on apple devices -sysinfo = { version = "0.31.0", optional = true, default-features = false, features = [ +sysinfo = { version = "0.32.0", optional = true, default-features = false, features = [ "apple-app-store", "system", ] } # Only include when on linux/windows/android/freebsd [target.'cfg(any(target_os = "linux", target_os = "windows", target_os = "android", target_os = "freebsd"))'.dependencies] -sysinfo = { version = "0.31.0", optional = true, default-features = false, features = [ +sysinfo = { version = "0.32.0", optional = true, default-features = false, features = [ "system", ] } diff --git a/crates/bevy_diagnostic/src/diagnostic.rs b/crates/bevy_diagnostic/src/diagnostic.rs index ccd6e17df175dd..c9fbe5d600eea6 100644 --- a/crates/bevy_diagnostic/src/diagnostic.rs +++ b/crates/bevy_diagnostic/src/diagnostic.rs @@ -3,7 +3,7 @@ use core::hash::{Hash, Hasher}; use bevy_app::{App, SubApp}; use bevy_ecs::system::{Deferred, Res, Resource, SystemBuffer, SystemParam}; -use bevy_utils::{hashbrown::HashMap, Duration, Instant, PassHash}; +use bevy_utils::{Duration, HashMap, Instant, PassHash}; use const_fnv1a_hash::fnv1a_hash_str_64; use crate::DEFAULT_MAX_HISTORY_LENGTH; diff --git a/crates/bevy_diagnostic/src/frame_time_diagnostics_plugin.rs b/crates/bevy_diagnostic/src/frame_time_diagnostics_plugin.rs index 99a72706176cf2..040a05773c830b 100644 --- a/crates/bevy_diagnostic/src/frame_time_diagnostics_plugin.rs +++ b/crates/bevy_diagnostic/src/frame_time_diagnostics_plugin.rs @@ -33,7 +33,7 @@ impl FrameTimeDiagnosticsPlugin { ) { diagnostics.add_measurement(&Self::FRAME_COUNT, || frame_count.0 as f64); - let delta_seconds = time.delta_seconds_f64(); + let delta_seconds = time.delta_secs_f64(); if delta_seconds == 0.0 { return; } diff --git a/crates/bevy_diagnostic/src/system_information_diagnostics_plugin.rs b/crates/bevy_diagnostic/src/system_information_diagnostics_plugin.rs index b108f34f3e5dae..975d0f04b745cc 100644 --- 
a/crates/bevy_diagnostic/src/system_information_diagnostics_plugin.rs +++ b/crates/bevy_diagnostic/src/system_information_diagnostics_plugin.rs @@ -119,7 +119,7 @@ pub mod internal { let thread_pool = AsyncComputeTaskPool::get(); // Only queue a new system refresh task when necessary - // Queueing earlier than that will not give new data + // Queuing earlier than that will not give new data if last_refresh.elapsed() > sysinfo::MINIMUM_CPU_UPDATE_INTERVAL // These tasks don't yield and will take up all of the task pool's // threads if we don't limit their amount. diff --git a/crates/bevy_ecs/Cargo.toml b/crates/bevy_ecs/Cargo.toml index 9b10c1f981e325..166c0178e463e9 100644 --- a/crates/bevy_ecs/Cargo.toml +++ b/crates/bevy_ecs/Cargo.toml @@ -8,7 +8,7 @@ repository = "https://github.com/bevyengine/bevy" license = "MIT OR Apache-2.0" keywords = ["ecs", "game", "bevy"] categories = ["game-engines", "data-structures"] -rust-version = "1.77.0" +rust-version = "1.81.0" [features] default = ["bevy_reflect"] @@ -18,6 +18,7 @@ bevy_debug_stepping = [] serialize = ["dep:serde"] track_change_detection = [] reflect_functions = ["bevy_reflect", "bevy_reflect/functions"] +detailed_trace = [] [dependencies] bevy_ptr = { path = "../bevy_ptr", version = "0.15.0-dev" } @@ -26,16 +27,23 @@ bevy_tasks = { path = "../bevy_tasks", version = "0.15.0-dev" } bevy_utils = { path = "../bevy_utils", version = "0.15.0-dev" } bevy_ecs_macros = { path = "macros", version = "0.15.0-dev" } -petgraph = "0.6" bitflags = "2.3" -concurrent-queue = "2.4.0" +concurrent-queue = "2.5.0" disqualified = "1.0" fixedbitset = "0.5" serde = { version = "1", optional = true, default-features = false } -thiserror = "1.0" +thiserror = { version = "2", default-features = false } +derive_more = { version = "1", default-features = false, features = [ + "from", + "display", + "into", + "as_ref", +] } nonmax = "0.5" arrayvec = { version = "0.7.4", optional = true } -smallvec = "1" +smallvec = { version = "1", features = ["union"] } +indexmap = { version = "2.5.0", default-features = false, features = ["std"] } +variadics_please = "1.0" [dev-dependencies] rand = "0.8" diff --git a/crates/bevy_ecs/README.md b/crates/bevy_ecs/README.md index a85fc07900a7bb..b947644bcd346c 100644 --- a/crates/bevy_ecs/README.md +++ b/crates/bevy_ecs/README.md @@ -315,7 +315,7 @@ struct MyEvent { let mut world = World::new(); -world.observe(|trigger: Trigger| { +world.add_observer(|trigger: Trigger| { println!("{}", trigger.event().message); }); @@ -339,9 +339,9 @@ struct Explode; let mut world = World::new(); let entity = world.spawn_empty().id(); -world.observe(|trigger: Trigger, mut commands: Commands| { - println!("Entity {:?} goes BOOM!", trigger.entity()); - commands.entity(trigger.entity()).despawn(); +world.add_observer(|trigger: Trigger, mut commands: Commands| { + println!("Entity {:?} goes BOOM!", trigger.target()); + commands.entity(trigger.target()).despawn(); }); world.flush(); diff --git a/crates/bevy_ecs/compile_fail/tests/ui/world_query_derive.rs b/crates/bevy_ecs/compile_fail/tests/ui/world_query_derive.rs index d990f59ecc3c37..44e43430473f9d 100644 --- a/crates/bevy_ecs/compile_fail/tests/ui/world_query_derive.rs +++ b/crates/bevy_ecs/compile_fail/tests/ui/world_query_derive.rs @@ -10,6 +10,27 @@ struct MutableUnmarked { a: &'static mut Foo, } +#[derive(QueryData)] +#[query_data(mut)] +//~^ ERROR: invalid attribute, expected `mutable` or `derive` +struct MutableInvalidAttribute { + a: &'static mut Foo, +} + +#[derive(QueryData)] 
+#[query_data(mutable(foo))] +//~^ ERROR: `mutable` does not take any arguments +struct MutableInvalidAttributeParameters { + a: &'static mut Foo, +} + +#[derive(QueryData)] +#[query_data(derive)] +//~^ ERROR: `derive` requires at least one argument +struct MutableMissingAttributeParameters { + a: &'static mut Foo, +} + #[derive(QueryData)] #[query_data(mutable)] struct MutableMarked { diff --git a/crates/bevy_ecs/macros/src/component.rs b/crates/bevy_ecs/macros/src/component.rs index de41596f97a8ed..84c69467a797f0 100644 --- a/crates/bevy_ecs/macros/src/component.rs +++ b/crates/bevy_ecs/macros/src/component.rs @@ -1,4 +1,4 @@ -use proc_macro::TokenStream; +use proc_macro::{TokenStream, TokenTree}; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::{quote, ToTokens}; use std::collections::HashSet; @@ -9,7 +9,7 @@ use syn::{ punctuated::Punctuated, spanned::Spanned, token::{Comma, Paren}, - DeriveInput, ExprPath, Ident, LitStr, Path, Result, + DeriveInput, ExprClosure, ExprPath, Ident, LitStr, Path, Result, }; pub fn derive_event(input: TokenStream) -> TokenStream { @@ -32,6 +32,7 @@ pub fn derive_event(input: TokenStream) -> TokenStream { impl #impl_generics #bevy_ecs_path::component::Component for #struct_name #type_generics #where_clause { const STORAGE_TYPE: #bevy_ecs_path::component::StorageType = #bevy_ecs_path::component::StorageType::SparseSet; + type Mutability = #bevy_ecs_path::component::Mutable; } }) } @@ -82,65 +83,80 @@ pub fn derive_component(input: TokenStream) -> TokenStream { for require in requires { let ident = &require.path; register_recursive_requires.push(quote! { - <#ident as Component>::register_required_components( + <#ident as #bevy_ecs_path::component::Component>::register_required_components( requiree, components, storages, required_components, - inheritance_depth + 1 + inheritance_depth + 1, + recursion_check_stack ); }); - if let Some(func) = &require.func { - register_required.push(quote! { - components.register_required_components_manual::( - storages, - required_components, - || { let x: #ident = #func().into(); x }, - inheritance_depth - ); - }); - } else { - register_required.push(quote! { - components.register_required_components_manual::( - storages, - required_components, - <#ident as Default>::default, - inheritance_depth - ); - }); + match &require.func { + Some(RequireFunc::Path(func)) => { + register_required.push(quote! { + components.register_required_components_manual::( + storages, + required_components, + || { let x: #ident = #func().into(); x }, + inheritance_depth, + recursion_check_stack + ); + }); + } + Some(RequireFunc::Closure(func)) => { + register_required.push(quote! { + components.register_required_components_manual::( + storages, + required_components, + || { let x: #ident = (#func)().into(); x }, + inheritance_depth, + recursion_check_stack + ); + }); + } + None => { + register_required.push(quote! { + components.register_required_components_manual::( + storages, + required_components, + <#ident as Default>::default, + inheritance_depth, + recursion_check_stack + ); + }); + } } } } let struct_name = &ast.ident; let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl(); - let required_component_docs = attrs.requires.map(|r| { - let paths = r - .iter() - .map(|r| format!("[`{}`]", r.path.to_token_stream())) - .collect::>() - .join(", "); - let doc = format!("Required Components: {paths}. \n\n A component's Required Components are inserted whenever it is inserted. 
Note that this will also insert the required components _of_ the required components, recursively, in depth-first order."); - quote! { - #[doc = #doc] - } - }); + let mutable_type = attrs + .immutable + .then_some(quote! { #bevy_ecs_path::component::Immutable }) + .unwrap_or(quote! { #bevy_ecs_path::component::Mutable }); // This puts `register_required` before `register_recursive_requires` to ensure that the constructors of _all_ top // level components are initialized first, giving them precedence over recursively defined constructors for the same component type TokenStream::from(quote! { - #required_component_docs impl #impl_generics #bevy_ecs_path::component::Component for #struct_name #type_generics #where_clause { const STORAGE_TYPE: #bevy_ecs_path::component::StorageType = #storage; + type Mutability = #mutable_type; fn register_required_components( requiree: #bevy_ecs_path::component::ComponentId, components: &mut #bevy_ecs_path::component::Components, storages: &mut #bevy_ecs_path::storage::Storages, required_components: &mut #bevy_ecs_path::component::RequiredComponents, inheritance_depth: u16, + recursion_check_stack: &mut Vec<#bevy_ecs_path::component::ComponentId> ) { + #bevy_ecs_path::component::enforce_no_required_components_recursion(components, recursion_check_stack); + let self_id = components.register_component::(storages); + recursion_check_stack.push(self_id); #(#register_required)* #(#register_recursive_requires)* + recursion_check_stack.pop(); } #[allow(unused_variables)] @@ -150,10 +166,44 @@ pub fn derive_component(input: TokenStream) -> TokenStream { #on_replace #on_remove } + + fn get_component_clone_handler() -> #bevy_ecs_path::component::ComponentCloneHandler { + use #bevy_ecs_path::component::{ComponentCloneViaClone, ComponentCloneBase}; + (&&&#bevy_ecs_path::component::ComponentCloneSpecializationWrapper::::default()) + .get_component_clone_handler() + } } }) } +pub fn document_required_components(attr: TokenStream, item: TokenStream) -> TokenStream { + let paths = parse_macro_input!(attr with Punctuated::::parse_terminated) + .iter() + .map(|r| format!("[`{}`]", r.path.to_token_stream())) + .collect::>() + .join(", "); + + let bevy_ecs_path = crate::bevy_ecs_path() + .to_token_stream() + .to_string() + .replace(' ', ""); + let required_components_path = bevy_ecs_path + "::component::Component#required-components"; + + // Insert information about required components after any existing doc comments + let mut out = TokenStream::new(); + let mut end_of_attributes_reached = false; + for tt in item { + if !end_of_attributes_reached & matches!(tt, TokenTree::Ident(_)) { + end_of_attributes_reached = true; + let doc: TokenStream = format!("#[doc = \"\n\n# Required Components\n{paths} \n\n A component's [required components]({required_components_path}) are inserted whenever it is inserted. 
Note that this will also insert the required components _of_ the required components, recursively, in depth-first order.\"]").parse().unwrap(); + out.extend(doc); + } + out.extend(Some(tt)); + } + + out +} + pub const COMPONENT: &str = "component"; pub const STORAGE: &str = "storage"; pub const REQUIRE: &str = "require"; @@ -163,6 +213,8 @@ pub const ON_INSERT: &str = "on_insert"; pub const ON_REPLACE: &str = "on_replace"; pub const ON_REMOVE: &str = "on_remove"; +pub const IMMUTABLE: &str = "immutable"; + struct Attrs { storage: StorageTy, requires: Option>, @@ -170,6 +222,7 @@ struct Attrs { on_insert: Option, on_replace: Option, on_remove: Option, + immutable: bool, } #[derive(Clone, Copy)] @@ -180,7 +233,12 @@ enum StorageTy { struct Require { path: Path, - func: Option, + func: Option, +} + +enum RequireFunc { + Path(Path), + Closure(ExprClosure), } // values for `storage` attribute @@ -195,6 +253,7 @@ fn parse_component_attr(ast: &DeriveInput) -> Result { on_replace: None, on_remove: None, requires: None, + immutable: false, }; let mut require_paths = HashSet::new(); @@ -224,6 +283,9 @@ fn parse_component_attr(ast: &DeriveInput) -> Result { } else if nested.path.is_ident(ON_REMOVE) { attrs.on_remove = Some(nested.value()?.parse::()?); Ok(()) + } else if nested.path.is_ident(IMMUTABLE) { + attrs.immutable = true; + Ok(()) } else { Err(nested.error("Unsupported attribute")) } @@ -256,8 +318,12 @@ impl Parse for Require { let func = if input.peek(Paren) { let content; parenthesized!(content in input); - let func = content.parse::()?; - Some(func) + if let Ok(func) = content.parse::() { + Some(RequireFunc::Closure(func)) + } else { + let func = content.parse::()?; + Some(RequireFunc::Path(func)) + } } else { None }; diff --git a/crates/bevy_ecs/macros/src/lib.rs b/crates/bevy_ecs/macros/src/lib.rs index 10f794075466ce..c3a256fef8629d 100644 --- a/crates/bevy_ecs/macros/src/lib.rs +++ b/crates/bevy_ecs/macros/src/lib.rs @@ -721,11 +721,19 @@ pub fn derive_resource(input: TokenStream) -> TokenStream { component::derive_resource(input) } -#[proc_macro_derive(Component, attributes(component, require))] +#[proc_macro_derive(Component, attributes(component))] pub fn derive_component(input: TokenStream) -> TokenStream { component::derive_component(input) } +/// Allows specifying a component's required components. +/// +/// See `Component` docs for usage. 
+#[proc_macro_attribute] +pub fn require(attr: TokenStream, item: TokenStream) -> TokenStream { + component::document_required_components(attr, item) +} + #[proc_macro_derive(States)] pub fn derive_states(input: TokenStream) -> TokenStream { states::derive_states(input) diff --git a/crates/bevy_ecs/macros/src/query_data.rs b/crates/bevy_ecs/macros/src/query_data.rs index 0a78b705a062ec..3f198b1ad1b18f 100644 --- a/crates/bevy_ecs/macros/src/query_data.rs +++ b/crates/bevy_ecs/macros/src/query_data.rs @@ -1,13 +1,10 @@ use bevy_macro_utils::ensure_no_collision; use proc_macro::TokenStream; use proc_macro2::{Ident, Span}; -use quote::{format_ident, quote, ToTokens}; +use quote::{format_ident, quote}; use syn::{ - parse::{Parse, ParseStream}, - parse_macro_input, parse_quote, - punctuated::Punctuated, - token::Comma, - Attribute, Data, DataStruct, DeriveInput, Field, Index, Meta, + parse_macro_input, parse_quote, punctuated::Punctuated, token, token::Comma, Attribute, Data, + DataStruct, DeriveInput, Field, Index, Meta, }; use crate::{ @@ -47,45 +44,29 @@ pub fn derive_query_data_impl(input: TokenStream) -> TokenStream { continue; } - attr.parse_args_with(|input: ParseStream| { - let meta = input.parse_terminated(Meta::parse, Comma)?; - for meta in meta { - let ident = meta.path().get_ident().unwrap_or_else(|| { - panic!( - "Unrecognized attribute: `{}`", - meta.path().to_token_stream() - ) - }); - if ident == MUTABLE_ATTRIBUTE_NAME { - if let Meta::Path(_) = meta { - attributes.is_mutable = true; - } else { - panic!( - "The `{MUTABLE_ATTRIBUTE_NAME}` attribute is expected to have no value or arguments", - ); - } - } - else if ident == DERIVE_ATTRIBUTE_NAME { - if let Meta::List(meta_list) = meta { - meta_list.parse_nested_meta(|meta| { - attributes.derive_args.push(Meta::Path(meta.path)); - Ok(()) - })?; - } else { - panic!( - "Expected a structured list within the `{DERIVE_ATTRIBUTE_NAME}` attribute", - ); - } + let result = attr.parse_nested_meta(|meta| { + if meta.path.is_ident(MUTABLE_ATTRIBUTE_NAME) { + attributes.is_mutable = true; + if meta.input.peek(token::Paren) { + Err(meta.error(format_args!("`{MUTABLE_ATTRIBUTE_NAME}` does not take any arguments"))) } else { - panic!( - "Unrecognized attribute: `{}`", - meta.path().to_token_stream() - ); + Ok(()) } + } else if meta.path.is_ident(DERIVE_ATTRIBUTE_NAME) { + meta.parse_nested_meta(|meta| { + attributes.derive_args.push(Meta::Path(meta.path)); + Ok(()) + }).map_err(|_| { + meta.error(format_args!("`{DERIVE_ATTRIBUTE_NAME}` requires at least one argument")) + }) + } else { + Err(meta.error(format_args!("invalid attribute, expected `{MUTABLE_ATTRIBUTE_NAME}` or `{DERIVE_ATTRIBUTE_NAME}`"))) } - Ok(()) - }) - .unwrap_or_else(|_| panic!("Invalid `{QUERY_DATA_ATTRIBUTE_NAME}` attribute format")); + }); + + if let Err(err) = result { + return err.to_compile_error().into(); + } } let path = bevy_ecs_path(); diff --git a/crates/bevy_ecs/src/archetype.rs b/crates/bevy_ecs/src/archetype.rs index 3f5456370258cd..022e550948d663 100644 --- a/crates/bevy_ecs/src/archetype.rs +++ b/crates/bevy_ecs/src/archetype.rs @@ -135,15 +135,15 @@ pub(crate) struct AddBundle { } impl AddBundle { - pub(crate) fn iter_inserted(&self) -> impl Iterator + '_ { + pub(crate) fn iter_inserted(&self) -> impl Iterator + Clone + '_ { self.added.iter().chain(self.existing.iter()).copied() } - pub(crate) fn iter_added(&self) -> impl Iterator + '_ { + pub(crate) fn iter_added(&self) -> impl Iterator + Clone + '_ { self.added.iter().copied() } - pub(crate) fn 
iter_existing(&self) -> impl Iterator + '_ { + pub(crate) fn iter_existing(&self) -> impl Iterator + Clone + '_ { self.existing.iter().copied() } } @@ -402,7 +402,7 @@ impl Archetype { // component in the `table_components` vector component_index .entry(component_id) - .or_insert_with(HashMap::new) + .or_default() .insert(id, ArchetypeRecord { column: Some(idx) }); } @@ -420,7 +420,7 @@ impl Archetype { ); component_index .entry(component_id) - .or_insert_with(HashMap::new) + .or_default() .insert(id, ArchetypeRecord { column: None }); } Self { @@ -489,7 +489,7 @@ impl Archetype { /// /// All of the IDs are unique. #[inline] - pub fn components(&self) -> impl Iterator + '_ { + pub fn components(&self) -> impl Iterator + Clone + '_ { self.components.indices() } diff --git a/crates/bevy_ecs/src/batching.rs b/crates/bevy_ecs/src/batching.rs index 4253d8af34c29f..cdffbfa05dd361 100644 --- a/crates/bevy_ecs/src/batching.rs +++ b/crates/bevy_ecs/src/batching.rs @@ -101,7 +101,7 @@ impl BatchingStrategy { ); let batches = thread_count * self.batches_per_thread; // Round up to the nearest batch size. - let batch_size = (max_items() + batches - 1) / batches; + let batch_size = max_items().div_ceil(batches); batch_size.clamp(self.batch_size_limits.start, self.batch_size_limits.end) } } diff --git a/crates/bevy_ecs/src/bundle.rs b/crates/bevy_ecs/src/bundle.rs index c90eb124d37180..496d1cbd712d23 100644 --- a/crates/bevy_ecs/src/bundle.rs +++ b/crates/bevy_ecs/src/bundle.rs @@ -21,9 +21,11 @@ use crate::{ world::{unsafe_world_cell::UnsafeWorldCell, ON_ADD, ON_INSERT, ON_REPLACE}, }; use bevy_ptr::{ConstNonNull, OwningPtr}; -use bevy_utils::{all_tuples, HashMap, HashSet, TypeIdMap}; +use bevy_utils::{HashMap, HashSet, TypeIdMap}; +#[cfg(feature = "track_change_detection")] use core::panic::Location; use core::{any::TypeId, ptr::NonNull}; +use variadics_please::all_tuples; /// The `Bundle` trait enables insertion and removal of [`Component`]s from an entity. /// @@ -31,7 +33,7 @@ use core::{any::TypeId, ptr::NonNull}; /// /// Each bundle represents a static set of [`Component`] types. /// Currently, bundles can only contain one of each [`Component`], and will -/// panic once initialised if this is not met. +/// panic once initialized if this is not met. /// /// ## Insertion /// @@ -54,9 +56,6 @@ use core::{any::TypeId, ptr::NonNull}; /// would create incoherent behavior. /// This would be unexpected if bundles were treated as an abstraction boundary, as /// the abstraction would be unmaintainable for these cases. -/// For example, both `Camera3dBundle` and `Camera2dBundle` contain the `CameraRenderGraph` -/// component, but specifying different render graphs to use. -/// If the bundles were both added to the same entity, only one of these two bundles would work. /// /// For this reason, there is intentionally no [`Query`] to match whether an entity /// contains the components of a bundle. @@ -79,8 +78,6 @@ use core::{any::TypeId, ptr::NonNull}; /// Additionally, [Tuples](`tuple`) of bundles are also [`Bundle`] (with up to 15 bundles). /// These bundles contain the items of the 'inner' bundles. /// This is a convenient shorthand which is primarily used when spawning entities. -/// For example, spawning an entity using the bundle `(SpriteBundle {...}, PlayerMarker)` -/// will spawn an entity with components required for a 2d sprite, and the `PlayerMarker` component. 
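
The bundle docs above note that tuples of bundles are themselves bundles and are the usual shorthand when spawning. A hedged sketch of that shorthand follows; the `Position` and `PlayerMarker` components are purely illustrative.

```rust
use bevy_ecs::prelude::*;

#[derive(Component)]
struct Position {
    x: f32,
    y: f32,
}

#[derive(Component)]
struct PlayerMarker;

fn main() {
    let mut world = World::new();
    // The tuple is itself a `Bundle`, so both components are inserted at once.
    let id = world
        .spawn((Position { x: 0.0, y: 0.0 }, PlayerMarker))
        .id();
    assert!(world.entity(id).contains::<PlayerMarker>());
    assert_eq!(world.entity(id).get::<Position>().unwrap().x, 0.0);
}
```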
/// /// [`unit`], otherwise known as [`()`](`unit`), is a [`Bundle`] containing no components (since it /// can also be considered as the empty tuple). @@ -230,6 +227,7 @@ unsafe impl Bundle for C { storages, required_components, 0, + &mut Vec::new(), ); } @@ -384,7 +382,7 @@ impl BundleInfo { if deduped.len() != component_ids.len() { // TODO: Replace with `Vec::partition_dedup` once https://github.com/rust-lang/rust/issues/54279 is stabilized - let mut seen = HashSet::new(); + let mut seen = >::default(); let mut dups = Vec::new(); for id in component_ids { if !seen.insert(id) { @@ -465,10 +463,9 @@ impl BundleInfo { } /// Returns an iterator over the [ID](ComponentId) of each component explicitly defined in this bundle (ex: this excludes Required Components). - /// To iterate all components contributed by this bundle (including Required Components), see [`BundleInfo::iter_contributed_components`] #[inline] - pub fn iter_explicit_components(&self) -> impl Iterator + '_ { + pub fn iter_explicit_components(&self) -> impl Iterator + Clone + '_ { self.explicit_components().iter().copied() } @@ -476,7 +473,7 @@ impl BundleInfo { /// /// To iterate only components explicitly defined in this bundle, see [`BundleInfo::iter_explicit_components`] #[inline] - pub fn iter_contributed_components(&self) -> impl Iterator + '_ { + pub fn iter_contributed_components(&self) -> impl Iterator + Clone + '_ { self.component_ids.iter().copied() } @@ -892,7 +889,7 @@ impl<'w> BundleInserter<'w> { location: EntityLocation, bundle: T, insert_mode: InsertMode, - caller: &'static Location<'static>, + #[cfg(feature = "track_change_detection")] caller: &'static Location<'static>, ) -> EntityLocation { let bundle_info = self.bundle_info.as_ref(); let add_bundle = self.add_bundle.as_ref(); @@ -906,20 +903,22 @@ impl<'w> BundleInserter<'w> { let mut deferred_world = self.world.into_deferred(); if insert_mode == InsertMode::Replace { - deferred_world.trigger_on_replace( - archetype, - entity, - add_bundle.iter_existing(), - caller, - ); if archetype.has_replace_observer() { deferred_world.trigger_observers( ON_REPLACE, entity, add_bundle.iter_existing(), + #[cfg(feature = "track_change_detection")] caller, ); } + deferred_world.trigger_on_replace( + archetype, + entity, + add_bundle.iter_existing(), + #[cfg(feature = "track_change_detection")] + caller, + ); } } @@ -1085,9 +1084,21 @@ impl<'w> BundleInserter<'w> { // SAFETY: All components in the bundle are guaranteed to exist in the World // as they must be initialized before creating the BundleInfo. 
unsafe { - deferred_world.trigger_on_add(new_archetype, entity, add_bundle.iter_added(), caller); + deferred_world.trigger_on_add( + new_archetype, + entity, + add_bundle.iter_added(), + #[cfg(feature = "track_change_detection")] + caller, + ); if new_archetype.has_add_observer() { - deferred_world.trigger_observers(ON_ADD, entity, add_bundle.iter_added(), caller); + deferred_world.trigger_observers( + ON_ADD, + entity, + add_bundle.iter_added(), + #[cfg(feature = "track_change_detection")] + caller, + ); } match insert_mode { InsertMode::Replace => { @@ -1096,6 +1107,7 @@ impl<'w> BundleInserter<'w> { new_archetype, entity, add_bundle.iter_inserted(), + #[cfg(feature = "track_change_detection")] caller, ); if new_archetype.has_insert_observer() { @@ -1103,6 +1115,7 @@ impl<'w> BundleInserter<'w> { ON_INSERT, entity, add_bundle.iter_inserted(), + #[cfg(feature = "track_change_detection")] caller, ); } @@ -1114,6 +1127,7 @@ impl<'w> BundleInserter<'w> { new_archetype, entity, add_bundle.iter_added(), + #[cfg(feature = "track_change_detection")] caller, ); if new_archetype.has_insert_observer() { @@ -1121,6 +1135,7 @@ impl<'w> BundleInserter<'w> { ON_INSERT, entity, add_bundle.iter_added(), + #[cfg(feature = "track_change_detection")] caller, ); } @@ -1197,11 +1212,25 @@ impl<'w> BundleSpawner<'w> { /// # Safety /// `entity` must be allocated (but non-existent), `T` must match this [`BundleInfo`]'s type #[inline] + #[track_caller] pub unsafe fn spawn_non_existent( &mut self, entity: Entity, bundle: T, - caller: &'static Location<'static>, + ) -> EntityLocation { + self.spawn_non_existent_with_caller( + entity, + bundle, + #[cfg(feature = "track_change_detection")] + Location::caller(), + ) + } + + pub(crate) unsafe fn spawn_non_existent_with_caller( + &mut self, + entity: Entity, + bundle: T, + #[cfg(feature = "track_change_detection")] caller: &'static Location<'static>, ) -> EntityLocation { // SAFETY: We do not make any structural changes to the archetype graph through self.world so these pointers always remain valid let bundle_info = self.bundle_info.as_ref(); @@ -1244,6 +1273,7 @@ impl<'w> BundleSpawner<'w> { archetype, entity, bundle_info.iter_contributed_components(), + #[cfg(feature = "track_change_detection")] caller, ); if archetype.has_add_observer() { @@ -1251,6 +1281,7 @@ impl<'w> BundleSpawner<'w> { ON_ADD, entity, bundle_info.iter_contributed_components(), + #[cfg(feature = "track_change_detection")] caller, ); } @@ -1258,6 +1289,7 @@ impl<'w> BundleSpawner<'w> { archetype, entity, bundle_info.iter_contributed_components(), + #[cfg(feature = "track_change_detection")] caller, ); if archetype.has_insert_observer() { @@ -1265,6 +1297,7 @@ impl<'w> BundleSpawner<'w> { ON_INSERT, entity, bundle_info.iter_contributed_components(), + #[cfg(feature = "track_change_detection")] caller, ); } @@ -1279,12 +1312,17 @@ impl<'w> BundleSpawner<'w> { pub unsafe fn spawn( &mut self, bundle: T, - caller: &'static Location<'static>, + #[cfg(feature = "track_change_detection")] caller: &'static Location<'static>, ) -> Entity { let entity = self.entities().alloc(); // SAFETY: entity is allocated (but non-existent), `T` matches this BundleInfo's type unsafe { - self.spawn_non_existent(entity, bundle, caller); + self.spawn_non_existent_with_caller( + entity, + bundle, + #[cfg(feature = "track_change_detection")] + caller, + ); } entity } @@ -1310,6 +1348,8 @@ pub struct Bundles { bundle_infos: Vec, /// Cache static [`BundleId`] bundle_ids: TypeIdMap, + /// Cache bundles, which contains both explicit 
and required components of [`Bundle`] + contributed_bundle_ids: TypeIdMap, /// Cache dynamic [`BundleId`] with multiple components dynamic_bundle_ids: HashMap, BundleId>, dynamic_bundle_storages: HashMap>, @@ -1359,6 +1399,36 @@ impl Bundles { id } + /// Registers a new [`BundleInfo`], which contains both explicit and required components for a statically known type. + /// + /// Also registers all the components in the bundle. + pub(crate) fn register_contributed_bundle_info( + &mut self, + components: &mut Components, + storages: &mut Storages, + ) -> BundleId { + if let Some(id) = self.contributed_bundle_ids.get(&TypeId::of::()).cloned() { + id + } else { + let explicit_bundle_id = self.register_info::(components, storages); + // SAFETY: reading from `explicit_bundle_id` and creating new bundle in same time. Its valid because bundle hashmap allow this + let id = unsafe { + let (ptr, len) = { + // SAFETY: `explicit_bundle_id` is valid and defined above + let contributed = self + .get_unchecked(explicit_bundle_id) + .contributed_components(); + (contributed.as_ptr(), contributed.len()) + }; + // SAFETY: this is sound because the contributed_components Vec for explicit_bundle_id will not be accessed mutably as + // part of init_dynamic_info. No mutable references will be created and the allocation will remain valid. + self.init_dynamic_info(components, core::slice::from_raw_parts(ptr, len)) + }; + self.contributed_bundle_ids.insert(TypeId::of::(), id); + id + } + } + /// # Safety /// A [`BundleInfo`] with the given [`BundleId`] must have been initialized for this instance of `Bundles`. pub(crate) unsafe fn get_unchecked(&self, id: BundleId) -> &BundleInfo { @@ -1403,8 +1473,11 @@ impl Bundles { .or_insert_with(|| { let (id, storages) = initialize_dynamic_bundle(bundle_infos, components, Vec::from(component_ids)); - self.dynamic_bundle_storages - .insert_unique_unchecked(id, storages); + // SAFETY: The ID always increases when new bundles are added, and so, the ID is unique. 
+ unsafe { + self.dynamic_bundle_storages + .insert_unique_unchecked(id, storages); + } (component_ids.into(), id) }); *bundle_id @@ -1476,20 +1549,35 @@ mod tests { mut world: DeferredWorld, _: Entity, _: ComponentId, - _: &'static Location<'static>, + _: Option<&'static Location<'static>>, ) { world.resource_mut::().assert_order(0); } - fn a_on_insert(mut world: DeferredWorld, _: T1, _: T2, _: &'static Location<'static>) { + fn a_on_insert( + mut world: DeferredWorld, + _: T1, + _: T2, + _: Option<&'static Location<'static>>, + ) { world.resource_mut::().assert_order(1); } - fn a_on_replace(mut world: DeferredWorld, _: T1, _: T2, _: &'static Location<'static>) { + fn a_on_replace( + mut world: DeferredWorld, + _: T1, + _: T2, + _: Option<&'static Location<'static>>, + ) { world.resource_mut::().assert_order(2); } - fn a_on_remove(mut world: DeferredWorld, _: T1, _: T2, _: &'static Location<'static>) { + fn a_on_remove( + mut world: DeferredWorld, + _: T1, + _: T2, + _: Option<&'static Location<'static>>, + ) { world.resource_mut::().assert_order(3); } diff --git a/crates/bevy_ecs/src/change_detection.rs b/crates/bevy_ecs/src/change_detection.rs index 5a3adac96fe20a..025c3804e73e25 100644 --- a/crates/bevy_ecs/src/change_detection.rs +++ b/crates/bevy_ecs/src/change_detection.rs @@ -1206,7 +1206,7 @@ mod tests { Mut, NonSendMut, Ref, ResMut, TicksMut, CHECK_TICK_THRESHOLD, MAX_CHANGE_AGE, }, component::{Component, ComponentTicks, Tick}, - system::{IntoSystem, Query, System}, + system::{IntoSystem, Single, System}, world::World, }; @@ -1236,12 +1236,12 @@ mod tests { #[test] fn change_expiration() { - fn change_detected(query: Query>) -> bool { - query.single().is_changed() + fn change_detected(query: Option>>) -> bool { + query.unwrap().is_changed() } - fn change_expired(query: Query>) -> bool { - query.single().is_changed() + fn change_expired(query: Option>>) -> bool { + query.unwrap().is_changed() } let mut world = World::new(); diff --git a/crates/bevy_ecs/src/component.rs b/crates/bevy_ecs/src/component.rs index f2b88b8b1516a7..415869fac54249 100644 --- a/crates/bevy_ecs/src/component.rs +++ b/crates/bevy_ecs/src/component.rs @@ -5,7 +5,7 @@ use crate::{ archetype::ArchetypeFlags, bundle::BundleInfo, change_detection::MAX_CHANGE_AGE, - entity::Entity, + entity::{Entity, EntityCloner}, query::DebugCheckedUnwrap, storage::{SparseSetIndex, SparseSets, Storages, Table, TableRow}, system::{Local, Resource, SystemParam}, @@ -26,8 +26,11 @@ use core::{ mem::needs_drop, panic::Location, }; +use disqualified::ShortName; use thiserror::Error; +pub use bevy_ecs_macros::require; + /// A data type that can be used to store data for an [entity]. /// /// `Component` is a [derivable trait]: this means that a data type can implement it by applying a `#[derive(Component)]` attribute to it. @@ -72,12 +75,18 @@ use thiserror::Error; /// /// # Component and data access /// +/// Components can be marked as immutable by adding the `#[component(immutable)]` +/// attribute when using the derive macro. +/// See the documentation for [`ComponentMutability`] for more details around this +/// feature. +/// /// See the [`entity`] module level documentation to learn how to add or remove components from an entity. /// /// See the documentation for [`Query`] to learn how to access component data from a system. 
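
The component docs above introduce the `#[component(immutable)]` attribute. Here is a minimal sketch, assuming the immutable-component support added in this diff: the component is never handed out as `&mut`, so an update is expressed by re-inserting a new value, which the `OnReplace`/`OnInsert` hooks get to observe. The `Score` component is illustrative.

```rust
use bevy_ecs::prelude::*;

#[derive(Component, Debug, PartialEq)]
#[component(immutable)]
struct Score(u32);

fn main() {
    let mut world = World::new();
    let id = world.spawn(Score(1)).id();

    // No `&mut Score` can be obtained; an update is a fresh insert, which the
    // OnReplace/OnInsert hooks can observe.
    world.entity_mut(id).insert(Score(2));
    assert_eq!(world.entity(id).get::<Score>(), Some(&Score(2)));
}
```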
/// /// [`entity`]: crate::entity#usage /// [`Query`]: crate::system::Query +/// [`ComponentMutability`]: crate::component::ComponentMutability /// /// # Choosing a storage type /// @@ -145,25 +154,33 @@ use thiserror::Error; /// assert_eq!(&C(0), world.entity(id).get::().unwrap()); /// ``` /// -/// You can also define a custom constructor: +/// You can also define a custom constructor function or closure: /// /// ``` /// # use bevy_ecs::prelude::*; /// #[derive(Component)] -/// #[require(B(init_b))] +/// #[require(C(init_c))] /// struct A; /// /// #[derive(Component, PartialEq, Eq, Debug)] -/// struct B(usize); +/// #[require(C(|| C(20)))] +/// struct B; +/// +/// #[derive(Component, PartialEq, Eq, Debug)] +/// struct C(usize); /// -/// fn init_b() -> B { -/// B(10) +/// fn init_c() -> C { +/// C(10) /// } /// /// # let mut world = World::default(); -/// // This will implicitly also insert B with the init_b() constructor +/// // This will implicitly also insert C with the init_c() constructor /// let id = world.spawn(A).id(); -/// assert_eq!(&B(10), world.entity(id).get::().unwrap()); +/// assert_eq!(&C(10), world.entity(id).get::().unwrap()); +/// +/// // This will implicitly also insert C with the `|| C(20)` constructor closure +/// let id = world.spawn(B).id(); +/// assert_eq!(&C(20), world.entity(id).get::().unwrap()); /// ``` /// /// Required components are _recursive_. This means, if a Required Component has required components, @@ -201,33 +218,25 @@ use thiserror::Error; /// struct X(usize); /// /// #[derive(Component, Default)] -/// #[require(X(x1))] +/// #[require(X(|| X(1)))] /// struct Y; /// -/// fn x1() -> X { -/// X(1) -/// } -/// /// #[derive(Component)] /// #[require( /// Y, -/// X(x2), +/// X(|| X(2)), /// )] /// struct Z; /// -/// fn x2() -> X { -/// X(2) -/// } -/// /// # let mut world = World::default(); /// // In this case, the x2 constructor is used for X /// let id = world.spawn(Z).id(); /// assert_eq!(2, world.entity(id).get::().unwrap().0); /// ``` /// -/// In general, this shouldn't happen often, but when it does the algorithm is simple and predictable: -/// 1. Use all of the constructors (including default constructors) directly defined in the spawned component's require list -/// 2. In the order the requires are defined in `#[require()]`, recursively visit the require list of each of the components in the list (this is a depth Depth First Search). When a constructor is found, it will only be used if one has not already been found. +/// In general, this shouldn't happen often, but when it does the algorithm for choosing the constructor from the tree is simple and predictable: +/// 1. A constructor from a direct `#[require()]`, if one exists, is selected with priority. +/// 2. Otherwise, perform a Depth First Search on the tree of requirements and select the first one found. /// /// From a user perspective, just think about this as the following: /// 1. Specifying a required component constructor for Foo directly on a spawned component Bar will result in that constructor being used (and overriding existing constructors lower in the inheritance tree). This is the classic "inheritance override" behavior people expect. 
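
To complement the override example above, here is a hedged sketch of rule 2 (the depth-first search): `Z` declares no constructor for `X` directly, so the first constructor found through its requirements, `Y`'s closure, is used. The component names mirror the docs, but the exact setup is illustrative and assumes the closure-constructor support added in this diff.

```rust
use bevy_ecs::prelude::*;

#[derive(Component, Default, PartialEq, Eq, Debug)]
struct X(usize);

#[derive(Component, Default)]
#[require(X(|| X(1)))]
struct Y;

// No direct constructor for `X` here, only the requirement on `Y`.
#[derive(Component)]
#[require(Y)]
struct Z;

fn main() {
    let mut world = World::default();
    let id = world.spawn(Z).id();
    // The depth-first search reaches `Y`'s `|| X(1)` first, so that constructor wins.
    assert_eq!(&X(1), world.entity(id).get::<X>().unwrap());
}
```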
@@ -302,12 +311,12 @@ use thiserror::Error; /// // #[component(on_replace = my_on_replace_hook, on_remove = my_on_remove_hook)] /// struct ComponentA; /// -/// fn my_on_add_hook(world: DeferredWorld, entity: Entity, id: ComponentId, caller: &'static Location<'static>) { +/// fn my_on_add_hook(world: DeferredWorld, entity: Entity, id: ComponentId, caller: Option<&'static Location<'static>>) { /// // ... /// } /// /// // You can also omit writing some types using generics. -/// fn my_on_insert_hook(world: DeferredWorld, _: T1, _: T2, caller: &'static Location<'static>) { +/// fn my_on_insert_hook(world: DeferredWorld, _: T1, _: T2, caller: Option<&'static Location<'static>>) { /// // ... /// } /// ``` @@ -378,6 +387,14 @@ pub trait Component: Send + Sync + 'static { /// A constant indicating the storage type used for this component. const STORAGE_TYPE: StorageType; + /// A marker type to assist Bevy with determining if this component is + /// mutable, or immutable. Mutable components will have [`Component`], + /// while immutable components will instead have [`Component`]. + /// + /// * For a component to be mutable, this type must be [`Mutable`]. + /// * For a component to be immutable, this type must be [`Immutable`]. + type Mutability: ComponentMutability; + /// Called when registering this component, allowing mutable access to its [`ComponentHooks`]. fn register_component_hooks(_hooks: &mut ComponentHooks) {} @@ -388,8 +405,71 @@ pub trait Component: Send + Sync + 'static { _storages: &mut Storages, _required_components: &mut RequiredComponents, _inheritance_depth: u16, + _recursion_check_stack: &mut Vec, ) { } + + /// Called when registering this component, allowing to override clone function (or disable cloning altogether) for this component. + /// + /// See [Handlers section of `EntityCloneBuilder`](crate::entity::EntityCloneBuilder#handlers) to understand how this affects handler priority. + fn get_component_clone_handler() -> ComponentCloneHandler { + ComponentCloneHandler::default() + } +} + +mod private { + pub trait Seal {} +} + +/// The mutability option for a [`Component`]. This can either be: +/// * [`Mutable`] +/// * [`Immutable`] +/// +/// This is controlled through either [`Component::Mutability`] or `#[component(immutable)]` +/// when using the derive macro. +/// +/// Immutable components are guaranteed to never have an exclusive reference, +/// `&mut ...`, created while inserted onto an entity. +/// In all other ways, they are identical to mutable components. +/// This restriction allows hooks to observe all changes made to an immutable +/// component, effectively turning the `OnInsert` and `OnReplace` hooks into a +/// `OnMutate` hook. +/// This is not practical for mutable components, as the runtime cost of invoking +/// a hook for every exclusive reference created would be far too high. +/// +/// # Examples +/// +/// ```rust +/// # use bevy_ecs::component::Component; +/// # +/// #[derive(Component)] +/// #[component(immutable)] +/// struct ImmutableFoo; +/// ``` +pub trait ComponentMutability: private::Seal + 'static { + /// Boolean to indicate if this mutability setting implies a mutable or immutable + /// component. + const MUTABLE: bool; +} + +/// Parameter indicating a [`Component`] is immutable. +/// +/// See [`ComponentMutability`] for details. +pub struct Immutable; + +impl private::Seal for Immutable {} +impl ComponentMutability for Immutable { + const MUTABLE: bool = false; +} + +/// Parameter indicating a [`Component`] is mutable. 
+/// +/// See [`ComponentMutability`] for details. +pub struct Mutable; + +impl private::Seal for Mutable {} +impl ComponentMutability for Mutable { + const MUTABLE: bool = true; } /// The storage used for a specific component type. @@ -415,7 +495,7 @@ pub enum StorageType { /// The type used for [`Component`] lifecycle hooks such as `on_add`, `on_insert` or `on_remove` pub type ComponentHook = - for<'w> fn(DeferredWorld<'w>, Entity, ComponentId, &'static Location<'static>); + for<'w> fn(DeferredWorld<'w>, Entity, ComponentId, Option<&'static Location<'static>>); /// [`World`]-mutating functions that run as part of lifecycle events of a [`Component`]. /// @@ -488,7 +568,7 @@ impl ComponentHooks { /// Will panic if the component already has an `on_add` hook pub fn on_add(&mut self, hook: ComponentHook) -> &mut Self { self.try_on_add(hook) - .expect("Component id: {:?}, already has an on_add hook") + .expect("Component already has an on_add hook") } /// Register a [`ComponentHook`] that will be run when this component is added (with `.insert`) @@ -506,7 +586,7 @@ impl ComponentHooks { /// Will panic if the component already has an `on_insert` hook pub fn on_insert(&mut self, hook: ComponentHook) -> &mut Self { self.try_on_insert(hook) - .expect("Component id: {:?}, already has an on_insert hook") + .expect("Component already has an on_insert hook") } /// Register a [`ComponentHook`] that will be run when this component is about to be dropped, @@ -528,7 +608,7 @@ impl ComponentHooks { /// Will panic if the component already has an `on_replace` hook pub fn on_replace(&mut self, hook: ComponentHook) -> &mut Self { self.try_on_replace(hook) - .expect("Component id: {:?}, already has an on_replace hook") + .expect("Component already has an on_replace hook") } /// Register a [`ComponentHook`] that will be run when this component is removed from an entity. @@ -539,7 +619,7 @@ impl ComponentHooks { /// Will panic if the component already has an `on_remove` hook pub fn on_remove(&mut self, hook: ComponentHook) -> &mut Self { self.try_on_remove(hook) - .expect("Component id: {:?}, already has an on_remove hook") + .expect("Component already has an on_remove hook") } /// Attempt to register a [`ComponentHook`] that will be run when this component is added to an entity. @@ -618,6 +698,12 @@ impl ComponentInfo { &self.descriptor.name } + /// Returns `true` if the current component is mutable. + #[inline] + pub fn mutable(&self) -> bool { + self.descriptor.mutable + } + /// Returns the [`TypeId`] of the underlying component type. /// Returns `None` if the component does not correspond to a Rust type. #[inline] @@ -770,6 +856,7 @@ pub struct ComponentDescriptor { // this descriptor describes. 
// None if the underlying type doesn't need to be dropped drop: Option unsafe fn(OwningPtr<'a>)>, + mutable: bool, } // We need to ignore the `drop` field in our `Debug` impl @@ -781,6 +868,7 @@ impl Debug for ComponentDescriptor { .field("is_send_and_sync", &self.is_send_and_sync) .field("type_id", &self.type_id) .field("layout", &self.layout) + .field("mutable", &self.mutable) .finish() } } @@ -805,6 +893,7 @@ impl ComponentDescriptor { type_id: Some(TypeId::of::()), layout: Layout::new::(), drop: needs_drop::().then_some(Self::drop_ptr:: as _), + mutable: T::Mutability::MUTABLE, } } @@ -818,6 +907,7 @@ impl ComponentDescriptor { storage_type: StorageType, layout: Layout, drop: Option unsafe fn(OwningPtr<'a>)>, + mutable: bool, ) -> Self { Self { name: name.into(), @@ -826,6 +916,7 @@ impl ComponentDescriptor { type_id: None, layout, drop, + mutable, } } @@ -842,6 +933,7 @@ impl ComponentDescriptor { type_id: Some(TypeId::of::()), layout: Layout::new::(), drop: needs_drop::().then_some(Self::drop_ptr:: as _), + mutable: true, } } @@ -853,6 +945,7 @@ impl ComponentDescriptor { type_id: Some(TypeId::of::()), layout: Layout::new::(), drop: needs_drop::().then_some(Self::drop_ptr:: as _), + mutable: true, } } @@ -874,6 +967,95 @@ impl ComponentDescriptor { pub fn name(&self) -> &str { self.name.as_ref() } + + /// Returns whether this component is mutable. + #[inline] + pub fn mutable(&self) -> bool { + self.mutable + } +} + +/// Function type that can be used to clone an entity. +pub type ComponentCloneFn = fn(&mut DeferredWorld, &EntityCloner); + +/// An enum instructing how to clone a component. +#[derive(Debug, Default)] +pub enum ComponentCloneHandler { + #[default] + /// Use the global default function to clone the component with this handler. + Default, + /// Do not clone the component. When a command to clone an entity is issued, component with this handler will be skipped. + Ignore, + /// Set a custom handler for the component. + Custom(ComponentCloneFn), +} + +/// A registry of component clone handlers. Allows to set global default and per-component clone function for all components in the world. +#[derive(Debug)] +pub struct ComponentCloneHandlers { + handlers: Vec>, + default_handler: ComponentCloneFn, +} + +impl ComponentCloneHandlers { + /// Sets the default handler for this registry. All components with [`Default`](ComponentCloneHandler::Default) handler, as well as any component that does not have an + /// explicitly registered clone function will use this handler. + /// + /// See [Handlers section of `EntityCloneBuilder`](crate::entity::EntityCloneBuilder#handlers) to understand how this affects handler priority. + pub fn set_default_handler(&mut self, handler: ComponentCloneFn) { + self.default_handler = handler; + } + + /// Returns the currently registered default handler. + pub fn get_default_handler(&self) -> ComponentCloneFn { + self.default_handler + } + + /// Sets a handler for a specific component. + /// + /// See [Handlers section of `EntityCloneBuilder`](crate::entity::EntityCloneBuilder#handlers) to understand how this affects handler priority. 
+ pub fn set_component_handler(&mut self, id: ComponentId, handler: ComponentCloneHandler) { + if id.0 >= self.handlers.len() { + self.handlers.resize(id.0 + 1, None); + } + match handler { + ComponentCloneHandler::Default => self.handlers[id.0] = None, + ComponentCloneHandler::Ignore => self.handlers[id.0] = Some(component_clone_ignore), + ComponentCloneHandler::Custom(handler) => self.handlers[id.0] = Some(handler), + }; + } + + /// Checks if the specified component is registered. If not, the component will use the default global handler. + /// + /// This will return an incorrect result if `id` did not come from the same world as `self`. + pub fn is_handler_registered(&self, id: ComponentId) -> bool { + self.handlers.get(id.0).is_some_and(Option::is_some) + } + + /// Gets a handler to clone a component. This can be one of the following: + /// - Custom clone function for this specific component. + /// - Default global handler. + /// - A [`component_clone_ignore`] (no cloning). + /// + /// This will return an incorrect result if `id` did not come from the same world as `self`. + pub fn get_handler(&self, id: ComponentId) -> ComponentCloneFn { + match self.handlers.get(id.0) { + Some(Some(handler)) => *handler, + Some(None) | None => self.default_handler, + } + } +} + +impl Default for ComponentCloneHandlers { + fn default() -> Self { + Self { + handlers: Default::default(), + #[cfg(feature = "bevy_reflect")] + default_handler: component_clone_via_reflect, + #[cfg(not(feature = "bevy_reflect"))] + default_handler: component_clone_ignore, + } + } } /// Stores metadata associated with each kind of [`Component`] in a given [`World`]. @@ -882,6 +1064,7 @@ pub struct Components { components: Vec, indices: TypeIdMap, resource_indices: TypeIdMap, + component_clone_handlers: ComponentCloneHandlers, } impl Components { @@ -895,7 +1078,16 @@ impl Components { /// * [`Components::register_component_with_descriptor()`] #[inline] pub fn register_component(&mut self, storages: &mut Storages) -> ComponentId { - let mut registered = false; + self.register_component_internal::(storages, &mut Vec::new()) + } + + #[inline] + fn register_component_internal( + &mut self, + storages: &mut Storages, + recursion_check_stack: &mut Vec, + ) -> ComponentId { + let mut is_new_registration = false; let id = { let Components { indices, @@ -909,16 +1101,26 @@ impl Components { storages, ComponentDescriptor::new::(), ); - registered = true; + is_new_registration = true; id }) }; - if registered { + if is_new_registration { let mut required_components = RequiredComponents::default(); - T::register_required_components(id, self, storages, &mut required_components, 0); + T::register_required_components( + id, + self, + storages, + &mut required_components, + 0, + recursion_check_stack, + ); let info = &mut self.components[id.index()]; T::register_component_hooks(&mut info.hooks); info.required_components = required_components; + let clone_handler = T::get_component_clone_handler(); + self.component_clone_handlers + .set_component_handler(id, clone_handler); } id } @@ -1030,8 +1232,8 @@ impl Components { /// registration will be used. pub(crate) unsafe fn register_required_components( &mut self, - required: ComponentId, requiree: ComponentId, + required: ComponentId, constructor: fn() -> R, ) -> Result<(), RequiredComponentsError> { // SAFETY: The caller ensures that the `requiree` is valid. 
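A sketch of configuring the clone-handler registry defined above; the function name and the way `&mut ComponentCloneHandlers` is obtained (for example via `Components::get_component_clone_handlers_mut`, added later in this patch) are assumptions for illustration:

```rust
use bevy_ecs::component::{
    component_clone_ignore, component_clone_via_clone, Component, ComponentCloneHandler,
    ComponentCloneHandlers, ComponentId,
};

// Skip every component by default, then opt `C` back in with the `Clone`-based handler.
fn configure_clone_handlers<C: Component + Clone>(
    handlers: &mut ComponentCloneHandlers,
    id: ComponentId, // the ComponentId previously registered for `C`
) {
    handlers.set_default_handler(component_clone_ignore);
    handlers.set_component_handler(
        id,
        ComponentCloneHandler::Custom(component_clone_via_clone::<C>),
    );
}
```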
@@ -1066,6 +1268,10 @@ impl Components { // Propagate the new required components up the chain to all components that require the requiree. if let Some(required_by) = self.get_required_by(requiree).cloned() { + // `required` is now required by anything that `requiree` was required by. + self.get_required_by_mut(required) + .unwrap() + .extend(required_by.iter().copied()); for &required_by_id in required_by.iter() { // SAFETY: The component is in the list of required components, so it must exist already. let required_components = unsafe { @@ -1073,20 +1279,24 @@ impl Components { .debug_checked_unwrap() }; - // Register the original required component for the requiree. - // The inheritance depth is `1` since this is a component required by the original requiree. - required_components.register_by_id(required, constructor, 1); + // Register the original required component in the "parent" of the requiree. + // The inheritance depth is 1 deeper than the `requiree` wrt `required_by_id`. + let depth = required_components.0.get(&requiree).expect("requiree is required by required_by_id, so its required_components must include requiree").inheritance_depth; + required_components.register_by_id(required, constructor, depth + 1); for (component_id, component) in inherited_requirements.iter() { // Register the required component. - // The inheritance depth is increased by `1` since this is a component required by the original required component. + // The inheritance depth of inherited components is whatever the requiree's + // depth is relative to `required_by_id`, plus the inheritance depth of the + // inherited component relative to the requiree, plus 1 to account for the + // requiree in between. // SAFETY: Component ID and constructor match the ones on the original requiree. // The original requiree is responsible for making sure the registration is safe. unsafe { required_components.register_dynamic( *component_id, component.constructor.clone(), - component.inheritance_depth + 1, + component.inheritance_depth + depth + 1, ); }; } @@ -1160,15 +1370,17 @@ impl Components { // NOTE: This should maybe be private, but it is currently public so that `bevy_ecs_macros` can use it. // We can't directly move this there either, because this uses `Components::get_required_by_mut`, // which is private, and could be equally risky to expose to users. - /// Registers the given component `R` as a [required component] for `T`, - /// and adds `T` to the list of requirees for `R`. + /// Registers the given component `R` and [required components] inherited from it as required by `T`, + /// and adds `T` to their lists of requirees. /// /// The given `inheritance_depth` determines how many levels of inheritance deep the requirement is. /// A direct requirement has a depth of `0`, and each level of inheritance increases the depth by `1`. /// Lower depths are more specific requirements, and can override existing less specific registrations. /// - /// This method does *not* recursively register required components for components required by `R`, - /// nor does it register them for components that require `T`. + /// The `recursion_check_stack` allows checking whether this component tried to register itself as its + /// own (indirect) required component. + /// + /// This method does *not* register any components as required by components that require `T`. /// /// Only use this method if you know what you are doing. 
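The `recursion_check_stack` mentioned above exists to catch `require` cycles at registration time; a sketch of the failure mode (component names are illustrative):

```rust
use bevy_ecs::prelude::*;

#[derive(Component, Default)]
#[require(B)]
struct A;

#[derive(Component, Default)]
#[require(A)]
struct B;

// let mut world = World::new();
// world.spawn(A);
// Registering `A` walks its required components, finds the A → B → A cycle, and panics
// with a "Recursive required components detected" message (see
// `enforce_no_required_components_recursion` further down in this patch) instead of
// recursing forever.
```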
In most cases, you should instead use [`World::register_required_components`], /// or the equivalent method in `bevy_app::App`. @@ -1181,9 +1393,10 @@ impl Components { required_components: &mut RequiredComponents, constructor: fn() -> R, inheritance_depth: u16, + recursion_check_stack: &mut Vec, ) { - let requiree = self.register_component::(storages); - let required = self.register_component::(storages); + let requiree = self.register_component_internal::(storages, recursion_check_stack); + let required = self.register_component_internal::(storages, recursion_check_stack); // SAFETY: We just created the components. unsafe { @@ -1197,15 +1410,14 @@ impl Components { } } - /// Registers the given component `R` as a [required component] for `T`, - /// and adds `T` to the list of requirees for `R`. + /// Registers the given component `R` and [required components] inherited from it as required by `T`, + /// and adds `T` to their lists of requirees. /// /// The given `inheritance_depth` determines how many levels of inheritance deep the requirement is. /// A direct requirement has a depth of `0`, and each level of inheritance increases the depth by `1`. /// Lower depths are more specific requirements, and can override existing less specific registrations. /// - /// This method does *not* recursively register required components for components required by `R`, - /// nor does it register them for components that require `T`. + /// This method does *not* register any components as required by components that require `T`. /// /// [required component]: Component#required-components /// @@ -1233,6 +1445,27 @@ impl Components { // Assuming it is valid, the component is in the list of required components, so it must exist already. let required_by = unsafe { self.get_required_by_mut(required).debug_checked_unwrap() }; required_by.insert(requiree); + + // Register the inherited required components for the requiree. + let required: Vec<(ComponentId, RequiredComponent)> = self + .get_info(required) + .unwrap() + .required_components() + .0 + .iter() + .map(|(id, component)| (*id, component.clone())) + .collect(); + + for (id, component) in required { + // Register the inherited required components for the requiree. + // The inheritance depth is increased by `1` since this is a component required by the original required component. + required_components.register_dynamic( + id, + component.constructor.clone(), + component.inheritance_depth + 1, + ); + self.get_required_by_mut(id).unwrap().insert(requiree); + } } #[inline] @@ -1250,6 +1483,16 @@ impl Components { .map(|info| &mut info.required_by) } + /// Retrieves the [`ComponentCloneHandlers`]. Can be used to get clone functions for components. + pub fn get_component_clone_handlers(&self) -> &ComponentCloneHandlers { + &self.component_clone_handlers + } + + /// Retrieves a mutable reference to the [`ComponentCloneHandlers`]. Can be used to set and update clone functions for components. + pub fn get_component_clone_handlers_mut(&mut self) -> &mut ComponentCloneHandlers { + &mut self.component_clone_handlers + } + /// Type-erased equivalent of [`Components::component_id()`]. #[inline] pub fn get_id(&self, type_id: TypeId) -> Option { @@ -1513,8 +1756,11 @@ impl<'a> TickCells<'a> { #[derive(Copy, Clone, Debug)] #[cfg_attr(feature = "bevy_reflect", derive(Reflect), reflect(Debug))] pub struct ComponentTicks { - pub(crate) added: Tick, - pub(crate) changed: Tick, + /// Tick recording the time this component or resource was added. 
+ pub added: Tick, + + /// Tick recording the time this component or resource was most recently changed. + pub changed: Tick, } impl ComponentTicks { @@ -1532,19 +1778,8 @@ impl ComponentTicks { self.changed.is_newer_than(last_run, this_run) } - /// Returns the tick recording the time this component or resource was most recently changed. - #[inline] - pub fn last_changed_tick(&self) -> Tick { - self.changed - } - - /// Returns the tick recording the time this component or resource was added. - #[inline] - pub fn added_tick(&self) -> Tick { - self.added - } - - pub(crate) fn new(change_tick: Tick) -> Self { + /// Creates a new instance with the same change tick for `added` and `changed`. + pub fn new(change_tick: Tick) -> Self { Self { added: change_tick, changed: change_tick, @@ -1831,3 +2066,132 @@ impl RequiredComponents { } } } + +// NOTE: This should maybe be private, but it is currently public so that `bevy_ecs_macros` can use it. +// This exists as a standalone function instead of being inlined into the component derive macro so as +// to reduce the amount of generated code. +#[doc(hidden)] +pub fn enforce_no_required_components_recursion( + components: &Components, + recursion_check_stack: &[ComponentId], +) { + if let Some((&requiree, check)) = recursion_check_stack.split_last() { + if let Some(direct_recursion) = check + .iter() + .position(|&id| id == requiree) + .map(|index| index == check.len() - 1) + { + panic!( + "Recursive required components detected: {}\nhelp: {}", + recursion_check_stack + .iter() + .map(|id| format!("{}", ShortName(components.get_name(*id).unwrap()))) + .collect::>() + .join(" → "), + if direct_recursion { + format!( + "Remove require({})", + ShortName(components.get_name(requiree).unwrap()) + ) + } else { + "If this is intentional, consider merging the components.".into() + } + ); + } + } +} + +/// Component [clone handler function](ComponentCloneFn) implemented using the [`Clone`] trait. +/// Can be [set](ComponentCloneHandlers::set_component_handler) as clone handler for the specific component it is implemented for. +/// It will panic if set as handler for any other component. +/// +/// See [`ComponentCloneHandlers`] for more details. +pub fn component_clone_via_clone( + world: &mut DeferredWorld, + entity_cloner: &EntityCloner, +) { + let component = world + .entity(entity_cloner.source()) + .get::() + .expect("Component must exists on source entity") + .clone(); + world + .commands() + .entity(entity_cloner.target()) + .insert(component); +} + +/// Component [clone handler function](ComponentCloneFn) implemented using reflect. +/// Can be [set](ComponentCloneHandlers::set_component_handler) as clone handler for any registered component, +/// but only reflected components will be cloned. +/// +/// See [`ComponentCloneHandlers`] for more details. 
+#[cfg(feature = "bevy_reflect")] +pub fn component_clone_via_reflect(world: &mut DeferredWorld, entity_cloner: &EntityCloner) { + let component_id = entity_cloner.component_id(); + let source = entity_cloner.source(); + let target = entity_cloner.target(); + world.commands().queue(move |world: &mut World| { + world.resource_scope::(|world, registry| { + let registry = registry.read(); + + let component_info = world + .components() + .get_info(component_id) + .expect("Component must be registered"); + let Some(type_id) = component_info.type_id() else { + return; + }; + let Some(reflect_component) = + registry.get_type_data::(type_id) + else { + return; + }; + let source_component = reflect_component + .reflect(world.get_entity(source).expect("Source entity must exist")) + .expect("Source entity must have reflected component") + .clone_value(); + let mut target = world + .get_entity_mut(target) + .expect("Target entity must exist"); + reflect_component.apply_or_insert(&mut target, &*source_component, ®istry); + }); + }); +} + +/// Noop implementation of component clone handler function. +/// +/// See [`ComponentCloneHandlers`] for more details. +pub fn component_clone_ignore(_world: &mut DeferredWorld, _entity_cloner: &EntityCloner) {} + +/// Wrapper for components clone specialization using autoderef. +#[doc(hidden)] +pub struct ComponentCloneSpecializationWrapper(PhantomData); + +impl Default for ComponentCloneSpecializationWrapper { + fn default() -> Self { + Self(PhantomData) + } +} + +/// Base trait for components clone specialization using autoderef. +#[doc(hidden)] +pub trait ComponentCloneBase { + fn get_component_clone_handler(&self) -> ComponentCloneHandler; +} +impl ComponentCloneBase for ComponentCloneSpecializationWrapper { + fn get_component_clone_handler(&self) -> ComponentCloneHandler { + ComponentCloneHandler::default() + } +} + +/// Specialized trait for components clone specialization using autoderef. +#[doc(hidden)] +pub trait ComponentCloneViaClone { + fn get_component_clone_handler(&self) -> ComponentCloneHandler; +} +impl ComponentCloneViaClone for &ComponentCloneSpecializationWrapper { + fn get_component_clone_handler(&self) -> ComponentCloneHandler { + ComponentCloneHandler::Custom(component_clone_via_clone::) + } +} diff --git a/crates/bevy_ecs/src/entity/clone_entities.rs b/crates/bevy_ecs/src/entity/clone_entities.rs new file mode 100644 index 00000000000000..da0e51c25fcf62 --- /dev/null +++ b/crates/bevy_ecs/src/entity/clone_entities.rs @@ -0,0 +1,523 @@ +use alloc::sync::Arc; +use core::any::TypeId; + +use bevy_utils::{HashMap, HashSet}; + +use crate::{ + bundle::Bundle, + component::{component_clone_ignore, Component, ComponentCloneHandler, ComponentId}, + entity::Entity, + world::World, +}; + +/// A helper struct to clone an entity. Used internally by [`EntityCloneBuilder::clone_entity`] and custom clone handlers. +pub struct EntityCloner { + source: Entity, + target: Entity, + component_id: Option, + filter_allows_components: bool, + filter: Arc>, + clone_handlers_overrides: Arc>, +} + +impl EntityCloner { + /// Clones and inserts components from the `source` entity into `target` entity using the stored configuration. 
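Since `EntityCloner` is what custom handlers receive, a small sketch of a hand-written handler; `ClonedMarker` and `mark_clone` are illustrative names, and the `source`/`target` accessors are the ones defined just below:

```rust
use bevy_ecs::entity::EntityCloner;
use bevy_ecs::prelude::*;
use bevy_ecs::world::DeferredWorld;

// Illustrative marker component, not part of this patch.
#[derive(Component)]
struct ClonedMarker;

// A custom `ComponentCloneFn`: instead of copying the component from the source entity,
// it tags the target entity. Structural changes go through commands, exactly as the
// built-in `component_clone_via_clone` handler does.
fn mark_clone(world: &mut DeferredWorld, cloner: &EntityCloner) {
    let _source = cloner.source(); // available if the handler needs to inspect the original
    world.commands().entity(cloner.target()).insert(ClonedMarker);
}
```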
+ pub fn clone_entity(&mut self, world: &mut World) { + let source_entity = world + .get_entity(self.source) + .expect("Source entity must exist"); + let archetype = source_entity.archetype(); + + let mut components = Vec::with_capacity(archetype.component_count()); + components.extend( + archetype + .components() + .filter(|id| self.is_cloning_allowed(id)), + ); + + for component in components { + let global_handlers = world.components().get_component_clone_handlers(); + let handler = match self.clone_handlers_overrides.get(&component) { + None => global_handlers.get_handler(component), + Some(ComponentCloneHandler::Default) => global_handlers.get_default_handler(), + Some(ComponentCloneHandler::Ignore) => component_clone_ignore, + Some(ComponentCloneHandler::Custom(handler)) => *handler, + }; + self.component_id = Some(component); + (handler)(&mut world.into(), self); + } + } + + fn is_cloning_allowed(&self, component: &ComponentId) -> bool { + (self.filter_allows_components && self.filter.contains(component)) + || (!self.filter_allows_components && !self.filter.contains(component)) + } + + /// Returns the current source entity. + pub fn source(&self) -> Entity { + self.source + } + + /// Returns the current target entity. + pub fn target(&self) -> Entity { + self.target + } + + /// Returns the [`ComponentId`] of currently cloned component. + pub fn component_id(&self) -> ComponentId { + self.component_id + .expect("ComponentId must be set in clone_entity") + } + + /// Reuse existing [`EntityCloner`] configuration with new source and target. + pub fn with_source_and_target(&self, source: Entity, target: Entity) -> EntityCloner { + EntityCloner { + source, + target, + filter: self.filter.clone(), + clone_handlers_overrides: self.clone_handlers_overrides.clone(), + ..*self + } + } +} + +/// Builder struct to clone an entity. Allows configuring which components to clone, as well as how to clone them. +/// After configuration is complete an entity can be cloned using [`Self::clone_entity`]. +/// +///``` +/// use bevy_ecs::prelude::*; +/// use bevy_ecs::entity::EntityCloneBuilder; +/// +/// #[derive(Component, Clone, PartialEq, Eq)] +/// struct A { +/// field: usize, +/// } +/// +/// let mut world = World::default(); +/// +/// let component = A { field: 5 }; +/// +/// let entity = world.spawn(component.clone()).id(); +/// let entity_clone = world.spawn_empty().id(); +/// +/// EntityCloneBuilder::new(&mut world).clone_entity(entity, entity_clone); +/// +/// assert!(world.get::(entity_clone).is_some_and(|c| *c == component)); +///``` +/// +/// # Default cloning strategy +/// By default, all types that derive [`Component`] and implement either [`Clone`] or `Reflect` (with `ReflectComponent`) will be cloned +/// (with `Clone`-based implementation preferred in case component implements both). +/// +/// It should be noted that if `Component` is implemented manually or if `Clone` implementation is conditional +/// (like when deriving `Clone` for a type with a generic parameter without `Clone` bound), +/// the component will be cloned using the [default cloning strategy](crate::component::ComponentCloneHandlers::get_default_handler). 
+/// To use `Clone`-based handler ([`component_clone_via_clone`](crate::component::component_clone_via_clone)) in this case it should be set manually using one +/// of the methods mentioned in the [Handlers](#handlers) section +/// +/// Here's an example of how to do it using [`get_component_clone_handler`](Component::get_component_clone_handler): +/// ``` +/// # use bevy_ecs::prelude::*; +/// # use bevy_ecs::component::{StorageType, component_clone_via_clone, ComponentCloneHandler, Mutable}; +/// #[derive(Clone)] +/// struct SomeComponent; +/// +/// impl Component for SomeComponent { +/// const STORAGE_TYPE: StorageType = StorageType::Table; +/// type Mutability = Mutable; +/// fn get_component_clone_handler() -> ComponentCloneHandler { +/// ComponentCloneHandler::Custom(component_clone_via_clone::) +/// } +/// } +/// ``` +/// +/// # Handlers +/// `EntityCloneBuilder` clones entities by cloning components using [`handlers`](ComponentCloneHandler), and there are multiple layers +/// to decide which handler to use for which component. The overall hierarchy looks like this (priority from most to least): +/// 1. local overrides using [`override_component_clone_handler`](Self::override_component_clone_handler) +/// 2. global overrides using [`set_component_handler`](crate::component::ComponentCloneHandlers::set_component_handler) +/// 3. component-defined handler using [`get_component_clone_handler`](Component::get_component_clone_handler) +/// 4. default handler override using [`set_default_handler`](crate::component::ComponentCloneHandlers::set_default_handler) +/// 5. reflect-based or noop default clone handler depending on if `bevy_reflect` feature is enabled or not. +#[derive(Debug)] +pub struct EntityCloneBuilder<'w> { + world: &'w mut World, + filter_allows_components: bool, + filter: HashSet, + clone_handlers_overrides: HashMap, +} + +impl<'w> EntityCloneBuilder<'w> { + /// Creates a new [`EntityCloneBuilder`] for world. + pub fn new(world: &'w mut World) -> Self { + Self { + world, + filter_allows_components: false, + filter: Default::default(), + clone_handlers_overrides: Default::default(), + } + } + + /// Finishes configuring the builder and clones `source` entity to `target`. + pub fn clone_entity(self, source: Entity, target: Entity) { + let EntityCloneBuilder { + world, + filter_allows_components, + filter, + clone_handlers_overrides, + .. + } = self; + + EntityCloner { + source, + target, + component_id: None, + filter_allows_components, + filter: Arc::new(filter), + clone_handlers_overrides: Arc::new(clone_handlers_overrides), + } + .clone_entity(world); + + world.flush_commands(); + } + + /// Adds all components of the bundle to the list of components to clone. + /// + /// Note that all components are allowed by default, to clone only explicitly allowed components make sure to call + /// [`deny_all`](`Self::deny_all`) before calling any of the `allow` methods. + pub fn allow(&mut self) -> &mut Self { + if self.filter_allows_components { + T::get_component_ids(self.world.components(), &mut |id| { + if let Some(id) = id { + self.filter.insert(id); + } + }); + } else { + T::get_component_ids(self.world.components(), &mut |id| { + if let Some(id) = id { + self.filter.remove(&id); + } + }); + } + self + } + + /// Extends the list of components to clone. + /// + /// Note that all components are allowed by default, to clone only explicitly allowed components make sure to call + /// [`deny_all`](`Self::deny_all`) before calling any of the `allow` methods. 
+ pub fn allow_by_ids(&mut self, ids: impl IntoIterator) -> &mut Self { + if self.filter_allows_components { + self.filter.extend(ids); + } else { + ids.into_iter().for_each(|id| { + self.filter.remove(&id); + }); + } + self + } + + /// Extends the list of components to clone using [`TypeId`]s. + /// + /// Note that all components are allowed by default, to clone only explicitly allowed components make sure to call + /// [`deny_all`](`Self::deny_all`) before calling any of the `allow` methods. + pub fn allow_by_type_ids(&mut self, ids: impl IntoIterator) -> &mut Self { + let ids = ids + .into_iter() + .filter_map(|id| self.world.components().get_id(id)); + if self.filter_allows_components { + self.filter.extend(ids); + } else { + ids.into_iter().for_each(|id| { + self.filter.remove(&id); + }); + } + self + } + + /// Resets the filter to allow all components to be cloned. + pub fn allow_all(&mut self) -> &mut Self { + self.filter_allows_components = false; + self.filter.clear(); + self + } + + /// Disallows all components of the bundle from being cloned. + pub fn deny(&mut self) -> &mut Self { + if self.filter_allows_components { + T::get_component_ids(self.world.components(), &mut |id| { + if let Some(id) = id { + self.filter.remove(&id); + } + }); + } else { + T::get_component_ids(self.world.components(), &mut |id| { + if let Some(id) = id { + self.filter.insert(id); + } + }); + } + self + } + + /// Extends the list of components that shouldn't be cloned. + pub fn deny_by_ids(&mut self, ids: impl IntoIterator) -> &mut Self { + if self.filter_allows_components { + ids.into_iter().for_each(|id| { + self.filter.remove(&id); + }); + } else { + self.filter.extend(ids); + } + self + } + + /// Extends the list of components that shouldn't be cloned by type ids. + pub fn deny_by_type_ids(&mut self, ids: impl IntoIterator) -> &mut Self { + let ids = ids + .into_iter() + .filter_map(|id| self.world.components().get_id(id)); + if self.filter_allows_components { + ids.into_iter().for_each(|id| { + self.filter.remove(&id); + }); + } else { + self.filter.extend(ids); + } + self + } + + /// Sets the filter to deny all components. + pub fn deny_all(&mut self) -> &mut Self { + self.filter_allows_components = true; + self.filter.clear(); + self + } + + /// Overrides the [`ComponentCloneHandler`] for a component in this builder. + /// This handler will be used to clone the component instead of the global one defined by [`ComponentCloneHandlers`](crate::component::ComponentCloneHandlers) + /// + /// See [Handlers section of `EntityCloneBuilder`](EntityCloneBuilder#handlers) to understand how this affects handler priority. + pub fn override_component_clone_handler( + &mut self, + handler: ComponentCloneHandler, + ) -> &mut Self { + if let Some(id) = self.world.components().component_id::() { + self.clone_handlers_overrides.insert(id, handler); + } + self + } + + /// Removes a previously set override of [`ComponentCloneHandler`] for a component in this builder. 
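A sketch of the per-builder override described above; `Secret` is an illustrative component, and note (per the implementation) that the override only applies if the component type is already registered in the world:

```rust
use bevy_ecs::component::ComponentCloneHandler;
use bevy_ecs::entity::EntityCloneBuilder;
use bevy_ecs::prelude::*;

#[derive(Component, Clone)]
struct Secret(String);

// Clone `source` into `target`, but skip `Secret` for this one clone regardless of the
// global handler configured for it.
fn clone_without_secret(world: &mut World, source: Entity, target: Entity) {
    let mut builder = EntityCloneBuilder::new(world);
    builder.override_component_clone_handler::<Secret>(ComponentCloneHandler::Ignore);
    builder.clone_entity(source, target);
}
```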
+ pub fn remove_component_clone_handler_override(&mut self) -> &mut Self { + if let Some(id) = self.world.components().component_id::() { + self.clone_handlers_overrides.remove(&id); + } + self + } +} + +#[cfg(test)] +mod tests { + use crate::{self as bevy_ecs, component::Component, entity::EntityCloneBuilder, world::World}; + + #[cfg(feature = "bevy_reflect")] + #[test] + fn clone_entity_using_reflect() { + use crate::reflect::{AppTypeRegistry, ReflectComponent}; + use bevy_reflect::Reflect; + + #[derive(Component, Reflect, Clone, PartialEq, Eq)] + #[reflect(Component)] + struct A { + field: usize, + } + + let mut world = World::default(); + world.init_resource::(); + let registry = world.get_resource::().unwrap(); + registry.write().register::(); + + let component = A { field: 5 }; + + let e = world.spawn(component.clone()).id(); + let e_clone = world.spawn_empty().id(); + + EntityCloneBuilder::new(&mut world).clone_entity(e, e_clone); + + assert!(world.get::(e_clone).is_some_and(|c| *c == component)); + } + + #[test] + fn clone_entity_using_clone() { + #[derive(Component, Clone, PartialEq, Eq)] + struct A { + field: usize, + } + + let mut world = World::default(); + + let component = A { field: 5 }; + + let e = world.spawn(component.clone()).id(); + let e_clone = world.spawn_empty().id(); + + EntityCloneBuilder::new(&mut world).clone_entity(e, e_clone); + + assert!(world.get::(e_clone).is_some_and(|c| *c == component)); + } + + #[cfg(feature = "bevy_reflect")] + #[test] + fn clone_entity_specialization() { + use crate::reflect::{AppTypeRegistry, ReflectComponent}; + use bevy_reflect::Reflect; + + #[derive(Component, Reflect, PartialEq, Eq)] + #[reflect(Component)] + struct A { + field: usize, + } + + impl Clone for A { + fn clone(&self) -> Self { + Self { field: 10 } + } + } + + let mut world = World::default(); + world.init_resource::(); + let registry = world.get_resource::().unwrap(); + registry.write().register::(); + + let component = A { field: 5 }; + + let e = world.spawn(component.clone()).id(); + let e_clone = world.spawn_empty().id(); + + EntityCloneBuilder::new(&mut world).clone_entity(e, e_clone); + + assert!(world + .get::(e_clone) + .is_some_and(|comp| *comp == A { field: 10 })); + } + + #[test] + fn clone_entity_with_allow_filter() { + #[derive(Component, Clone, PartialEq, Eq)] + struct A { + field: usize, + } + + #[derive(Component, Clone)] + struct B; + + let mut world = World::default(); + + let component = A { field: 5 }; + + let e = world.spawn((component.clone(), B)).id(); + let e_clone = world.spawn_empty().id(); + + let mut builder = EntityCloneBuilder::new(&mut world); + builder.deny_all(); + builder.allow::(); + builder.clone_entity(e, e_clone); + + assert!(world.get::(e_clone).is_some_and(|c| *c == component)); + assert!(world.get::(e_clone).is_none()); + } + + #[test] + fn clone_entity_with_deny_filter() { + #[derive(Component, Clone, PartialEq, Eq)] + struct A { + field: usize, + } + + #[derive(Component, Clone)] + struct B; + + #[derive(Component, Clone)] + struct C; + + let mut world = World::default(); + + let component = A { field: 5 }; + + let e = world.spawn((component.clone(), B, C)).id(); + let e_clone = world.spawn_empty().id(); + + let mut builder = EntityCloneBuilder::new(&mut world); + builder.deny::(); + builder.clone_entity(e, e_clone); + + assert!(world.get::(e_clone).is_some_and(|c| *c == component)); + assert!(world.get::(e_clone).is_none()); + assert!(world.get::(e_clone).is_some()); + } + + #[test] + fn 
clone_entity_with_override_allow_filter() { + #[derive(Component, Clone, PartialEq, Eq)] + struct A { + field: usize, + } + + #[derive(Component, Clone)] + struct B; + + #[derive(Component, Clone)] + struct C; + + let mut world = World::default(); + + let component = A { field: 5 }; + + let e = world.spawn((component.clone(), B, C)).id(); + let e_clone = world.spawn_empty().id(); + + let mut builder = EntityCloneBuilder::new(&mut world); + builder.deny_all(); + builder.allow::(); + builder.allow::(); + builder.allow::(); + builder.deny::(); + builder.clone_entity(e, e_clone); + + assert!(world.get::(e_clone).is_some_and(|c| *c == component)); + assert!(world.get::(e_clone).is_none()); + assert!(world.get::(e_clone).is_some()); + } + + #[test] + fn clone_entity_with_override_bundle() { + #[derive(Component, Clone, PartialEq, Eq)] + struct A { + field: usize, + } + + #[derive(Component, Clone)] + struct B; + + #[derive(Component, Clone)] + struct C; + + let mut world = World::default(); + + let component = A { field: 5 }; + + let e = world.spawn((component.clone(), B, C)).id(); + let e_clone = world.spawn_empty().id(); + + let mut builder = EntityCloneBuilder::new(&mut world); + builder.deny_all(); + builder.allow::<(A, B, C)>(); + builder.deny::<(B, C)>(); + builder.clone_entity(e, e_clone); + + assert!(world.get::(e_clone).is_some_and(|c| *c == component)); + assert!(world.get::(e_clone).is_none()); + assert!(world.get::(e_clone).is_none()); + } +} diff --git a/crates/bevy_ecs/src/entity/hash.rs b/crates/bevy_ecs/src/entity/hash.rs index 1b1ff531ffeb4f..2e7c8ff2a3fc6c 100644 --- a/crates/bevy_ecs/src/entity/hash.rs +++ b/crates/bevy_ecs/src/entity/hash.rs @@ -28,7 +28,8 @@ impl BuildHasher for EntityHash { /// /// If you have an unusual case -- say all your indices are multiples of 256 /// or most of the entities are dead generations -- then you might want also to -/// try [`AHasher`](bevy_utils::AHasher) for a slower hash computation but fewer lookup conflicts. +/// try [`DefaultHasher`](bevy_utils::DefaultHasher) for a slower hash +/// computation but fewer lookup conflicts. #[derive(Debug, Default)] pub struct EntityHasher { hash: u64, diff --git a/crates/bevy_ecs/src/entity/mod.rs b/crates/bevy_ecs/src/entity/mod.rs index 75df8e1bb61fb4..68a89f660f2e4c 100644 --- a/crates/bevy_ecs/src/entity/mod.rs +++ b/crates/bevy_ecs/src/entity/mod.rs @@ -35,12 +35,14 @@ //! [`World::despawn`]: crate::world::World::despawn //! [`EntityWorldMut::insert`]: crate::world::EntityWorldMut::insert //! [`EntityWorldMut::remove`]: crate::world::EntityWorldMut::remove +mod clone_entities; mod map_entities; mod visit_entities; #[cfg(feature = "bevy_reflect")] use bevy_reflect::Reflect; #[cfg(all(feature = "bevy_reflect", feature = "serialize"))] use bevy_reflect::{ReflectDeserialize, ReflectSerialize}; +pub use clone_entities::*; pub use map_entities::*; pub use visit_entities::*; @@ -153,7 +155,7 @@ type IdCursor = isize; reflect(Serialize, Deserialize) )] // Alignment repr necessary to allow LLVM to better output -// optimised codegen for `to_bits`, `PartialEq` and `Ord`. +// optimized codegen for `to_bits`, `PartialEq` and `Ord`. #[repr(C, align(8))] pub struct Entity { // Do not reorder the fields here. 
The ordering is explicitly used by repr(C) @@ -170,7 +172,7 @@ pub struct Entity { impl PartialEq for Entity { #[inline] fn eq(&self, other: &Entity) -> bool { - // By using `to_bits`, the codegen can be optimised out even + // By using `to_bits`, the codegen can be optimized out even // further potentially. Relies on the correct alignment/field // order of `Entity`. self.to_bits() == other.to_bits() @@ -179,10 +181,10 @@ impl PartialEq for Entity { impl Eq for Entity {} -// The derive macro codegen output is not optimal and can't be optimised as well +// The derive macro codegen output is not optimal and can't be optimized as well // by the compiler. This impl resolves the issue of non-optimal codegen by relying // on comparing against the bit representation of `Entity` instead of comparing -// the fields. The result is then LLVM is able to optimise the codegen for Entity +// the fields. The result is then LLVM is able to optimize the codegen for Entity // far beyond what the derive macro can. // See impl PartialOrd for Entity { @@ -193,10 +195,10 @@ impl PartialOrd for Entity { } } -// The derive macro codegen output is not optimal and can't be optimised as well +// The derive macro codegen output is not optimal and can't be optimized as well // by the compiler. This impl resolves the issue of non-optimal codegen by relying // on comparing against the bit representation of `Entity` instead of comparing -// the fields. The result is then LLVM is able to optimise the codegen for Entity +// the fields. The result is then LLVM is able to optimize the codegen for Entity // far beyond what the derive macro can. // See impl Ord for Entity { @@ -310,7 +312,7 @@ impl Entity { match id { Ok(entity) => entity, - Err(_) => panic!("Attempted to initialise invalid bits as an entity"), + Err(_) => panic!("Attempted to initialize invalid bits as an entity"), } } @@ -397,6 +399,8 @@ impl<'de> Deserialize<'de> for Entity { /// /// This takes the format: `{index}v{generation}#{bits}`. /// +/// For [`Entity::PLACEHOLDER`], this outputs `PLACEHOLDER`. +/// /// # Usage /// /// Prefer to use this format for debugging and logging purposes. Because the output contains @@ -416,22 +420,32 @@ impl<'de> Deserialize<'de> for Entity { /// ``` impl fmt::Debug for Entity { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "{}v{}#{}", - self.index(), - self.generation(), - self.to_bits() - ) + if self == &Self::PLACEHOLDER { + write!(f, "PLACEHOLDER") + } else { + write!( + f, + "{}v{}#{}", + self.index(), + self.generation(), + self.to_bits() + ) + } } } /// Outputs the short entity identifier, including the index and generation. /// /// This takes the format: `{index}v{generation}`. +/// +/// For [`Entity::PLACEHOLDER`], this outputs `PLACEHOLDER`. impl fmt::Display for Entity { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}v{}", self.index(), self.generation()) + if self == &Self::PLACEHOLDER { + write!(f, "PLACEHOLDER") + } else { + write!(f, "{}v{}", self.index(), self.generation()) + } } } @@ -901,25 +915,6 @@ impl Entities { } } - /// # Safety - /// - /// This function is safe if and only if the world this Entities is on has no entities. 
- pub unsafe fn flush_and_reserve_invalid_assuming_no_entities(&mut self, count: usize) { - let free_cursor = self.free_cursor.get_mut(); - *free_cursor = 0; - self.meta.reserve(count); - // SAFETY: The EntityMeta struct only contains integers, and it is valid to have all bytes set to u8::MAX - unsafe { - self.meta.as_mut_ptr().write_bytes(u8::MAX, count); - } - // SAFETY: We have reserved `count` elements above and we have initialized values from index 0 to `count`. - unsafe { - self.meta.set_len(count); - } - - self.len = count as u32; - } - /// The count of all entities in the [`World`] that have ever been allocated /// including the entities that are currently freed. /// @@ -945,13 +940,7 @@ impl Entities { } } -// This type is repr(C) to ensure that the layout and values within it can be safe to fully fill -// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`]. -// Safety: -// This type must not contain any pointers at any level, and be safe to fully fill with u8::MAX. -/// Metadata for an [`Entity`]. #[derive(Copy, Clone, Debug)] -#[repr(C)] struct EntityMeta { /// The current generation of the [`Entity`]. pub generation: NonZero, @@ -967,13 +956,8 @@ impl EntityMeta { }; } -// This type is repr(C) to ensure that the layout and values within it can be safe to fully fill -// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`]. -// SAFETY: -// This type must not contain any pointers at any level, and be safe to fully fill with u8::MAX. -/// A location of an entity in an archetype. +/// Records where an entity's data is stored. #[derive(Copy, Clone, Debug, PartialEq)] -#[repr(C)] pub struct EntityLocation { /// The ID of the [`Archetype`] the [`Entity`] belongs to. /// @@ -998,7 +982,7 @@ pub struct EntityLocation { impl EntityLocation { /// location for **pending entity** and **invalid entity** - const INVALID: EntityLocation = EntityLocation { + pub(crate) const INVALID: EntityLocation = EntityLocation { archetype_id: ArchetypeId::INVALID, archetype_row: ArchetypeRow::INVALID, table_id: TableId::INVALID, @@ -1195,16 +1179,21 @@ mod tests { fn entity_debug() { let entity = Entity::from_raw(42); let string = format!("{:?}", entity); - assert!(string.contains("42")); - assert!(string.contains("v1")); - assert!(string.contains(format!("#{}", entity.to_bits()).as_str())); + assert_eq!(string, "42v1#4294967338"); + + let entity = Entity::PLACEHOLDER; + let string = format!("{:?}", entity); + assert_eq!(string, "PLACEHOLDER"); } #[test] fn entity_display() { let entity = Entity::from_raw(42); let string = format!("{}", entity); - assert!(string.contains("42")); - assert!(string.contains("v1")); + assert_eq!(string, "42v1"); + + let entity = Entity::PLACEHOLDER; + let string = format!("{}", entity); + assert_eq!(string, "PLACEHOLDER"); } } diff --git a/crates/bevy_ecs/src/entity/visit_entities.rs b/crates/bevy_ecs/src/entity/visit_entities.rs index 79b5197d2ea79b..abce76853d403f 100644 --- a/crates/bevy_ecs/src/entity/visit_entities.rs +++ b/crates/bevy_ecs/src/entity/visit_entities.rs @@ -113,7 +113,7 @@ mod tests { let mut entity_map = EntityHashMap::::default(); let mut remapped = Foo { ordered: vec![], - unordered: HashSet::new(), + unordered: HashSet::default(), single: Entity::PLACEHOLDER, not_an_entity: foo.not_an_entity.clone(), }; diff --git a/crates/bevy_ecs/src/event/base.rs b/crates/bevy_ecs/src/event/base.rs index ca26a0abee2e6d..dad87382b4decf 100644 --- a/crates/bevy_ecs/src/event/base.rs +++ 
b/crates/bevy_ecs/src/event/base.rs @@ -38,7 +38,7 @@ pub trait Event: Component { /// The component that describes which Entity to propagate this event to next, when [propagation] is enabled. /// /// [propagation]: crate::observer::Trigger::propagate - type Traversal: Traversal; + type Traversal: Traversal; /// When true, this event will always attempt to propagate when [triggered], without requiring a call /// to [`Trigger::propagate`]. diff --git a/crates/bevy_ecs/src/event/collections.rs b/crates/bevy_ecs/src/event/collections.rs index 4c9bc1996f8da8..c04512ba62bbae 100644 --- a/crates/bevy_ecs/src/event/collections.rs +++ b/crates/bevy_ecs/src/event/collections.rs @@ -114,9 +114,7 @@ impl Default for Events { impl Events { /// Returns the index of the oldest event stored in the event buffer. pub fn oldest_event_count(&self) -> usize { - self.events_a - .start_event_count - .min(self.events_b.start_event_count) + self.events_a.start_event_count } /// "Sends" an `event` by writing it to the current event buffer. @@ -279,7 +277,7 @@ impl Events { /// Get a specific event by id if it still exists in the events buffer. pub fn get_event(&self, id: usize) -> Option<(&E, EventId)> { - if id < self.oldest_id() { + if id < self.oldest_event_count() { return None; } @@ -291,11 +289,6 @@ impl Events { .map(|instance| (&instance.event, instance.event_id)) } - /// Oldest id still in the events buffer. - pub fn oldest_id(&self) -> usize { - self.events_a.start_event_count - } - /// Which event buffer is this event id a part of. fn sequence(&self, id: usize) -> &EventSequence { if id < self.events_b.start_event_count { diff --git a/crates/bevy_ecs/src/event/registry.rs b/crates/bevy_ecs/src/event/registry.rs index 67061f221208b5..ccc7b9fb92a3f7 100644 --- a/crates/bevy_ecs/src/event/registry.rs +++ b/crates/bevy_ecs/src/event/registry.rs @@ -49,7 +49,7 @@ impl EventRegistry { // By initializing the resource here, we can be sure that it is present, // and receive the correct, up-to-date `ComponentId` even if it was previously removed. let component_id = world.init_resource::>(); - let mut registry = world.get_resource_or_insert_with(Self::default); + let mut registry = world.get_resource_or_init::(); registry.event_updates.push(RegisteredEvent { component_id, previously_updated: false, @@ -84,7 +84,7 @@ impl EventRegistry { /// Removes an event from the world and it's associated [`EventRegistry`]. pub fn deregister_events(world: &mut World) { let component_id = world.init_resource::>(); - let mut registry = world.get_resource_or_insert_with(Self::default); + let mut registry = world.get_resource_or_init::(); registry .event_updates .retain(|e| e.component_id != component_id); diff --git a/crates/bevy_ecs/src/identifier/error.rs b/crates/bevy_ecs/src/identifier/error.rs index 8d278528ab99cd..a4679a12c634b7 100644 --- a/crates/bevy_ecs/src/identifier/error.rs +++ b/crates/bevy_ecs/src/identifier/error.rs @@ -8,7 +8,7 @@ use core::fmt; #[derive(Debug, PartialEq, Eq, Clone, Copy)] #[non_exhaustive] pub enum IdentifierError { - /// A given ID has an invalid value for initialising to a [`crate::identifier::Identifier`]. + /// A given ID has an invalid value for initializing to a [`crate::identifier::Identifier`]. InvalidIdentifier, /// A given ID has an invalid configuration of bits for converting to an [`crate::entity::Entity`]. 
InvalidEntityId(u64), diff --git a/crates/bevy_ecs/src/identifier/mod.rs b/crates/bevy_ecs/src/identifier/mod.rs index 3774ea5b3fef58..6134e472427e26 100644 --- a/crates/bevy_ecs/src/identifier/mod.rs +++ b/crates/bevy_ecs/src/identifier/mod.rs @@ -23,7 +23,7 @@ pub(crate) mod masks; #[cfg_attr(feature = "bevy_reflect", reflect(opaque))] #[cfg_attr(feature = "bevy_reflect", reflect(Debug, Hash, PartialEq))] // Alignment repr necessary to allow LLVM to better output -// optimised codegen for `to_bits`, `PartialEq` and `Ord`. +// optimized codegen for `to_bits`, `PartialEq` and `Ord`. #[repr(C, align(8))] pub struct Identifier { // Do not reorder the fields here. The ordering is explicitly used by repr(C) @@ -49,7 +49,7 @@ impl Identifier { let packed_high = IdentifierMask::pack_kind_into_high(masked_value, kind); // If the packed high component ends up being zero, that means that we tried - // to initialise an Identifier into an invalid state. + // to initialize an Identifier into an invalid state. if packed_high == 0 { Err(IdentifierError::InvalidIdentifier) } else { @@ -107,7 +107,7 @@ impl Identifier { match id { Ok(id) => id, - Err(_) => panic!("Attempted to initialise invalid bits as an id"), + Err(_) => panic!("Attempted to initialize invalid bits as an id"), } } @@ -133,7 +133,7 @@ impl Identifier { impl PartialEq for Identifier { #[inline] fn eq(&self, other: &Self) -> bool { - // By using `to_bits`, the codegen can be optimised out even + // By using `to_bits`, the codegen can be optimized out even // further potentially. Relies on the correct alignment/field // order of `Entity`. self.to_bits() == other.to_bits() @@ -142,10 +142,10 @@ impl PartialEq for Identifier { impl Eq for Identifier {} -// The derive macro codegen output is not optimal and can't be optimised as well +// The derive macro codegen output is not optimal and can't be optimized as well // by the compiler. This impl resolves the issue of non-optimal codegen by relying // on comparing against the bit representation of `Entity` instead of comparing -// the fields. The result is then LLVM is able to optimise the codegen for Entity +// the fields. The result is then LLVM is able to optimize the codegen for Entity // far beyond what the derive macro can. // See impl PartialOrd for Identifier { @@ -156,10 +156,10 @@ impl PartialOrd for Identifier { } } -// The derive macro codegen output is not optimal and can't be optimised as well +// The derive macro codegen output is not optimal and can't be optimized as well // by the compiler. This impl resolves the issue of non-optimal codegen by relying // on comparing against the bit representation of `Entity` instead of comparing -// the fields. The result is then LLVM is able to optimise the codegen for Entity +// the fields. The result is then LLVM is able to optimize the codegen for Entity // far beyond what the derive macro can. 
// See impl Ord for Identifier { diff --git a/crates/bevy_ecs/src/intern.rs b/crates/bevy_ecs/src/intern.rs index 179fdc8b82ad9e..6668a2a9a6aed4 100644 --- a/crates/bevy_ecs/src/intern.rs +++ b/crates/bevy_ecs/src/intern.rs @@ -164,8 +164,8 @@ impl Default for Interner { #[cfg(test)] mod tests { - use core::hash::{Hash, Hasher}; - use std::collections::hash_map::DefaultHasher; + use bevy_utils::FixedHasher; + use core::hash::{BuildHasher, Hash, Hasher}; use crate::intern::{Internable, Interned, Interner}; @@ -250,13 +250,8 @@ mod tests { assert_eq!(a, b); - let mut hasher = DefaultHasher::default(); - a.hash(&mut hasher); - let hash_a = hasher.finish(); - - let mut hasher = DefaultHasher::default(); - b.hash(&mut hasher); - let hash_b = hasher.finish(); + let hash_a = FixedHasher.hash_one(a); + let hash_b = FixedHasher.hash_one(b); assert_eq!(hash_a, hash_b); } diff --git a/crates/bevy_ecs/src/lib.rs b/crates/bevy_ecs/src/lib.rs index c86ecb78e32427..b5c379006eabda 100644 --- a/crates/bevy_ecs/src/lib.rs +++ b/crates/bevy_ecs/src/lib.rs @@ -1,5 +1,7 @@ // FIXME(11590): remove this once the lint is fixed #![allow(unsafe_op_in_unsafe_fn)] +// TODO: remove once Edition 2024 is released +#![allow(dependency_on_unit_never_type_fallback)] #![doc = include_str!("../README.md")] // `rustdoc_internals` is needed for `#[doc(fake_variadics)]` #![allow(internal_features)] @@ -30,6 +32,7 @@ pub mod query; #[cfg(feature = "bevy_reflect")] pub mod reflect; pub mod removal_detection; +pub mod result; pub mod schedule; pub mod storage; pub mod system; @@ -42,29 +45,31 @@ pub use bevy_ptr as ptr; /// /// This includes the most common types in this crate, re-exported for your convenience. pub mod prelude { + #[allow(deprecated)] #[doc(hidden)] pub use crate::{ bundle::Bundle, change_detection::{DetectChanges, DetectChangesMut, Mut, Ref}, - component::Component, + component::{require, Component}, entity::{Entity, EntityMapper}, event::{Event, EventMutator, EventReader, EventWriter, Events}, - observer::{Observer, Trigger}, + observer::{CloneEntityWithObserversExt, Observer, Trigger}, query::{Added, AnyOf, Changed, Has, Or, QueryBuilder, QueryState, With, Without}, removal_detection::RemovedComponents, + result::{Error, Result}, schedule::{ - apply_deferred, common_conditions::*, Condition, IntoSystemConfigs, IntoSystemSet, - IntoSystemSetConfigs, Schedule, Schedules, SystemSet, + apply_deferred, common_conditions::*, ApplyDeferred, Condition, IntoSystemConfigs, + IntoSystemSet, IntoSystemSetConfigs, Schedule, Schedules, SystemSet, }, system::{ Commands, Deferred, EntityCommand, EntityCommands, In, InMut, InRef, IntoSystem, Local, NonSend, NonSendMut, ParallelCommands, ParamSet, Populated, Query, ReadOnlySystem, Res, ResMut, Resource, Single, System, SystemIn, SystemInput, SystemParamBuilder, - SystemParamFunction, + SystemParamFunction, WithParamWarnPolicy, }, world::{ - Command, EntityMut, EntityRef, EntityWorldMut, FromWorld, OnAdd, OnInsert, OnRemove, - OnReplace, World, + Command, EntityMut, EntityRef, EntityWorldMut, FilteredResources, FilteredResourcesMut, + FromWorld, OnAdd, OnInsert, OnRemove, OnReplace, World, }, }; @@ -82,11 +87,10 @@ pub mod prelude { #[cfg(test)] mod tests { use crate as bevy_ecs; - use crate::component::{RequiredComponents, RequiredComponentsError}; use crate::{ bundle::Bundle, change_detection::Ref, - component::{Component, ComponentId}, + component::{require, Component, ComponentId, RequiredComponents, RequiredComponentsError}, entity::Entity, prelude::Or, query::{Added, 
Changed, FilteredAccess, QueryFilter, With, Without}, @@ -417,7 +421,7 @@ mod tests { let mut world = World::new(); let e = world.spawn((TableStored("abc"), A(123))).id(); let f = world.spawn((TableStored("def"), A(456), B(1))).id(); - let mut results = HashSet::new(); + let mut results = >::default(); world .query::<(Entity, &A)>() .iter(&world) @@ -594,7 +598,9 @@ mod tests { .collect::>(); assert_eq!( ents, - HashSet::from([(e, None, A(123)), (f, Some(SparseStored(1)), A(456))]) + [(e, None, A(123)), (f, Some(SparseStored(1)), A(456))] + .into_iter() + .collect::>() ); } @@ -626,7 +632,9 @@ mod tests { .iter(&world) .map(|(e, &i, &b)| (e, i, b)) .collect::>(), - HashSet::from([(e1, A(1), B(3)), (e2, A(2), B(4))]) + [(e1, A(1), B(3)), (e2, A(2), B(4))] + .into_iter() + .collect::>() ); assert_eq!(world.entity_mut(e1).take::(), Some(A(1))); assert_eq!( @@ -643,7 +651,9 @@ mod tests { .iter(&world) .map(|(e, &B(b), &TableStored(s))| (e, b, s)) .collect::>(), - HashSet::from([(e2, 4, "xyz"), (e1, 3, "abc")]) + [(e2, 4, "xyz"), (e1, 3, "abc")] + .into_iter() + .collect::>() ); world.entity_mut(e1).insert(A(43)); assert_eq!( @@ -652,7 +662,9 @@ mod tests { .iter(&world) .map(|(e, &i, &b)| (e, i, b)) .collect::>(), - HashSet::from([(e2, A(2), B(4)), (e1, A(43), B(3))]) + [(e2, A(2), B(4)), (e1, A(43), B(3))] + .into_iter() + .collect::>() ); world.entity_mut(e1).insert(C); assert_eq!( @@ -950,7 +962,7 @@ mod tests { assert_eq!( get_filtered::>(&mut world), - HashSet::from([e1, e3]) + [e1, e3].into_iter().collect::>() ); // ensure changing an entity's archetypes also moves its changed state @@ -958,7 +970,7 @@ mod tests { assert_eq!( get_filtered::>(&mut world), - HashSet::from([e3, e1]), + [e3, e1].into_iter().collect::>(), "changed entities list should not change" ); @@ -967,7 +979,7 @@ mod tests { assert_eq!( get_filtered::>(&mut world), - HashSet::from([e3, e1]), + [e3, e1].into_iter().collect::>(), "changed entities list should not change" ); @@ -975,7 +987,7 @@ mod tests { assert!(world.despawn(e2)); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e3, e1]), + [e3, e1].into_iter().collect::>(), "changed entities list should not change" ); @@ -983,7 +995,7 @@ mod tests { assert!(world.despawn(e1)); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e3]), + [e3].into_iter().collect::>(), "e1 should no longer be returned" ); @@ -994,11 +1006,20 @@ mod tests { let e4 = world.spawn_empty().id(); world.entity_mut(e4).insert(A(0)); - assert_eq!(get_filtered::>(&mut world), HashSet::from([e4])); - assert_eq!(get_filtered::>(&mut world), HashSet::from([e4])); + assert_eq!( + get_filtered::>(&mut world), + [e4].into_iter().collect::>() + ); + assert_eq!( + get_filtered::>(&mut world), + [e4].into_iter().collect::>() + ); world.entity_mut(e4).insert(A(1)); - assert_eq!(get_filtered::>(&mut world), HashSet::from([e4])); + assert_eq!( + get_filtered::>(&mut world), + [e4].into_iter().collect::>() + ); world.clear_trackers(); @@ -1007,9 +1028,18 @@ mod tests { world.entity_mut(e4).insert((A(0), B(0))); assert!(get_filtered::>(&mut world).is_empty()); - assert_eq!(get_filtered::>(&mut world), HashSet::from([e4])); - assert_eq!(get_filtered::>(&mut world), HashSet::from([e4])); - assert_eq!(get_filtered::>(&mut world), HashSet::from([e4])); + assert_eq!( + get_filtered::>(&mut world), + [e4].into_iter().collect::>() + ); + assert_eq!( + get_filtered::>(&mut world), + [e4].into_iter().collect::>() + ); + assert_eq!( + get_filtered::>(&mut world), + [e4].into_iter().collect::>() + ); } 
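One pattern worth noting in the test churn above: expected sets are now built with `collect` or `default` rather than `HashSet::from` / `HashSet::new`. A sketch under the assumption (suggested by the `FixedHasher` change in the `intern` tests above) that `bevy_utils::HashSet` now defaults to a non-std hasher, so the std-only constructors no longer apply:

```rust
use bevy_utils::HashSet;

fn main() {
    // Build sets the way the updated tests do: via `collect` or `default`,
    // since constructors tied to std's RandomState are unavailable for this alias.
    let expected: HashSet<u32> = [1, 2, 3].into_iter().collect();

    let mut other: HashSet<u32> = HashSet::default();
    other.insert(1);

    assert!(expected.contains(&1) && other.contains(&1));
}
```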
#[test] @@ -1041,19 +1071,19 @@ mod tests { assert_eq!( get_filtered::>(&mut world), - HashSet::from([e1, e3]) + [e1, e3].into_iter().collect::>() ); // ensure changing an entity's archetypes also moves its changed state world.entity_mut(e1).insert(C); - assert_eq!(get_filtered::>(&mut world), HashSet::from([e3, e1]), "changed entities list should not change (although the order will due to archetype moves)"); + assert_eq!(get_filtered::>(&mut world), [e3, e1].into_iter().collect::>(), "changed entities list should not change (although the order will due to archetype moves)"); // spawning a new SparseStored entity should not change existing changed state world.entity_mut(e1).insert(SparseStored(0)); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e3, e1]), + [e3, e1].into_iter().collect::>(), "changed entities list should not change" ); @@ -1061,7 +1091,7 @@ mod tests { assert!(world.despawn(e2)); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e3, e1]), + [e3, e1].into_iter().collect::>(), "changed entities list should not change" ); @@ -1069,7 +1099,7 @@ mod tests { assert!(world.despawn(e1)); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e3]), + [e3].into_iter().collect::>(), "e1 should no longer be returned" ); @@ -1082,17 +1112,17 @@ mod tests { world.entity_mut(e4).insert(SparseStored(0)); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e4]) + [e4].into_iter().collect::>() ); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e4]) + [e4].into_iter().collect::>() ); world.entity_mut(e4).insert(A(1)); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e4]) + [e4].into_iter().collect::>() ); world.clear_trackers(); @@ -1104,7 +1134,7 @@ mod tests { assert!(get_filtered::>(&mut world).is_empty()); assert_eq!( get_filtered::>(&mut world), - HashSet::from([e4]) + [e4].into_iter().collect::>() ); } @@ -1288,7 +1318,12 @@ mod tests { .iter(&world) .map(|(a, b)| (a.0, b.0)) .collect::>(); - assert_eq!(results, HashSet::from([(1, "1"), (2, "2"), (3, "3"),])); + assert_eq!( + results, + [(1, "1"), (2, "2"), (3, "3"),] + .into_iter() + .collect::>() + ); let removed_bundle = world.entity_mut(e2).take::<(B, TableStored)>().unwrap(); assert_eq!(removed_bundle, (B(2), TableStored("2"))); @@ -1297,11 +1332,14 @@ mod tests { .iter(&world) .map(|(a, b)| (a.0, b.0)) .collect::>(); - assert_eq!(results, HashSet::from([(1, "1"), (3, "3"),])); + assert_eq!( + results, + [(1, "1"), (3, "3"),].into_iter().collect::>() + ); let mut a_query = world.query::<&A>(); let results = a_query.iter(&world).map(|a| a.0).collect::>(); - assert_eq!(results, HashSet::from([1, 3, 2])); + assert_eq!(results, [1, 3, 2].into_iter().collect::>()); let entity_ref = world.entity(e2); assert_eq!( @@ -1480,6 +1518,7 @@ mod tests { #[test] fn resource_scope() { let mut world = World::default(); + assert!(world.try_resource_scope::(|_, _| {}).is_none()); world.insert_resource(A(0)); world.resource_scope(|world: &mut World, mut value: Mut| { value.0 += 1; @@ -1616,107 +1655,6 @@ mod tests { assert_eq!(0, query_min_size![(&A, &B), Or<(Changed, Changed)>]); } - #[test] - fn reserve_entities_across_worlds() { - let mut world_a = World::default(); - let mut world_b = World::default(); - - let e1 = world_a.spawn(A(1)).id(); - let e2 = world_a.spawn(A(2)).id(); - let e3 = world_a.entities().reserve_entity(); - world_a.flush_entities(); - - let world_a_max_entities = world_a.entities().len(); - world_b.entities.reserve_entities(world_a_max_entities); - 
world_b.entities.flush_as_invalid(); - - let e4 = world_b.spawn(A(4)).id(); - assert_eq!( - e4, - Entity::from_raw(3), - "new entity is created immediately after world_a's max entity" - ); - assert!(world_b.get::(e1).is_none()); - assert!(world_b.get_entity(e1).is_none()); - - assert!(world_b.get::(e2).is_none()); - assert!(world_b.get_entity(e2).is_none()); - - assert!(world_b.get::(e3).is_none()); - assert!(world_b.get_entity(e3).is_none()); - - world_b.get_or_spawn(e1).unwrap().insert(B(1)); - assert_eq!( - world_b.get::(e1), - Some(&B(1)), - "spawning into 'world_a' entities works" - ); - - world_b.get_or_spawn(e4).unwrap().insert(B(4)); - assert_eq!( - world_b.get::(e4), - Some(&B(4)), - "spawning into existing `world_b` entities works" - ); - assert_eq!( - world_b.get::(e4), - Some(&A(4)), - "spawning into existing `world_b` entities works" - ); - - let e4_mismatched_generation = - Entity::from_raw_and_generation(3, NonZero::::new(2).unwrap()); - assert!( - world_b.get_or_spawn(e4_mismatched_generation).is_none(), - "attempting to spawn on top of an entity with a mismatched entity generation fails" - ); - assert_eq!( - world_b.get::(e4), - Some(&B(4)), - "failed mismatched spawn doesn't change existing entity" - ); - assert_eq!( - world_b.get::(e4), - Some(&A(4)), - "failed mismatched spawn doesn't change existing entity" - ); - - let high_non_existent_entity = Entity::from_raw(6); - world_b - .get_or_spawn(high_non_existent_entity) - .unwrap() - .insert(B(10)); - assert_eq!( - world_b.get::(high_non_existent_entity), - Some(&B(10)), - "inserting into newly allocated high / non-continuous entity id works" - ); - - let high_non_existent_but_reserved_entity = Entity::from_raw(5); - assert!( - world_b.get_entity(high_non_existent_but_reserved_entity).is_none(), - "entities between high-newly allocated entity and continuous block of existing entities don't exist" - ); - - let reserved_entities = vec![ - world_b.entities().reserve_entity(), - world_b.entities().reserve_entity(), - world_b.entities().reserve_entity(), - world_b.entities().reserve_entity(), - ]; - - assert_eq!( - reserved_entities, - vec![ - Entity::from_raw(5), - Entity::from_raw(4), - Entity::from_raw(7), - Entity::from_raw(8), - ], - "space between original entities and high entities is used for new entity ids" - ); - } - #[test] fn insert_or_spawn_batch() { let mut world = World::default(); @@ -1800,6 +1738,134 @@ mod tests { ); } + #[test] + fn insert_batch() { + let mut world = World::default(); + let e0 = world.spawn(A(0)).id(); + let e1 = world.spawn(B(0)).id(); + + let values = vec![(e0, (A(1), B(0))), (e1, (A(0), B(1)))]; + + world.insert_batch(values); + + assert_eq!( + world.get::(e0), + Some(&A(1)), + "first entity's A component should have been replaced" + ); + assert_eq!( + world.get::(e0), + Some(&B(0)), + "first entity should have received B component" + ); + assert_eq!( + world.get::(e1), + Some(&A(0)), + "second entity should have received A component" + ); + assert_eq!( + world.get::(e1), + Some(&B(1)), + "second entity's B component should have been replaced" + ); + } + + #[test] + fn insert_batch_same_archetype() { + let mut world = World::default(); + let e0 = world.spawn((A(0), B(0))).id(); + let e1 = world.spawn((A(0), B(0))).id(); + let e2 = world.spawn(B(0)).id(); + + let values = vec![(e0, (B(1), C)), (e1, (B(2), C)), (e2, (B(3), C))]; + + world.insert_batch(values); + let mut query = world.query::<(Option<&A>, &B, &C)>(); + let component_values = query.get_many(&world, [e0, e1, e2]).unwrap(); + 
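// `get_many` yields query items in the same order as the entities passed in
// (`[e0, e1, e2]` here), so the assertion below compares the tuples positionally.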
+ assert_eq!( + component_values, + [(Some(&A(0)), &B(1), &C), (Some(&A(0)), &B(2), &C), (None, &B(3), &C)], + "all entities should have had their B component replaced, received C component, and had their A component (or lack thereof) unchanged" + ); + } + + #[test] + fn insert_batch_if_new() { + let mut world = World::default(); + let e0 = world.spawn(A(0)).id(); + let e1 = world.spawn(B(0)).id(); + + let values = vec![(e0, (A(1), B(0))), (e1, (A(0), B(1)))]; + + world.insert_batch_if_new(values); + + assert_eq!( + world.get::(e0), + Some(&A(0)), + "first entity's A component should not have been replaced" + ); + assert_eq!( + world.get::(e0), + Some(&B(0)), + "first entity should have received B component" + ); + assert_eq!( + world.get::(e1), + Some(&A(0)), + "second entity should have received A component" + ); + assert_eq!( + world.get::(e1), + Some(&B(0)), + "second entity's B component should not have been replaced" + ); + } + + #[test] + fn try_insert_batch() { + let mut world = World::default(); + let e0 = world.spawn(A(0)).id(); + let e1 = Entity::from_raw(1); + + let values = vec![(e0, (A(1), B(0))), (e1, (A(0), B(1)))]; + + world.try_insert_batch(values); + + assert_eq!( + world.get::(e0), + Some(&A(1)), + "first entity's A component should have been replaced" + ); + assert_eq!( + world.get::(e0), + Some(&B(0)), + "first entity should have received B component" + ); + } + + #[test] + fn try_insert_batch_if_new() { + let mut world = World::default(); + let e0 = world.spawn(A(0)).id(); + let e1 = Entity::from_raw(1); + + let values = vec![(e0, (A(1), B(0))), (e1, (A(0), B(1)))]; + + world.try_insert_batch_if_new(values); + + assert_eq!( + world.get::(e0), + Some(&A(0)), + "first entity's A component should not have been replaced" + ); + assert_eq!( + world.get::(e0), + Some(&B(0)), + "first entity should have received B component" + ); + } + #[test] fn required_components() { #[derive(Component)] @@ -2029,6 +2095,138 @@ mod tests { assert!(e.contains::()); } + #[test] + fn remove_component_and_its_runtime_required_components() { + #[derive(Component)] + struct X; + + #[derive(Component, Default)] + struct Y; + + #[derive(Component, Default)] + struct Z; + + #[derive(Component)] + struct V; + + let mut world = World::new(); + world.register_required_components::(); + world.register_required_components::(); + + let e = world.spawn((X, V)).id(); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + + //check that `remove` works as expected + world.entity_mut(e).remove::(); + assert!(!world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + + world.entity_mut(e).insert(X); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + + //remove `X` again and ensure that `Y` and `Z` was removed too + world.entity_mut(e).remove_with_requires::(); + assert!(!world.entity(e).contains::()); + assert!(!world.entity(e).contains::()); + assert!(!world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + } + + #[test] + fn remove_component_and_its_required_components() { + #[derive(Component)] + #[require(Y)] + struct X; + + #[derive(Component, Default)] + #[require(Z)] + struct Y; + + #[derive(Component, Default)] + struct Z; + + #[derive(Component)] + struct V; + + 
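// Via the `#[require(..)]` attributes above, `X` requires `Y` and `Y` requires `Z`,
// so spawning `X` should also insert `Y` and `Z`. `V` has no required components and
// is only used to check that unrelated components survive the removals below.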
let mut world = World::new(); + + let e = world.spawn((X, V)).id(); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + + //check that `remove` works as expected + world.entity_mut(e).remove::(); + assert!(!world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + + world.entity_mut(e).insert(X); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + + //remove `X` again and ensure that `Y` and `Z` was removed too + world.entity_mut(e).remove_with_requires::(); + assert!(!world.entity(e).contains::()); + assert!(!world.entity(e).contains::()); + assert!(!world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + } + + #[test] + fn remove_bundle_and_his_required_components() { + #[derive(Component, Default)] + #[require(Y)] + struct X; + + #[derive(Component, Default)] + struct Y; + + #[derive(Component, Default)] + #[require(W)] + struct Z; + + #[derive(Component, Default)] + struct W; + + #[derive(Component)] + struct V; + + #[derive(Bundle, Default)] + struct TestBundle { + x: X, + z: Z, + } + + let mut world = World::new(); + let e = world.spawn((TestBundle::default(), V)).id(); + + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + + world.entity_mut(e).remove_with_requires::(); + assert!(!world.entity(e).contains::()); + assert!(!world.entity(e).contains::()); + assert!(!world.entity(e).contains::()); + assert!(!world.entity(e).contains::()); + assert!(world.entity(e).contains::()); + } + #[test] fn runtime_required_components() { // Same as `required_components` test but with runtime registration @@ -2162,6 +2360,118 @@ mod tests { ); } + #[test] + fn runtime_required_components_propagate_up() { + // `A` requires `B` directly. + #[derive(Component)] + #[require(B)] + struct A; + + #[derive(Component, Default)] + struct B; + + #[derive(Component, Default)] + struct C; + + let mut world = World::new(); + + // `B` requires `C` with a runtime registration. + // `A` should also require `C` because it requires `B`. 
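// Runtime-registered requirements are inherited transitively, so adding the
// `B`-requires-`C` relationship below is enough for entities spawned with `A` to get `C`.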
+ world.register_required_components::(); + + let id = world.spawn(A).id(); + + assert!(world.entity(id).get::().is_some()); + } + + #[test] + fn runtime_required_components_propagate_up_even_more() { + #[derive(Component)] + struct A; + + #[derive(Component, Default)] + struct B; + + #[derive(Component, Default)] + struct C; + + #[derive(Component, Default)] + struct D; + + let mut world = World::new(); + + world.register_required_components::(); + world.register_required_components::(); + world.register_required_components::(); + + let id = world.spawn(A).id(); + + assert!(world.entity(id).get::().is_some()); + } + + #[test] + fn runtime_required_components_deep_require_does_not_override_shallow_require() { + #[derive(Component)] + struct A; + #[derive(Component, Default)] + struct B; + #[derive(Component, Default)] + struct C; + #[derive(Component)] + struct Counter(i32); + #[derive(Component, Default)] + struct D; + + let mut world = World::new(); + + world.register_required_components::(); + world.register_required_components::(); + world.register_required_components::(); + world.register_required_components_with::(|| Counter(2)); + // This should replace the require constructor in A since it is + // shallower. + world.register_required_components_with::(|| Counter(1)); + + let id = world.spawn(A).id(); + + // The "shallower" of the two components is used. + assert_eq!(world.entity(id).get::().unwrap().0, 1); + } + + #[test] + fn runtime_required_components_deep_require_does_not_override_shallow_require_deep_subtree_after_shallow( + ) { + #[derive(Component)] + struct A; + #[derive(Component, Default)] + struct B; + #[derive(Component, Default)] + struct C; + #[derive(Component, Default)] + struct D; + #[derive(Component, Default)] + struct E; + #[derive(Component)] + struct Counter(i32); + #[derive(Component, Default)] + struct F; + + let mut world = World::new(); + + world.register_required_components::(); + world.register_required_components::(); + world.register_required_components::(); + world.register_required_components::(); + world.register_required_components_with::(|| Counter(1)); + world.register_required_components_with::(|| Counter(2)); + world.register_required_components::(); + + let id = world.spawn(A).id(); + + // The "shallower" of the two components is used. + assert_eq!(world.entity(id).get::().unwrap().0, 1); + } + #[test] fn runtime_required_components_existing_archetype() { #[derive(Component)] @@ -2282,6 +2592,24 @@ mod tests { assert_eq!(to_vec(required_z), vec![(b, 0), (c, 1)]); } + #[test] + #[should_panic = "Recursive required components detected: A → B → C → B\nhelp: If this is intentional, consider merging the components."] + fn required_components_recursion_errors() { + #[derive(Component, Default)] + #[require(B)] + struct A; + + #[derive(Component, Default)] + #[require(C)] + struct B; + + #[derive(Component, Default)] + #[require(B)] + struct C; + + World::new().register_component::(); + } + // These structs are primarily compilation tests to test the derive macros. Because they are // never constructed, we have to manually silence the `dead_code` lint. 
#[allow(dead_code)] diff --git a/crates/bevy_ecs/src/observer/entity_observer.rs b/crates/bevy_ecs/src/observer/entity_observer.rs index 8223443f276a6f..9ef5a2a5d39372 100644 --- a/crates/bevy_ecs/src/observer/entity_observer.rs +++ b/crates/bevy_ecs/src/observer/entity_observer.rs @@ -1,7 +1,8 @@ use crate::{ - component::{Component, ComponentHooks, StorageType}, - entity::Entity, + component::{Component, ComponentCloneHandler, ComponentHooks, Mutable, StorageType}, + entity::{Entity, EntityCloneBuilder, EntityCloner}, observer::ObserverState, + world::{DeferredWorld, World}, }; /// Tracks a list of entity observers for the [`Entity`] [`ObservedBy`] is added to. @@ -10,6 +11,7 @@ pub(crate) struct ObservedBy(pub(crate) Vec); impl Component for ObservedBy { const STORAGE_TYPE: StorageType = StorageType::SparseSet; + type Mutability = Mutable; fn register_component_hooks(hooks: &mut ComponentHooks) { hooks.on_remove(|mut world, entity, _, _| { @@ -19,7 +21,7 @@ impl Component for ObservedBy { }; for e in observed_by { let (total_entities, despawned_watched_entities) = { - let Some(mut entity_mut) = world.get_entity_mut(e) else { + let Ok(mut entity_mut) = world.get_entity_mut(e) else { continue; }; let Some(mut state) = entity_mut.get_mut::() else { @@ -39,4 +41,110 @@ impl Component for ObservedBy { } }); } + + fn get_component_clone_handler() -> ComponentCloneHandler { + ComponentCloneHandler::Ignore + } +} + +/// Trait that holds functions for configuring interaction with observers during entity cloning. +pub trait CloneEntityWithObserversExt { + /// Sets the option to automatically add cloned entities to the obsevers targeting source entity. + fn add_observers(&mut self, add_observers: bool) -> &mut Self; +} + +impl CloneEntityWithObserversExt for EntityCloneBuilder<'_> { + fn add_observers(&mut self, add_observers: bool) -> &mut Self { + if add_observers { + self.override_component_clone_handler::(ComponentCloneHandler::Custom( + component_clone_observed_by, + )) + } else { + self.remove_component_clone_handler_override::() + } + } +} + +fn component_clone_observed_by(world: &mut DeferredWorld, entity_cloner: &EntityCloner) { + let target = entity_cloner.target(); + let source = entity_cloner.source(); + + world.commands().queue(move |world: &mut World| { + let observed_by = world + .get::(source) + .map(|observed_by| observed_by.0.clone()) + .expect("Source entity must have ObservedBy"); + + world + .entity_mut(target) + .insert(ObservedBy(observed_by.clone())); + + for observer in &observed_by { + let mut observer_state = world + .get_mut::(*observer) + .expect("Source observer entity must have ObserverState"); + observer_state.descriptor.entities.push(target); + let event_types = observer_state.descriptor.events.clone(); + let components = observer_state.descriptor.components.clone(); + for event_type in event_types { + let observers = world.observers.get_observers(event_type); + if components.is_empty() { + if let Some(map) = observers.entity_observers.get(&source).cloned() { + observers.entity_observers.insert(target, map); + } + } else { + for component in &components { + let Some(observers) = observers.component_observers.get_mut(component) + else { + continue; + }; + if let Some(map) = observers.entity_map.get(&source).cloned() { + observers.entity_map.insert(target, map); + } + } + } + } + } + }); +} + +#[cfg(test)] +mod tests { + use crate::{ + self as bevy_ecs, + entity::EntityCloneBuilder, + event::Event, + observer::{CloneEntityWithObserversExt, Trigger}, + 
system::{ResMut, Resource}, + world::World, + }; + + #[derive(Resource, Default)] + struct Num(usize); + + #[derive(Event)] + struct E; + + #[test] + fn clone_entity_with_observer() { + let mut world = World::default(); + world.init_resource::(); + + let e = world + .spawn_empty() + .observe(|_: Trigger, mut res: ResMut| res.0 += 1) + .id(); + world.flush(); + + world.trigger_targets(E, e); + + let e_clone = world.spawn_empty().id(); + let mut builder = EntityCloneBuilder::new(&mut world); + builder.add_observers(true); + builder.clone_entity(e, e_clone); + + world.trigger_targets(E, [e, e_clone]); + + assert_eq!(world.resource::().0, 3); + } } diff --git a/crates/bevy_ecs/src/observer/mod.rs b/crates/bevy_ecs/src/observer/mod.rs index 8f9b2c161d7d03..f37200154b4e5b 100644 --- a/crates/bevy_ecs/src/observer/mod.rs +++ b/crates/bevy_ecs/src/observer/mod.rs @@ -4,6 +4,7 @@ mod entity_observer; mod runner; mod trigger_event; +pub use entity_observer::CloneEntityWithObserversExt; pub use runner::*; pub use trigger_event::*; @@ -18,12 +19,14 @@ use crate::{ }; use bevy_ptr::Ptr; use bevy_utils::HashMap; +#[cfg(feature = "track_change_detection")] use core::panic::Location; use core::{ fmt::Debug, marker::PhantomData, ops::{Deref, DerefMut}, }; +use smallvec::SmallVec; /// Type containing triggered [`Event`] information for a given run of an [`Observer`]. This contains the /// [`Event`] data itself. If it was triggered for a specific [`Entity`], it includes that as well. It also @@ -66,9 +69,29 @@ impl<'w, E, B: Bundle> Trigger<'w, E, B> { Ptr::from(&self.event) } - /// Returns the [`Entity`] that triggered the observer, could be [`Entity::PLACEHOLDER`]. - pub fn entity(&self) -> Entity { - self.trigger.entity + /// Returns the [`Entity`] that was targeted by the `event` that triggered this observer. It may + /// be [`Entity::PLACEHOLDER`]. + /// + /// Observable events can target specific entities. When those events fire, they will trigger + /// any observers on the targeted entities. In this case, the `target()` and `observer()` are + /// the same, because the observer that was triggered is attached to the entity that was + /// targeted by the event. + /// + /// However, it is also possible for those events to bubble up the entity hierarchy and trigger + /// observers on *different* entities, or trigger a global observer. In these cases, the + /// observing entity is *different* from the entity being targeted by the event. + /// + /// This is an important distinction: the entity reacting to an event is not always the same as + /// the entity triggered by the event. + pub fn target(&self) -> Entity { + self.trigger.target + } + + /// Returns the components that triggered the observer, out of the + /// components defined in `B`. Does not necessarily include all of them as + /// `B` acts like an `OR` filter rather than an `AND` filter. + pub fn components(&self) -> &[ComponentId] { + &self.trigger.components } /// Returns the [`Entity`] that observed the triggered event. @@ -194,17 +217,25 @@ impl ObserverDescriptor { pub struct ObserverTrigger { /// The [`Entity`] of the observer handling the trigger. pub observer: Entity, - - /// The [`ComponentId`] the trigger targeted. + /// The [`Event`] the trigger targeted. pub event_type: ComponentId, - + /// The [`ComponentId`]s the trigger targeted. + components: SmallVec<[ComponentId; 2]>, /// The entity the trigger targeted. - pub entity: Entity, + pub target: Entity, /// The location of the source code that triggered the obserer. 
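///
/// Only present when the `track_change_detection` feature is enabled.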
+ #[cfg(feature = "track_change_detection")] pub caller: &'static Location<'static>, } +impl ObserverTrigger { + /// Returns the components that the trigger targeted. + pub fn components(&self) -> &[ComponentId] { + &self.components + } +} + // Map between an observer entity and its runner type ObserverMap = EntityHashMap; @@ -265,11 +296,11 @@ impl Observers { pub(crate) fn invoke( mut world: DeferredWorld, event_type: ComponentId, - entity: Entity, - components: impl Iterator, + target: Entity, + components: impl Iterator + Clone, data: &mut T, propagate: &mut bool, - caller: &'static Location<'static>, + #[cfg(feature = "track_change_detection")] caller: &'static Location<'static>, ) { // SAFETY: You cannot get a mutable reference to `observers` from `DeferredWorld` let (mut world, observers) = unsafe { @@ -284,13 +315,17 @@ impl Observers { (world.into_deferred(), observers) }; + let trigger_for_components = components.clone(); + let mut trigger_observer = |(&observer, runner): (&Entity, &ObserverRunner)| { (runner)( world.reborrow(), ObserverTrigger { observer, event_type, - entity, + components: components.clone().collect(), + target, + #[cfg(feature = "track_change_detection")] caller, }, data.into(), @@ -301,22 +336,22 @@ impl Observers { observers.map.iter().for_each(&mut trigger_observer); // Trigger entity observers listening for this kind of trigger - if entity != Entity::PLACEHOLDER { - if let Some(map) = observers.entity_observers.get(&entity) { + if target != Entity::PLACEHOLDER { + if let Some(map) = observers.entity_observers.get(&target) { map.iter().for_each(&mut trigger_observer); } } // Trigger observers listening to this trigger targeting a specific component - components.for_each(|id| { + trigger_for_components.for_each(|id| { if let Some(component_observers) = observers.component_observers.get(&id) { component_observers .map .iter() .for_each(&mut trigger_observer); - if entity != Entity::PLACEHOLDER { - if let Some(map) = component_observers.entity_map.get(&entity) { + if target != Entity::PLACEHOLDER { + if let Some(map) = component_observers.entity_map.get(&target) { map.iter().for_each(&mut trigger_observer); } } @@ -370,8 +405,29 @@ impl Observers { } impl World { - /// Spawns a "global" [`Observer`] and returns its [`Entity`]. - pub fn observe( + /// Spawns a "global" [`Observer`] which will watch for the given event. + /// Returns its [`Entity`] as a [`EntityWorldMut`]. + /// + /// **Calling [`observe`](EntityWorldMut::observe) on the returned + /// [`EntityWorldMut`] will observe the observer itself, which you very + /// likely do not want.** + /// + /// # Example + /// + /// ``` + /// # use bevy_ecs::prelude::*; + /// #[derive(Component)] + /// struct A; + /// + /// # let mut world = World::new(); + /// world.add_observer(|_: Trigger| { + /// // ... + /// }); + /// world.add_observer(|_: Trigger| { + /// // ... 
+ /// }); + /// ``` + pub fn add_observer( &mut self, system: impl IntoObserverSystem, ) -> EntityWorldMut { @@ -388,6 +444,7 @@ impl World { TriggerEvent { event, targets: (), + #[cfg(feature = "track_change_detection")] caller: Location::caller(), } .trigger(self); @@ -402,6 +459,7 @@ impl World { TriggerEvent { event, targets: (), + #[cfg(feature = "track_change_detection")] caller: Location::caller(), } .trigger_ref(self); @@ -417,6 +475,7 @@ impl World { TriggerEvent { event, targets, + #[cfg(feature = "track_change_detection")] caller: Location::caller(), } .trigger(self); @@ -432,6 +491,7 @@ impl World { TriggerEvent { event, targets, + #[cfg(feature = "track_change_detection")] caller: Location::caller(), } .trigger_ref(self); @@ -446,7 +506,7 @@ impl World { // Populate ObservedBy for each observed entity. for watched_entity in &(*observer_state).descriptor.entities { let mut entity_mut = self.entity_mut(*watched_entity); - let mut observed_by = entity_mut.entry::().or_default(); + let mut observed_by = entity_mut.entry::().or_default().into_mut(); observed_by.0.push(observer_entity); } (&*observer_state, &mut self.archetypes, &mut self.observers) @@ -562,8 +622,10 @@ mod tests { use core::panic::Location; use bevy_ptr::OwningPtr; + use bevy_utils::HashMap; use crate as bevy_ecs; + use crate::component::ComponentId; use crate::{ observer::{EmitDynamicTrigger, Observer, ObserverDescriptor, ObserverState, OnReplace}, prelude::*, @@ -604,8 +666,8 @@ mod tests { #[derive(Component)] struct Parent(Entity); - impl Traversal for &'_ Parent { - fn traverse(item: Self::Item<'_>) -> Option { + impl Traversal for &'_ Parent { + fn traverse(item: Self::Item<'_>, _: &D) -> Option { Some(item.0) } } @@ -624,10 +686,14 @@ mod tests { let mut world = World::new(); world.init_resource::(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("add")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("insert")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("replace")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("remove")); + world.add_observer(|_: Trigger, mut res: ResMut| res.observed("add")); + world + .add_observer(|_: Trigger, mut res: ResMut| res.observed("insert")); + world.add_observer(|_: Trigger, mut res: ResMut| { + res.observed("replace"); + }); + world + .add_observer(|_: Trigger, mut res: ResMut| res.observed("remove")); let entity = world.spawn(A).id(); world.despawn(entity); @@ -642,10 +708,14 @@ mod tests { let mut world = World::new(); world.init_resource::(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("add")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("insert")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("replace")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("remove")); + world.add_observer(|_: Trigger, mut res: ResMut| res.observed("add")); + world + .add_observer(|_: Trigger, mut res: ResMut| res.observed("insert")); + world.add_observer(|_: Trigger, mut res: ResMut| { + res.observed("replace"); + }); + world + .add_observer(|_: Trigger, mut res: ResMut| res.observed("remove")); let mut entity = world.spawn_empty(); entity.insert(A); @@ -662,10 +732,14 @@ mod tests { let mut world = World::new(); world.init_resource::(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("add")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("insert")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("replace")); - world.observe(|_: Trigger, mut res: 
ResMut| res.observed("remove")); + world.add_observer(|_: Trigger, mut res: ResMut| res.observed("add")); + world + .add_observer(|_: Trigger, mut res: ResMut| res.observed("insert")); + world.add_observer(|_: Trigger, mut res: ResMut| { + res.observed("replace"); + }); + world + .add_observer(|_: Trigger, mut res: ResMut| res.observed("remove")); let mut entity = world.spawn_empty(); entity.insert(S); @@ -684,10 +758,14 @@ mod tests { let entity = world.spawn(A).id(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("add")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("insert")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("replace")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("remove")); + world.add_observer(|_: Trigger, mut res: ResMut| res.observed("add")); + world + .add_observer(|_: Trigger, mut res: ResMut| res.observed("insert")); + world.add_observer(|_: Trigger, mut res: ResMut| { + res.observed("replace"); + }); + world + .add_observer(|_: Trigger, mut res: ResMut| res.observed("remove")); // TODO: ideally this flush is not necessary, but right now observe() returns WorldEntityMut // and therefore does not automatically flush. @@ -703,26 +781,26 @@ mod tests { fn observer_order_recursive() { let mut world = World::new(); world.init_resource::(); - world.observe( + world.add_observer( |obs: Trigger, mut res: ResMut, mut commands: Commands| { res.observed("add_a"); - commands.entity(obs.entity()).insert(B); + commands.entity(obs.target()).insert(B); }, ); - world.observe( + world.add_observer( |obs: Trigger, mut res: ResMut, mut commands: Commands| { res.observed("remove_a"); - commands.entity(obs.entity()).remove::(); + commands.entity(obs.target()).remove::(); }, ); - world.observe( + world.add_observer( |obs: Trigger, mut res: ResMut, mut commands: Commands| { res.observed("add_b"); - commands.entity(obs.entity()).remove::(); + commands.entity(obs.target()).remove::(); }, ); - world.observe(|_: Trigger, mut res: ResMut| { + world.add_observer(|_: Trigger, mut res: ResMut| { res.observed("remove_b"); }); @@ -740,11 +818,11 @@ mod tests { fn observer_trigger_ref() { let mut world = World::new(); - world.observe(|mut trigger: Trigger| trigger.event_mut().counter += 1); - world.observe(|mut trigger: Trigger| trigger.event_mut().counter += 2); - world.observe(|mut trigger: Trigger| trigger.event_mut().counter += 4); + world.add_observer(|mut trigger: Trigger| trigger.event_mut().counter += 1); + world.add_observer(|mut trigger: Trigger| trigger.event_mut().counter += 2); + world.add_observer(|mut trigger: Trigger| trigger.event_mut().counter += 4); // This flush is required for the last observer to be called when triggering the event, - // due to `World::observe` returning `WorldEntityMut`. + // due to `World::add_observer` returning `WorldEntityMut`. 
world.flush(); let mut event = EventWithData { counter: 0 }; @@ -756,11 +834,17 @@ mod tests { fn observer_trigger_targets_ref() { let mut world = World::new(); - world.observe(|mut trigger: Trigger| trigger.event_mut().counter += 1); - world.observe(|mut trigger: Trigger| trigger.event_mut().counter += 2); - world.observe(|mut trigger: Trigger| trigger.event_mut().counter += 4); + world.add_observer(|mut trigger: Trigger| { + trigger.event_mut().counter += 1; + }); + world.add_observer(|mut trigger: Trigger| { + trigger.event_mut().counter += 2; + }); + world.add_observer(|mut trigger: Trigger| { + trigger.event_mut().counter += 4; + }); // This flush is required for the last observer to be called when triggering the event, - // due to `World::observe` returning `WorldEntityMut`. + // due to `World::add_observer` returning `WorldEntityMut`. world.flush(); let mut event = EventWithData { counter: 0 }; @@ -774,8 +858,8 @@ mod tests { let mut world = World::new(); world.init_resource::(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("add_1")); - world.observe(|_: Trigger, mut res: ResMut| res.observed("add_2")); + world.add_observer(|_: Trigger, mut res: ResMut| res.observed("add_1")); + world.add_observer(|_: Trigger, mut res: ResMut| res.observed("add_2")); world.spawn(A).flush(); assert_eq!(vec!["add_1", "add_2"], world.resource::().0); @@ -813,7 +897,9 @@ mod tests { world.register_component::(); world.register_component::(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("add_ab")); + world.add_observer(|_: Trigger, mut res: ResMut| { + res.observed("add_ab"); + }); let entity = world.spawn(A).id(); world.entity_mut(entity).insert(B); @@ -826,7 +912,9 @@ mod tests { let mut world = World::new(); let observer = world - .observe(|_: Trigger| panic!("Observer triggered after being despawned.")) + .add_observer(|_: Trigger| { + panic!("Observer triggered after being despawned.") + }) .id(); world.despawn(observer); world.spawn(A).flush(); @@ -840,10 +928,14 @@ mod tests { let entity = world.spawn((A, B)).flush(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("remove_a")); + world.add_observer(|_: Trigger, mut res: ResMut| { + res.observed("remove_a"); + }); let observer = world - .observe(|_: Trigger| panic!("Observer triggered after being despawned.")) + .add_observer(|_: Trigger| { + panic!("Observer triggered after being despawned.") + }) .flush(); world.despawn(observer); @@ -857,7 +949,9 @@ mod tests { let mut world = World::new(); world.init_resource::(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("add_ab")); + world.add_observer(|_: Trigger, mut res: ResMut| { + res.observed("add_ab"); + }); world.spawn((A, B)).flush(); assert_eq!(vec!["add_ab"], world.resource::().0); @@ -871,8 +965,8 @@ mod tests { world .spawn_empty() .observe(|_: Trigger| panic!("Trigger routed to non-targeted entity.")); - world.observe(move |obs: Trigger, mut res: ResMut| { - assert_eq!(obs.entity(), Entity::PLACEHOLDER); + world.add_observer(move |obs: Trigger, mut res: ResMut| { + assert_eq!(obs.target(), Entity::PLACEHOLDER); res.observed("event_a"); }); @@ -896,8 +990,8 @@ mod tests { .spawn_empty() .observe(|_: Trigger, mut res: ResMut| res.observed("a_1")) .id(); - world.observe(move |obs: Trigger, mut res: ResMut| { - assert_eq!(obs.entity(), entity); + world.add_observer(move |obs: Trigger, mut res: ResMut| { + assert_eq!(obs.target(), entity); res.observed("a_2"); }); @@ -962,12 +1056,16 @@ mod tests { let parent = world .spawn_empty() - 
.observe(|_: Trigger, mut res: ResMut| res.observed("parent")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("parent"); + }) .id(); let child = world .spawn(Parent(parent)) - .observe(|_: Trigger, mut res: ResMut| res.observed("child")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("child"); + }) .id(); // TODO: ideally this flush is not necessary, but right now observe() returns WorldEntityMut @@ -985,12 +1083,16 @@ mod tests { let parent = world .spawn_empty() - .observe(|_: Trigger, mut res: ResMut| res.observed("parent")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("parent"); + }) .id(); let child = world .spawn(Parent(parent)) - .observe(|_: Trigger, mut res: ResMut| res.observed("child")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("child"); + }) .id(); // TODO: ideally this flush is not necessary, but right now observe() returns WorldEntityMut @@ -1011,12 +1113,16 @@ mod tests { let parent = world .spawn_empty() - .observe(|_: Trigger, mut res: ResMut| res.observed("parent")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("parent"); + }) .id(); let child = world .spawn(Parent(parent)) - .observe(|_: Trigger, mut res: ResMut| res.observed("child")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("child"); + }) .id(); // TODO: ideally this flush is not necessary, but right now observe() returns WorldEntityMut @@ -1037,7 +1143,9 @@ mod tests { let parent = world .spawn_empty() - .observe(|_: Trigger, mut res: ResMut| res.observed("parent")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("parent"); + }) .id(); let child = world @@ -1065,7 +1173,9 @@ mod tests { let parent = world .spawn_empty() - .observe(|_: Trigger, mut res: ResMut| res.observed("parent")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("parent"); + }) .id(); let child_a = world @@ -1100,7 +1210,9 @@ mod tests { let entity = world .spawn_empty() - .observe(|_: Trigger, mut res: ResMut| res.observed("event")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("event"); + }) .id(); // TODO: ideally this flush is not necessary, but right now observe() returns WorldEntityMut @@ -1142,7 +1254,9 @@ mod tests { let child_b = world .spawn(Parent(parent_b)) - .observe(|_: Trigger, mut res: ResMut| res.observed("child_b")) + .observe(|_: Trigger, mut res: ResMut| { + res.observed("child_b"); + }) .id(); // TODO: ideally this flush is not necessary, but right now observe() returns WorldEntityMut @@ -1161,7 +1275,9 @@ mod tests { let mut world = World::new(); world.init_resource::(); - world.observe(|_: Trigger, mut res: ResMut| res.observed("event")); + world.add_observer(|_: Trigger, mut res: ResMut| { + res.observed("event"); + }); let grandparent = world.spawn_empty().id(); let parent = world.spawn(Parent(grandparent)).id(); @@ -1180,9 +1296,9 @@ mod tests { let mut world = World::new(); world.init_resource::(); - world.observe( + world.add_observer( |trigger: Trigger, query: Query<&A>, mut res: ResMut| { - if query.get(trigger.entity()).is_ok() { + if query.get(trigger.target()).is_ok() { res.observed("event"); } }, @@ -1207,7 +1323,7 @@ mod tests { let mut world = World::new(); // Observe the removal of A - this will run during despawn - world.observe(|_: Trigger, mut cmd: Commands| { + world.add_observer(|_: Trigger, mut cmd: Commands| { // Spawn a new entity - this reserves a new ID and requires a flush // afterward before Entities::free can be called. 
cmd.spawn_empty(); @@ -1224,9 +1340,6 @@ mod tests { #[test] fn observer_invalid_params() { - #[derive(Event)] - struct EventA; - #[derive(Resource)] struct ResA; @@ -1235,7 +1348,7 @@ mod tests { let mut world = World::new(); // This fails because `ResA` is not present in the world - world.observe(|_: Trigger, _: Res, mut commands: Commands| { + world.add_observer(|_: Trigger, _: Res, mut commands: Commands| { commands.insert_resource(ResB); }); world.trigger(EventA); @@ -1245,14 +1358,11 @@ mod tests { #[test] fn observer_apply_deferred_from_param_set() { - #[derive(Event)] - struct EventA; - #[derive(Resource)] struct ResA; let mut world = World::new(); - world.observe( + world.add_observer( |_: Trigger, mut params: ParamSet<(Query, Commands)>| { params.p1().insert_resource(ResA); }, @@ -1274,7 +1384,7 @@ mod tests { let caller = Location::caller(); let mut world = World::new(); - world.observe(move |trigger: Trigger| { + world.add_observer(move |trigger: Trigger| { assert_eq!(trigger.trigger.caller, caller); }); world.trigger(EventA); @@ -1288,13 +1398,43 @@ mod tests { let caller = Location::caller(); let mut world = World::new(); - world.observe(move |trigger: Trigger| { + world.add_observer(move |trigger: Trigger| { assert_eq!(trigger.trigger.caller, caller); }); - world.observe(move |trigger: Trigger| { + world.add_observer(move |trigger: Trigger| { assert_eq!(trigger.trigger.caller, caller); }); world.commands().spawn(Component).clear(); world.flush_commands(); } + + fn observer_triggered_components() { + #[derive(Resource, Default)] + struct Counter(HashMap); + + let mut world = World::new(); + world.init_resource::(); + let a_id = world.register_component::(); + let b_id = world.register_component::(); + + world.add_observer( + |trigger: Trigger, mut counter: ResMut| { + for &component in trigger.components() { + *counter.0.entry(component).or_default() += 1; + } + }, + ); + world.flush(); + + world.trigger_targets(EventA, [a_id, b_id]); + world.trigger_targets(EventA, a_id); + world.trigger_targets(EventA, b_id); + world.trigger_targets(EventA, [a_id, b_id]); + world.trigger_targets(EventA, a_id); + world.flush(); + + let counter = world.resource::(); + assert_eq!(4, *counter.0.get(&a_id).unwrap()); + assert_eq!(3, *counter.0.get(&b_id).unwrap()); + } } diff --git a/crates/bevy_ecs/src/observer/runner.rs b/crates/bevy_ecs/src/observer/runner.rs index b38d2f3a119106..2d75b64465b691 100644 --- a/crates/bevy_ecs/src/observer/runner.rs +++ b/crates/bevy_ecs/src/observer/runner.rs @@ -2,7 +2,7 @@ use core::any::Any; use core::panic::Location; use crate::{ - component::{ComponentHook, ComponentHooks, ComponentId, StorageType}, + component::{ComponentHook, ComponentHooks, ComponentId, Mutable, StorageType}, observer::{ObserverDescriptor, ObserverTrigger}, prelude::*, query::DebugCheckedUnwrap, @@ -63,6 +63,7 @@ impl ObserverState { impl Component for ObserverState { const STORAGE_TYPE: StorageType = StorageType::SparseSet; + type Mutability = Mutable; fn register_component_hooks(hooks: &mut ComponentHooks) { hooks.on_add(|mut world, entity, _, _| { @@ -89,7 +90,7 @@ impl Component for ObserverState { /// Type for function that is run when an observer is triggered. /// /// Typically refers to the default runner that runs the system stored in the associated [`Observer`] component, -/// but can be overridden for custom behaviour. +/// but can be overridden for custom behavior. 
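///
/// A runner receives the [`DeferredWorld`], the [`ObserverTrigger`] metadata (observer entity,
/// event type, targeted components and target entity), a type-erased pointer to the event data,
/// and a `propagate` flag that controls whether the event continues to bubble up the traversal
/// after this observer runs.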
pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: &mut bool); /// An [`Observer`] system. Add this [`Component`] to an [`Entity`] to turn it into an "observer". @@ -112,7 +113,7 @@ pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: /// message: String, /// } /// -/// world.observe(|trigger: Trigger| { +/// world.add_observer(|trigger: Trigger| { /// println!("{}", trigger.event().message); /// }); /// @@ -125,7 +126,7 @@ pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: /// }); /// ``` /// -/// Notice that we used [`World::observe`]. This is just a shorthand for spawning an [`Observer`] manually: +/// Notice that we used [`World::add_observer`]. This is just a shorthand for spawning an [`Observer`] manually: /// /// ``` /// # use bevy_ecs::prelude::*; @@ -133,7 +134,7 @@ pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: /// # #[derive(Event)] /// # struct Speak; /// // These are functionally the same: -/// world.observe(|trigger: Trigger| {}); +/// world.add_observer(|trigger: Trigger| {}); /// world.spawn(Observer::new(|trigger: Trigger| {})); /// ``` /// @@ -146,7 +147,7 @@ pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: /// # struct PrintNames; /// # #[derive(Component, Debug)] /// # struct Name; -/// world.observe(|trigger: Trigger, names: Query<&Name>| { +/// world.add_observer(|trigger: Trigger, names: Query<&Name>| { /// for name in &names { /// println!("{name:?}"); /// } @@ -164,7 +165,7 @@ pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: /// # struct SpawnThing; /// # #[derive(Component, Debug)] /// # struct Thing; -/// world.observe(|trigger: Trigger, mut commands: Commands| { +/// world.add_observer(|trigger: Trigger, mut commands: Commands| { /// commands.spawn(Thing); /// }); /// ``` @@ -178,7 +179,7 @@ pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: /// # struct A; /// # #[derive(Event)] /// # struct B; -/// world.observe(|trigger: Trigger, mut commands: Commands| { +/// world.add_observer(|trigger: Trigger, mut commands: Commands| { /// commands.trigger(B); /// }); /// ``` @@ -196,9 +197,9 @@ pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: /// #[derive(Event)] /// struct Explode; /// -/// world.observe(|trigger: Trigger, mut commands: Commands| { -/// println!("Entity {:?} goes BOOM!", trigger.entity()); -/// commands.entity(trigger.entity()).despawn(); +/// world.add_observer(|trigger: Trigger, mut commands: Commands| { +/// println!("Entity {:?} goes BOOM!", trigger.target()); +/// commands.entity(trigger.target()).despawn(); /// }); /// /// world.flush(); @@ -231,7 +232,7 @@ pub type ObserverRunner = fn(DeferredWorld, ObserverTrigger, PtrMut, propagate: /// # struct Explode; /// world.entity_mut(e1).observe(|trigger: Trigger, mut commands: Commands| { /// println!("Boom!"); -/// commands.entity(trigger.entity()).despawn(); +/// commands.entity(trigger.target()).despawn(); /// }); /// /// world.entity_mut(e2).observe(|trigger: Trigger, mut commands: Commands| { @@ -315,6 +316,7 @@ impl Observer { impl Component for Observer { const STORAGE_TYPE: StorageType = StorageType::SparseSet; + type Mutability = Mutable; fn register_component_hooks(hooks: &mut ComponentHooks) { hooks.on_add(|world, entity, id, caller| { let Some(observe) = world.get::(entity) else { @@ -394,7 +396,7 @@ fn hook_on_add>( mut world: DeferredWorld<'_>, entity: 
Entity, _: ComponentId, - _: &'static Location<'static>, + _: Option<&'static Location<'static>>, ) { world.commands().queue(move |world: &mut World| { let event_type = world.register_component::(); diff --git a/crates/bevy_ecs/src/observer/trigger_event.rs b/crates/bevy_ecs/src/observer/trigger_event.rs index 843658a3647b49..96eafa6746ec2a 100644 --- a/crates/bevy_ecs/src/observer/trigger_event.rs +++ b/crates/bevy_ecs/src/observer/trigger_event.rs @@ -1,3 +1,4 @@ +#[cfg(feature = "track_change_detection")] use core::panic::Location; use crate::{ @@ -16,6 +17,7 @@ pub struct TriggerEvent { pub targets: Targets, /// The source code that emitted this command. + #[cfg(feature = "track_change_detection")] pub caller: &'static Location<'static>, } @@ -27,6 +29,7 @@ impl TriggerEvent { event_type, &mut self.event, self.targets, + #[cfg(feature = "track_change_detection")] self.caller, ); } @@ -35,7 +38,14 @@ impl TriggerEvent { impl TriggerEvent<&mut E, Targets> { pub(super) fn trigger_ref(self, world: &mut World) { let event_type = world.register_component::(); - trigger_event(world, event_type, self.event, self.targets, self.caller); + trigger_event( + world, + event_type, + self.event, + self.targets, + #[cfg(feature = "track_change_detection")] + self.caller, + ); } } @@ -52,6 +62,7 @@ pub struct EmitDynamicTrigger { event_type: ComponentId, event_data: T, targets: Targets, + #[cfg(feature = "track_change_detection")] caller: &'static Location<'static>, } @@ -65,6 +76,7 @@ impl EmitDynamicTrigger { event_type, event_data, targets, + #[cfg(feature = "track_change_detection")] caller: Location::caller(), } } @@ -79,6 +91,7 @@ impl Command self.event_type, &mut self.event_data, self.targets, + #[cfg(feature = "track_change_detection")] self.caller, ); } @@ -90,7 +103,7 @@ fn trigger_event( event_type: ComponentId, event_data: &mut E, targets: Targets, - caller: &'static Location<'static>, + #[cfg(feature = "track_change_detection")] caller: &'static Location<'static>, ) { let mut world = DeferredWorld::from(world); if targets.entities().is_empty() { @@ -102,6 +115,7 @@ fn trigger_event( targets.components(), event_data, false, + #[cfg(feature = "track_change_detection")] caller, ); }; @@ -115,6 +129,7 @@ fn trigger_event( targets.components(), event_data, E::AUTO_PROPAGATE, + #[cfg(feature = "track_change_detection")] caller, ); }; diff --git a/crates/bevy_ecs/src/query/access.rs b/crates/bevy_ecs/src/query/access.rs index cf504c2606635b..a0e311ab3dd5f7 100644 --- a/crates/bevy_ecs/src/query/access.rs +++ b/crates/bevy_ecs/src/query/access.rs @@ -1,5 +1,9 @@ +use crate::component::ComponentId; use crate::storage::SparseSetIndex; +use crate::world::World; use core::{fmt, fmt::Debug, marker::PhantomData}; +use derive_more::derive::From; +use disqualified::ShortName; use fixedbitset::FixedBitSet; /// A wrapper struct to make Debug representations of [`FixedBitSet`] easier @@ -727,6 +731,25 @@ impl Access { AccessConflicts::Individual(conflicts) } + /// Returns the indices of the resources this has access to. + pub fn resource_reads_and_writes(&self) -> impl Iterator + '_ { + self.resource_read_and_writes + .ones() + .map(T::get_sparse_set_index) + } + + /// Returns the indices of the resources this has non-exclusive access to. + pub fn resource_reads(&self) -> impl Iterator + '_ { + self.resource_read_and_writes + .difference(&self.resource_writes) + .map(T::get_sparse_set_index) + } + + /// Returns the indices of the resources this has exclusive access to. 
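/// This is the complement of [`Self::resource_reads`] within
/// [`Self::resource_reads_and_writes`].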
+ pub fn resource_writes(&self) -> impl Iterator + '_ { + self.resource_writes.ones().map(T::get_sparse_set_index) + } + /// Returns the indices of the components that this has an archetypal access to. /// /// These are components whose values are not accessed (and thus will never cause conflicts), @@ -750,7 +773,7 @@ impl Access { /// `Access`, it's not recommended. Prefer to manage your own lists of /// accessible components if your application needs to do that. #[doc(hidden)] - #[deprecated] + // TODO: this should be deprecated and removed, see https://github.com/bevyengine/bevy/issues/16339 pub fn component_reads_and_writes(&self) -> (impl Iterator + '_, bool) { ( self.component_read_and_writes @@ -835,7 +858,7 @@ impl From> for FilteredAccessSet { } /// Records how two accesses conflict with each other -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, From)] pub enum AccessConflicts { /// Conflict is for all indices All, @@ -863,18 +886,34 @@ impl AccessConflicts { } } + pub(crate) fn format_conflict_list(&self, world: &World) -> String { + match self { + AccessConflicts::All => String::new(), + AccessConflicts::Individual(indices) => indices + .ones() + .map(|index| { + format!( + "{}", + ShortName( + world + .components + .get_info(ComponentId::get_sparse_set_index(index)) + .unwrap() + .name() + ) + ) + }) + .collect::>() + .join(", "), + } + } + /// An [`AccessConflicts`] which represents the absence of any conflict pub(crate) fn empty() -> Self { Self::Individual(FixedBitSet::new()) } } -impl From for AccessConflicts { - fn from(value: FixedBitSet) -> Self { - Self::Individual(value) - } -} - impl From> for AccessConflicts { fn from(value: Vec) -> Self { Self::Individual(value.iter().map(T::sparse_set_index).collect()) @@ -1239,6 +1278,20 @@ impl FilteredAccessSet { self.add(filter); } + /// Adds read access to all resources to the set. + pub(crate) fn add_unfiltered_read_all_resources(&mut self) { + let mut filter = FilteredAccess::default(); + filter.access.read_all_resources(); + self.add(filter); + } + + /// Adds write access to all resources to the set. + pub(crate) fn add_unfiltered_write_all_resources(&mut self) { + let mut filter = FilteredAccess::default(); + filter.access.write_all_resources(); + self.add(filter); + } + /// Adds all of the accesses from the passed set to `self`. 
pub fn extend(&mut self, filtered_access_set: FilteredAccessSet) { self.combined_access diff --git a/crates/bevy_ecs/src/query/error.rs b/crates/bevy_ecs/src/query/error.rs index 6778a75856249a..97cc011d39e1d9 100644 --- a/crates/bevy_ecs/src/query/error.rs +++ b/crates/bevy_ecs/src/query/error.rs @@ -70,7 +70,7 @@ fn format_archetype( .components() .get_name(component_id) .expect("entity does not belong to world"); - write!(f, "{name}")?; + write!(f, "{}", disqualified::ShortName(name))?; } Ok(()) } @@ -124,6 +124,9 @@ mod test { .get(&world, entity) .unwrap_err(); - assert_eq!(format!("{err:?}"), "QueryDoesNotMatch(0v1 with components bevy_ecs::query::error::test::query_does_not_match::Present1, bevy_ecs::query::error::test::query_does_not_match::Present2)"); + assert_eq!( + format!("{err:?}"), + "QueryDoesNotMatch(0v1 with components Present1, Present2)" + ); } } diff --git a/crates/bevy_ecs/src/query/fetch.rs b/crates/bevy_ecs/src/query/fetch.rs index 17d92fc97930ae..a56754d95b8f88 100644 --- a/crates/bevy_ecs/src/query/fetch.rs +++ b/crates/bevy_ecs/src/query/fetch.rs @@ -2,7 +2,7 @@ use crate::{ archetype::{Archetype, Archetypes}, bundle::Bundle, change_detection::{MaybeThinSlicePtrLocation, Ticks, TicksMut}, - component::{Component, ComponentId, Components, StorageType, Tick}, + component::{Component, ComponentId, Components, Mutable, StorageType, Tick}, entity::{Entities, Entity, EntityLocation}, query::{Access, DebugCheckedUnwrap, FilteredAccess, WorldQuery}, storage::{ComponentSparseSet, Table, TableRow}, @@ -12,9 +12,9 @@ use crate::{ }, }; use bevy_ptr::{ThinSlicePtr, UnsafeCellDeref}; -use bevy_utils::all_tuples; use core::{cell::UnsafeCell, marker::PhantomData}; use smallvec::SmallVec; +use variadics_please::all_tuples; /// Types that can be fetched from a [`World`] using a [`Query`]. /// @@ -272,7 +272,8 @@ use smallvec::SmallVec; /// [`ReadOnly`]: Self::ReadOnly #[diagnostic::on_unimplemented( message = "`{Self}` is not valid to request as data in a `Query`", - label = "invalid `Query` data" + label = "invalid `Query` data", + note = "if `{Self}` is a component type, try using `&{Self}` or `&mut {Self}`" )] pub unsafe trait QueryData: WorldQuery { /// The read-only variant of this [`QueryData`], which satisfies the [`ReadOnlyQueryData`] trait. @@ -1057,7 +1058,7 @@ unsafe impl QueryData for &Archetype { /// SAFETY: access is read only unsafe impl ReadOnlyQueryData for &Archetype {} -#[doc(hidden)] +/// The [`WorldQuery::Fetch`] type for `& T`. pub struct ReadFetch<'w, T: Component> { components: StorageSwitch< T, @@ -1415,7 +1416,7 @@ unsafe impl<'__w, T: Component> QueryData for Ref<'__w, T> { /// SAFETY: access is read only unsafe impl<'__w, T: Component> ReadOnlyQueryData for Ref<'__w, T> {} -#[doc(hidden)] +/// The [`WorldQuery::Fetch`] type for `&mut T`. pub struct WriteFetch<'w, T: Component> { components: StorageSwitch< T, @@ -1607,7 +1608,7 @@ unsafe impl<'__w, T: Component> WorldQuery for &'__w mut T { } /// SAFETY: access of `&T` is a subset of `&mut T` -unsafe impl<'__w, T: Component> QueryData for &'__w mut T { +unsafe impl<'__w, T: Component> QueryData for &'__w mut T { type ReadOnly = &'__w T; } @@ -2030,8 +2031,8 @@ macro_rules! impl_tuple_query_data { } macro_rules! impl_anytuple_fetch { - ($(($name: ident, $state: ident)),*) => { - + ($(#[$meta:meta])* $(($name: ident, $state: ident)),*) => { + $(#[$meta])* #[allow(non_snake_case)] #[allow(clippy::unused_unit)] /// SAFETY: @@ -2153,6 +2154,7 @@ macro_rules! 
impl_anytuple_fetch { } } + $(#[$meta])* #[allow(non_snake_case)] #[allow(clippy::unused_unit)] // SAFETY: defers to soundness of `$name: WorldQuery` impl @@ -2160,6 +2162,7 @@ macro_rules! impl_anytuple_fetch { type ReadOnly = AnyOf<($($name::ReadOnly,)*)>; } + $(#[$meta])* /// SAFETY: each item in the tuple is read only unsafe impl<$($name: ReadOnlyQueryData),*> ReadOnlyQueryData for AnyOf<($($name,)*)> {} }; @@ -2173,7 +2176,14 @@ all_tuples!( F, S ); -all_tuples!(impl_anytuple_fetch, 0, 15, F, S); +all_tuples!( + #[doc(fake_variadic)] + impl_anytuple_fetch, + 0, + 15, + F, + S +); /// [`WorldQuery`] used to nullify queries by turning `Query` into `Query>` /// diff --git a/crates/bevy_ecs/src/query/filter.rs b/crates/bevy_ecs/src/query/filter.rs index e75f506039a298..b096e801f4cc2e 100644 --- a/crates/bevy_ecs/src/query/filter.rs +++ b/crates/bevy_ecs/src/query/filter.rs @@ -7,8 +7,8 @@ use crate::{ world::{unsafe_world_cell::UnsafeWorldCell, World}, }; use bevy_ptr::{ThinSlicePtr, UnsafeCellDeref}; -use bevy_utils::all_tuples; use core::{cell::UnsafeCell, marker::PhantomData}; +use variadics_please::all_tuples; /// Types that filter the results of a [`Query`]. /// @@ -352,9 +352,9 @@ unsafe impl QueryFilter for Without { /// # #[derive(Component, Debug)] /// # struct Color {}; /// # #[derive(Component)] -/// # struct Style {}; +/// # struct Node {}; /// # -/// fn print_cool_entity_system(query: Query, Changed