diff --git a/.editorconfig b/.editorconfig index 4d811b90ba..8d9c20c658 100644 --- a/.editorconfig +++ b/.editorconfig @@ -14,4 +14,4 @@ indent_size = 4 [*.md] indent_style = space -indent_size = 4 +indent_size = 2 diff --git a/.github/workflows/bench-branch.yml b/.github/workflows/bench-branch.yml index 68ce3c6dc1..db7e0c27f9 100644 --- a/.github/workflows/bench-branch.yml +++ b/.github/workflows/bench-branch.yml @@ -1,16 +1,38 @@ on: push: - branches-ignore: - - "dependabot/**" + branches: + - "master" tags: - "*" + pull_request: name: benchmark jobs: runBenchmark: name: run benchmark runs-on: ubuntu-latest + if: "!startsWith(github.head_ref, 'dependabot/')" steps: - - uses: actions/checkout@v3 + - name: Check out base + if: github.event_name == 'pull_request' + uses: actions/checkout@v3 + with: + ssh-key: ${{secrets.API_KEY}} + repo: ${{github.event.pull_request.base.repo.full_name}} + ref: ${{github.event.pull_request.base.ref}} + - name: Run benchmarks on base for comparison + if: github.event_name == 'pull_request' + run: cargo bench --features internal-bench + + - name: Check out head + uses: actions/checkout@v3 + if: github.event_name == 'pull_request' + with: + ssh-key: ${{secrets.API_KEY}} + repo: ${{github.event.pull_request.head.repo.full_name}} + ref: ${{github.event.pull_request.head.ref}} + - name: Check out head + uses: actions/checkout@v3 + if: github.event_name == 'push' with: ssh-key: ${{secrets.API_KEY}} @@ -38,5 +60,3 @@ jobs: - name: Push pages run: git push --force working-directory: ..site-repo - - run: git push origin HEAD:last-benched-tag - if: ${{github.ref_type}} == tag diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5da3225335..344912225d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -44,12 +44,29 @@ Naming type parameters follows the following conventions: - If the type parameter has no specific bounds and there are no more than two type parameters in scope, the dummy names `T` and `U` MAY be used. -- If the type parameter is defined in the scope of a key-value store, +- If the type parameter is defined in the scope of a key-value collection, i.e. the item at which the type parameters are defined corresponds exclusively to exactly one key-value item, - the names `K` and `V` MAY be used for key and value types. - `K` or `V` SHOULD NOT be used alone without the other one. + the names `K` and `V` MAY be used for key and value types on the same collections. + `K` or `V` SHOULD NOT be used alone without the other one or on different collections. +- `F` and `I` MAY be used to represent closure and iterator types + only if there is no more than one generic function/iterator type in scope. +- Domain-specific acronyms: The following type parameters are ALLOWED only if + they describe the following dynec-specific concepts: + - `A`: Archetype + - `C`: Component (Simple or Isotope) + - `D`: Discriminant (for isotope components) + - `E`: Implements `entity::Ref` (NOT `entity::Raw`) - Otherwise, the full name SHOULD be described in PascalCase directly - if it does not collide with the name of a type/trait that may be imported + if it does not collide with the name of a type/trait used in this project (regardless whether it is *currently* imported). -- In the case of name collision, a `T` SHOULD be appended to its name. +- In the case of name collision, a `T` MAY be appended to its name. + +#### Imports + +- The `Result` type MUST NOT be aliased in the main crate. + However, importing `syn::Result` is RECOMMENDED in the codegen crate. 
+- Traits SHOULD be imported `as _` + if the imported trait identifier is not directly used + (if the module only uses the imported methods in the trait). +- Imports from the standard library MUST prefer `std` over `core`. diff --git a/Cargo.toml b/Cargo.toml index 33f0aaacde..10ab46dee9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,21 +23,31 @@ itertools = "0.10.3" log = "0.4.16" parking_lot = {version = "0.12.0", features = ["owning_ref", "arc_lock", "send_guard"]} rand = "0.8.5" -rayon = "1.5.2" +rayon = "1.8.0" static_assertions = "1.1.0" strum = {version = "0.24.0", optional = true} xias = "0.3.0" +derive-trait = "0.0.4" [features] default = ["debug-entity-rc"] debug-entity-rc = [] # Enable entity refcounting in debug mode. release-entity-rc = [] # Enable entity refcounting in debug mode. -internal-bench = ["env_logger", "strum"] # Internal feature: enable benchmarking utils. +tuple-impl-32-zip = ["tuple-impl-24-zip"] +tuple-impl-24-zip = ["tuple-impl-16-zip"] +tuple-impl-16-zip = ["tuple-impl-8-zip"] +tuple-impl-8-zip = [] +tuple-impl-32-init-fn = ["tuple-impl-24-init-fn"] +tuple-impl-24-init-fn = ["tuple-impl-16-init-fn"] +tuple-impl-16-init-fn = ["tuple-impl-8-init-fn"] +tuple-impl-8-init-fn = [] +internal-bench = ["env_logger", "strum", "tuple-impl-8-zip"] # Internal feature: enable benchmarking utils. [dev-dependencies] criterion = { version = "0.4.0", features = ["html_reports"] } env_logger = "0.10.0" lazy_static = "1.4.0" +paste = "1.0.14" strum = "0.24.0" [lib] @@ -56,4 +66,4 @@ name = "iter_entity_add" harness = false [profile.bench] -debug = true +lto = true diff --git a/benches/iter_entity_add.rs b/benches/iter_entity_add.rs index 0eb0c88b4d..8c5d8bc32a 100644 --- a/benches/iter_entity_add.rs +++ b/benches/iter_entity_add.rs @@ -1,239 +1,202 @@ use std::iter; use std::time::Duration; +use criterion::measurement::WallTime; use criterion::*; -use dynec::{system, test_util}; +use dynec::test_util::TestArch; +use dynec::{comp, system}; +use rand::rngs::ThreadRng; use rand::Rng; -#[dynec::comp(of = test_util::TestArch, required)] +#[dynec::comp(of = TestArch, required)] struct PositionX(f64); -#[dynec::comp(of = test_util::TestArch, required)] +#[dynec::comp(of = TestArch, required)] struct PositionY(f64); -#[dynec::comp(of = test_util::TestArch, required)] +#[dynec::comp(of = TestArch, required)] struct PositionZ(f64); -#[dynec::comp(of = test_util::TestArch, required)] +#[dynec::comp(of = TestArch, required)] struct VelocityX(f64); -#[dynec::comp(of = test_util::TestArch, required)] +#[dynec::comp(of = TestArch, required)] struct VelocityY(f64); -#[dynec::comp(of = test_util::TestArch, required)] +#[dynec::comp(of = TestArch, required)] struct VelocityZ(f64); -#[system] -fn system_individual_add_system_non_chunked( - mut px: impl system::WriteSimple, - mut py: impl system::WriteSimple, - mut pz: impl system::WriteSimple, - vx: impl system::ReadSimple, - vy: impl system::ReadSimple, - vz: impl system::ReadSimple, - entities: impl system::EntityIterator, -) { - for (_, (px, py, pz, vx, vy, vz)) in entities.entities_with(( - px.access_mut(), - py.access_mut(), - pz.access_mut(), - vx.access(), - vy.access(), - vz.access(), - )) { - px.0 += vx.0; - py.0 += vy.0; - pz.0 += vz.0; - } -} -fn iter_entity_add_individual_non_chunked(group: &mut BenchmarkGroup<'_, measurement::WallTime>) { - group.measurement_time(Duration::from_secs(10)); - - for log_entities in [12, 16] { - let num_entities = 1 << log_entities; - group.throughput(Throughput::Elements(num_entities)); - 
group.bench_with_input( - BenchmarkId::new("non-chunked (x, y, z)", format!("{num_entities} entities")), - &num_entities, - |b, &num_entities| { - let mut world = - dynec::system_test!(system_individual_add_system_non_chunked.build();); - let mut rng = rand::thread_rng(); - for _ in 0..num_entities { - world.create(dynec::comps![test_util::TestArch => - PositionX(rng.gen_range(-65536.0 ..= 65536.0)), - PositionY(rng.gen_range(-65536.0 ..= 65536.0)), - PositionZ(rng.gen_range(-65536.0 ..= 65536.0)), - VelocityX(rng.gen_range(-65536.0 ..= 65536.0)), - VelocityY(rng.gen_range(-65536.0 ..= 65536.0)), - VelocityZ(rng.gen_range(-65536.0 ..= 65536.0)), - ]); - } - b.iter(|| { - world.execute(&dynec::tracer::Noop); - }) - }, - ); - } -} - -#[system] -fn system_individual_add_system_chunked( - mut px: impl system::WriteSimple, - mut py: impl system::WriteSimple, - mut pz: impl system::WriteSimple, - vx: impl system::ReadSimple, - vy: impl system::ReadSimple, - vz: impl system::ReadSimple, - entities: impl system::EntityIterator, -) { - for (_, (px, py, pz, vx, vy, vz)) in entities.chunks_with(( - px.access_chunk_mut(), - py.access_chunk_mut(), - pz.access_chunk_mut(), - vx.access_chunk(), - vy.access_chunk(), - vz.access_chunk(), - )) { - for (px, (py, (pz, (vx, (vy, vz))))) in - iter::zip(px, iter::zip(py, iter::zip(pz, iter::zip(vx, iter::zip(vy, vz))))) - { - px.0 += vx.0; - py.0 += vy.0; - pz.0 += vz.0; - } - } -} -fn iter_entity_add_individual_chunked(group: &mut BenchmarkGroup<'_, measurement::WallTime>) { - group.measurement_time(Duration::from_secs(10)); - - for log_entities in [12, 16] { - let num_entities = 1 << log_entities; - group.throughput(Throughput::Elements(num_entities)); - group.bench_with_input( - BenchmarkId::new("chunked (x,y,z)", format!("{num_entities} entities")), - &num_entities, - |b, &num_entities| { - let mut world = dynec::system_test!(system_individual_add_system_chunked.build();); - let mut rng = rand::thread_rng(); - for _ in 0..num_entities { - world.create(dynec::comps![test_util::TestArch => - PositionX(rng.gen_range(-65536.0 ..= 65536.0)), - PositionY(rng.gen_range(-65536.0 ..= 65536.0)), - PositionZ(rng.gen_range(-65536.0 ..= 65536.0)), - VelocityX(rng.gen_range(-65536.0 ..= 65536.0)), - VelocityY(rng.gen_range(-65536.0 ..= 65536.0)), - VelocityZ(rng.gen_range(-65536.0 ..= 65536.0)), - ]); - } - b.iter(|| { - world.execute(&dynec::tracer::Noop); - }) - }, - ); - } +fn individual_comps(rng: &mut ThreadRng) -> comp::Map { + dynec::comps![TestArch => + PositionX(rng.gen_range(-65536.0 ..= 65536.0)), + PositionY(rng.gen_range(-65536.0 ..= 65536.0)), + PositionZ(rng.gen_range(-65536.0 ..= 65536.0)), + VelocityX(rng.gen_range(-65536.0 ..= 65536.0)), + VelocityY(rng.gen_range(-65536.0 ..= 65536.0)), + VelocityZ(rng.gen_range(-65536.0 ..= 65536.0)), + ] } -#[dynec::comp(of = test_util::TestArch, required)] +#[dynec::comp(of = TestArch, required)] struct PositionArray([f64; 3]); -#[dynec::comp(of = test_util::TestArch, required)] +#[dynec::comp(of = TestArch, required)] struct VelocityArray([f64; 3]); -#[system] -fn system_array_add_system_non_chunked( - mut p: impl system::WriteSimple, - v: impl system::ReadSimple, - entities: impl system::EntityIterator, -) { - for (_, (p, v)) in entities.entities_with((p.access_mut(), v.access())) { - for i in 0..3 { - p.0[i] += v.0[i]; - } - } +fn array_comps(rng: &mut ThreadRng) -> comp::Map { + dynec::comps![TestArch => + PositionArray([ + rng.gen_range(-65536.0 ..= 65536.0), + rng.gen_range(-65536.0 ..= 65536.0), + 
rng.gen_range(-65536.0 ..= 65536.0), + ]), + VelocityArray([ + rng.gen_range(-65536.0 ..= 65536.0), + rng.gen_range(-65536.0 ..= 65536.0), + rng.gen_range(-65536.0 ..= 65536.0), + ]), + ] } -fn iter_entity_add_array_non_chunked(group: &mut BenchmarkGroup<'_, measurement::WallTime>) { - group.measurement_time(Duration::from_secs(10)); - for log_entities in [12, 16] { - let num_entities = 1 << log_entities; - group.throughput(Throughput::Elements(num_entities)); - group.bench_with_input( - BenchmarkId::new("non-chunked [f64; 3]", format!("{num_entities} entities")), - &num_entities, - |b, &num_entities| { - let mut world = dynec::system_test!(system_array_add_system_non_chunked.build();); - let mut rng = rand::thread_rng(); - for _ in 0..num_entities { - world.create(dynec::comps![test_util::TestArch => - PositionArray([ - rng.gen_range(-65536.0 ..= 65536.0), - rng.gen_range(-65536.0 ..= 65536.0), - rng.gen_range(-65536.0 ..= 65536.0), - ]), - VelocityArray([ - rng.gen_range(-65536.0 ..= 65536.0), - rng.gen_range(-65536.0 ..= 65536.0), - rng.gen_range(-65536.0 ..= 65536.0), - ]), - ]); - } - b.iter(|| { - world.execute(&dynec::tracer::Noop); - }) - }, - ); - } -} +macro_rules! make_systems { + ($system_name:ident $iter_method:ident) => { + paste::paste! { + #[system] + fn [<$system_name _idv>]( + mut px: system::WriteSimple, + mut py: system::WriteSimple, + mut pz: system::WriteSimple, + vx: system::ReadSimple, + vy: system::ReadSimple, + vz: system::ReadSimple, + entities: system::EntityIterator, + ) { + entities.$iter_method((&mut px, &mut py, &mut pz, &vx, &vy, &vz)).for_each( + |(_, (px, py, pz, vx, vy, vz))| { + px.0 += vx.0; + py.0 += vy.0; + pz.0 += vz.0; + }, + ) + } -#[system] -fn system_array_add_system_chunked( - mut p: impl system::WriteSimple, - v: impl system::ReadSimple, - entities: impl system::EntityIterator, -) { - for (_, (p, v)) in entities.chunks_with((p.access_chunk_mut(), v.access_chunk())) { - for (p, v) in iter::zip(p, v) { - for i in 0..3 { - p.0[i] += v.0[i]; + #[system] + fn [<$system_name _arr>]( + mut p: system::WriteSimple, + v: system::ReadSimple, + entities: system::EntityIterator, + ) { + entities.$iter_method((&mut p, &v)).for_each( + |(_, (p, v))| { + for i in 0..3 { + p.0[i] += v.0[i]; + } + }, + ) } } - } + }; } -fn iter_entity_add_array_chunked(group: &mut BenchmarkGroup<'_, measurement::WallTime>) { + +make_systems!(system_add_ent entities_with); +make_systems!(system_add_chunk entities_with_chunked); + +fn bench_iter_entity_add( + group: &mut BenchmarkGroup<'_, measurement::WallTime>, + subgroup: &str, + function_name: &str, + build_system: impl Fn() -> SystemT, + make_comps: impl Fn(&mut ThreadRng) -> comp::Map, + entities_to_delete: impl Fn(u64) -> DeleteEntityIter, +) where + SystemT: system::Sendable, + DeleteEntityIter: Iterator, +{ group.measurement_time(Duration::from_secs(10)); - for log_entities in [12, 16] { - let num_entities = 1 << log_entities; - group.throughput(Throughput::Elements(num_entities)); - group.bench_with_input( - BenchmarkId::new("chunked [f64; 3]", format!("{num_entities} entities")), - &num_entities, - |b, &num_entities| { - let mut world = dynec::system_test!(system_array_add_system_chunked.build();); - let mut rng = rand::thread_rng(); - for _ in 0..num_entities { - world.create(dynec::comps![test_util::TestArch => - PositionArray([ - rng.gen_range(-65536.0 ..= 65536.0), - rng.gen_range(-65536.0 ..= 65536.0), - rng.gen_range(-65536.0 ..= 65536.0), - ]), - VelocityArray([ - rng.gen_range(-65536.0 ..= 65536.0), - 
rng.gen_range(-65536.0 ..= 65536.0), - rng.gen_range(-65536.0 ..= 65536.0), - ]), - ]); - } - b.iter(|| { - world.execute(&dynec::tracer::Noop); - }) - }, - ); - } + let num_entities = 65536; + group.throughput(Throughput::Elements(num_entities)); + group.bench_with_input( + BenchmarkId::new(function_name, subgroup), + &num_entities, + |b, &num_entities| { + let mut world = dynec::system_test!(build_system();); + let mut rng = rand::thread_rng(); + let mut entities: Vec<_> = + (0..num_entities).map(|_| world.create(make_comps(&mut rng))).map(Some).collect(); + for pos in entities_to_delete(num_entities) { + let entity = entities + .get_mut(pos as usize) + .expect("entities_to_delete yielded overflowing values"); + let entity = entity.take().expect("entities_to_delete yielded repeated values"); + world.delete(entity); + } + b.iter(|| world.execute(&dynec::tracer::Noop)) + }, + ); +} + +fn iter_entity_add_with_deletion>( + group: &mut BenchmarkGroup, + name: &str, + deletion: impl Fn(u64) -> DeleteEntityIter + Copy, +) { + bench_iter_entity_add( + group, + name, + "ent idv", + || system_add_ent_idv.build(), + individual_comps, + deletion, + ); + bench_iter_entity_add( + group, + name, + "chunk idv", + || system_add_chunk_idv.build(), + individual_comps, + deletion, + ); + bench_iter_entity_add( + group, + name, + "ent arr", + || system_add_ent_arr.build(), + array_comps, + deletion, + ); + bench_iter_entity_add( + group, + name, + "chunk arr", + || system_add_chunk_arr.build(), + array_comps, + deletion, + ); } fn iter_entity_add(c: &mut Criterion) { let mut group = c.benchmark_group("iter entity (p += v)"); - iter_entity_add_individual_non_chunked(&mut group); - iter_entity_add_individual_chunked(&mut group); - iter_entity_add_array_non_chunked(&mut group); - iter_entity_add_array_chunked(&mut group); + + iter_entity_add_with_deletion(&mut group, "full", |_| iter::empty()); + + const BASE_HOLES: [(u64, u64); 3] = [(1_u64, 2), (4, 8), (16, 12)]; + iter_entity_add_with_deletion(&mut group, "holes", |total| generate_holes(total, BASE_HOLES)); + iter_entity_add_with_deletion(&mut group, "holes 4x", |total| { + generate_holes(total, BASE_HOLES.map(|(keep, delete)| (keep * 4, delete * 4))) + }); +} + +fn generate_holes( + total: u64, + groups: impl Clone + IntoIterator, +) -> impl Iterator { + iter::repeat(groups) + .map(|group| group.into_iter()) + .flatten() + .scan(0, |state, (keep, delete)| { + *state += keep; + let start = *state; + *state += delete; + let end = *state; + Some(start..end) + }) + .flatten() + .take_while(move |&index| index < total) } criterion_group!(benches, iter_entity_add); diff --git a/book/src/ch1-ecs.md b/book/src/ch1-ecs.md index 04d2a57509..b650f30989 100644 --- a/book/src/ch1-ecs.md +++ b/book/src/ch1-ecs.md @@ -1,15 +1,16 @@ # ECS -dynec is an ECS (Entity-Component-System) framework. -It is a data-oriented programming approach where -data are mostly stored in "components" for different "entities", -and logic is run in "systems" that process the data. +Dynec uses concepts from the ECS (Entity-Component-System) paradigm. +It is a data-oriented programming approach that consists of three core concepts: + +- An **entity** represent different objects. +- Different **component**s store data for an entity. +- **Systems** process the components to execute game logic. ## Data -An entity corresponds to an object. -Different components store the data related to an object. 
-I like visualizing them as rows and columns, +An intuitive way to visualize entities and components +would be a table, where each row is an entity and each cell is a component of that entity: | Entity \# | Location | Hitpoint | Experience | @@ -18,37 +19,31 @@ where each row is an entity and each cell is a component of that entity: | 1 | (1, 3, 4) | 80 | 4 | | ⋮ | ⋮ | ⋮ | ⋮ | -Everything can be entities! +Everything can be an entity! For example, in a shooters game, -each player and and each bullet is a separate entity. +each player is an entity, +each bullet is an entity, +and even each inventory slot of the player may be an entity as well. + The components for a bullet are different from those for a player: -| Entity \# | Location | Speed | Damage | +| Entity \# | Location | Velocity | Damage | | :---: | :---: | :---: | :---: | | 0 | (1, 2.5, 3.5) | (0, 0.5, 0.5) | 20 | | ⋮ | ⋮ | ⋮ | ⋮ | -Unlike the traitional OOP pattern where -components of the same object are stored together, -ECS typically stores components of the same type together. -Since data of the same type are usually processed together in bulk, -CPU cache lines have much better efficiency -compared to the traditional random access on the heap. - ## Logic -A system is a function that processes data. -In a typical game or simulation, -each system are executed once per "cycle" (a.k.a. "ticks") in the main loop. +A system is a function that processes the data. +In a typical simulation program, +each system is executed once per "cycle" (a.k.a. "ticks") in a main loop. Usually, systems are implemented as loops that execute over all entities of a type: -``` +```text for each bullet entity { location[bullet] += speed[bullet] } ``` -An ECS framework schedules systems that can be run together on different threads. -Therefore, programs written with ECS are almost lock-free, -so they are more efficient on a multi-threaded environment -compared to traditional approaches that might result in frequent lock contention. +An ECS framework schedules systems to run on different threads. +Therefore, programs written with ECS are almost lock-free. diff --git a/book/src/ch2-archetypes.md b/book/src/ch2-archetypes.md index 78b69d30d6..4a520dad79 100644 --- a/book/src/ch2-archetypes.md +++ b/book/src/ch2-archetypes.md @@ -1,37 +1,46 @@ # Archetypes -In traditional ECS frameworks, entities are grouped by the components they have, -e.g. if an entity has a "location" and "speed" component, -it is considered as an entity that can move and -is looped by the system that requests these components. -The set of components created for an entity is called its "archetype", -which is comparable to "classes" in OOP. - -In dynec, entities are *statically archetyped*, -which means the possible components of an entity is *known* and *fixed* from creation. -In the analogy of rows and columns, an archetype is similar to a table. -As such, different archetypes have their own entity IDs. - -What if we want to add/remove components for an entity? -dynec still supports optional components, -but the entity is still stored in the same archetype, -so it still appears in the loop when systems iterate over this archetype. -If you would like to loop over entities with certain components, -it is a better idea to split the components to a separate entity with a new archetype -and loop on that archetype instead. 
-(It is also possible to loop over entities with a specific component, -but joining multiple components is not supported) +In traditional ECS, the type of an entity is identified by the components it has. +For example, an entity is considered to be "moveable" +if it has the "location" and "speed" components. +Systems iterate over the "moveable" entities by performing a "join query" +that intersects the entities with a "location" and the entities with a "speed". +Thus, an entity type is effectively a subtype of any combination of its components, +e.g. both "player" and "bullet" are subtypes of "moveable". + +Dynec takes a different approach on entity typing. +Dynec requires the type of an entity (its "archeytpe") to be +*known* during creation and *immutable* after creation ("statically archetyped"). +A reference to an entity always contains the archetype. + +Dynec still supports adding/removing components for an entity, +but this is implemented by making the component optional (effectively `Option`) +instead of changing its archetype. +Adding/removing a component would not affect +systems iterating over all entities of its archetype. + +To iterate over entities with only a specific component, +the suggested approach is to split the components +to a separate entity with a new archetype +and iterate over entities with that archetype instead. +(It is also possible to iterate over entities with a specific component, +but it is less efficient than iterate over all entities of the same component, +and joining multiple components is not supported) Archetypes are typically represented as an unconstructable type (an empty enum) -that is referenced as a type parameter in system declarations. +referenced as a type parameter in system declarations. +Therefore, multiple systems can reuse the same generic function +where the archetype is a type parameter, +achieving something similar to the "subtyping" approach. +Nevertheless, Dynec discourages treating archetypes as subtypes +and encourages splitting shared components to an entity. Therefore, it is possible to reuse the same function for multiple systems by leaving the archetype as a type parameter. -There is a convenience macro to achieve this: -```rust -use dynec::archetype; +An archetype can be declared through the [`dynec::archetype`][macro.archetype] macro: -archetype! { +```rust +dynec::archetype! { /// A building entity can store goods inside. pub Building; @@ -40,8 +49,8 @@ archetype! { } ``` -The [`archetype!` macro][macro.archetype] just declares an empty enum -that implements [`Archetype`][trait.archetype]. +There is nothing magical here; +each line just declares an empty enum and implements [`Archetype`][trait.archetype] for it. -[macro.archetype]: https://sof3.github.io/dynec/master/dynec/macro.archetype.html -[trait.archetype]: https://sof3.github.io/dynec/master/dynec/archetype/trait.Archetype.html +[macro.archetype]: ../dynec/macro.archetype.html +[trait.Archetype]: ../dynec/archetype/trait.Archetype.html diff --git a/book/src/ch3-components.md b/book/src/ch3-components.md index 49dc4ad214..5f57f28a25 100644 --- a/book/src/ch3-components.md +++ b/book/src/ch3-components.md @@ -1,5 +1,11 @@ # Components -Components store the actual data of entities. -In dynec, since entities are statically archetyped, -components are always only usable in specific archetypes. +Components store the actual data for an entity. +In Dynec, since entities are statically archetyped, +a component is only meaningful when specified togethre with an archetype. 
+ +There are two kinds of components, namely "simple components" and "isotope components". +For simple components, each entity cannot have +more than one instance for each component type. +Meanwhile, isotope components allow storing multiple instances +of the same component type for the same entity. diff --git a/book/src/ch3.1-simple-components.md b/book/src/ch3.1-simple-components.md index 73ad09eac9..dae104b713 100644 --- a/book/src/ch3.1-simple-components.md +++ b/book/src/ch3.1-simple-components.md @@ -2,19 +2,20 @@ Simple components are components where each entity can have at most one instance of the component. -To declare that a type `C` is a component for entities of archetype `A`, -implement [`comp::Simple`][comp.simple] for `C`. -dynec provides a [convenience macro][macro.comp] for this: +A type can be used as a simple component for entities of archetype `A` +if it implements [`comp::Simple`][comp::Simple]. +Dynec provides [a convenience macro][macro.comp] to do this: ```rust #[comp(of = Bullet)] struct Location([f32; 3]); ``` -Since Rust allows implementing the same trait with different type parameters, -`C` can be used as a component for entities of both `A` and `B` -if it implements `comp::Simple` and `comp::Simple` separately. -This can be achieved by invoking the macro twice: +This declares a simple component called `Location` +that can be used on `Bullet` entities. + +The same type can be reused as components for multiple archetypes. +by applying the macro multiple times: ```rust use dynec::comp; @@ -24,20 +25,19 @@ use dynec::comp; struct Location([f32; 3]); ``` -Note that components are only stored on entities if at least one system uses it. - ## Initializer Simple components can be equipped with an auto-initializer. -When an entity is created without passing this component, -it is filled with the value returned by the auto-initializer. +If an entity is created without specifying this component, +the auto-initializer is called to fill the component. -The auto-initializer can depend on other simple components -passed by the entity creator or other auto-initializers. -Along with the fact that only components requested from systems get persisted, -this means you can pass a parameter during entity creation, -let other component auto-initializers read from this parameter, -and this parameter will get dropped after entity creation completes. +The auto-initializer can read values of other simple components, +either specified by the entity creator or returned by another auto-initializer. +Since Dynec does not persist a component +unless it is requested by a system or explicitly registered, +this means you can pass a temporary component during entity creation, +use its value in other component auto-initializers, +and this temporary component gets dropped after entity creation completes. The auto-initializer can be specified in the macro either as a closure: @@ -49,7 +49,8 @@ use dynec::comp; struct Damage(f32); ``` -or as a function pointer with arity (i.e. number of parameters for the function): +or as a function pointer with arity notation +(i.e. write the number of parameters for the function after a `/`): ```rust use dynec::comp; @@ -64,23 +65,23 @@ struct Damage(f32); ## Presence -A component can be either "required" or "optional". +A component is either `Required` or `Optional`. -The "optional" presence allows components to be missing on some entities. -Therefore, accessing optional components only returns `Option` instead of `C`. 
+`Optional` components may be missing on some entities. +Accessing optional components returns `Option` instead of `C`. -"Required" components must either have an auto-initializer +`Required` components must either have an auto-initializer or be passed during entity creation. -This ensures that accessing the component always succeeds for any entities. +This ensures that accessing the component always succeeds for an initialized entity; +optimizations such as chunk iteration are only possible for `Required` components. +Nevertheless, components are **always** missing +for uninitialized entities created during the middle of a tick; +more will be explained in later sections. -Note that entities created during the middle of a tick -are only fully initialized after the end of the tick. -More will be explained in later sections. - -Components with "required" presence should *both* -set [`PRESENCE = SimplePresence::Required`][comp.simple.presence] -*and* implement [`comp::Must`][must]. -This is automatically done by passing `required` in the macro: +A `Required` component must *both* +set [`PRESENCE = SimplePresence::Required`][comp::SimpleOrIsotope::PRESENCE] +*and* implement [`comp::Must`][Must]. +This is automatically done by specifying `required` in the `#[comp]` macro: ```rust use dynec::comp; @@ -91,62 +92,89 @@ struct Damage(u32); ## Finalizers -A finalizer is a component that prevents an entity from getting deleted. +A finalizer component is a component that prevents an entity from getting deleted. + +> Yes, I know this may be confusing. +> Contrary to finalizers in Java/C\#, +> a finalizer is a data component instead of a function. +> They are actually more similar to [finalizers in Kubernetes][k8s-finalizers]. -Yes, I know this may be confusing. -Contrary to finalizers in Java/C\#, -a finalizer is a data component instead of a function. -They are actually more similar to [finalizers in Kubernetes][k8s-finalizers]. When an entity is flagged for deletion, -dynec checks if all finalizer components for that entity have been removed; -the component data of the entity only get dropped after this check is true. +Dynec checks if all finalizer components for that entity have been removed. +If there is at least one present finalizer component for the entity, +the entity would instead be scheduled to asynchronously delete +when all finalizer components have been unset. This gives systems a chance to execute cleanup logic by reading the component data of the "terminating" entity. For example, a system that despawns deleted bullets from network players -may implement its logic like the pseudocode below: - -``` -for each `Bullet` entity just flagged for deletion: - read component `NetworkId` for the entity - broadcast despawn packet to all players - remove the `Despawn` finalizer component +may get a chance to handle bullet deletion: + +```text +for each `Bullet` entity flagged for deletion: + if `Despawn` componnent is set + read component `NetworkId` for the entity + broadcast despawn packet to all players + unset the `Despawn` finalizer component ``` Without the finalizer component, -the system would be unable to get the `NetworkId` for the despawned bullet. +the system would be unable to get the `NetworkId` for the despawned bullet +since the component has been cleaned up. Note that deletion-flagged entities are checked every tick. 
-To avoid impacting performance due to a growing backlog, +To avoid a growing backlog of entities to delete, finalizer components should be removed as soon as possible after deletion has been flagged. -## Choosing the component type +## Best practices -Systems that write to the same component type cannot execute together. -Furthermore, most games often need to access a single component over a loop, -so avoid putting multiple unrelated fields in the same component type. +### Small component structs + +Dynec prevents systems that write to the same component type +from executing concurrently to avoid data race. +In reality, most systems only need to access a subset of fields, +so avoid putting many unrelated fields in the same component type. Instead, prefer small, often single-field structs, unless the multiple fields are naturally related, e.g. positions/RGB values that are always accessed together. +### Optional types + Avoid using [`Option`][option] in component types; instead, use optional components to represent unused fields. -dynec uses a compact bit vector to track the existence of components, -which only takes 1 bit for each component, -while `Option` needs to preserve alignment and could take up to 64 bits -if the wrapped type requires an alignment of 64 bits (e.g. `u64`/`f64`). - -Avoid allocating heap memory for each entity component. -In other words, use of `Box`/`Vec`/etc should be avoided in component types, -because heap allocation is slow and results in memory fragmentation, -which greatly deteriorates the performance gain provided by ECS. - -[comp.simple]: https://sof3.github.io/dynec/master/dynec/comp/trait.Simple.html -[comp.simple.presence]: https://sof3.github.io/dynec/master/dynec/comp/trait.Simple.html#associatedconstant.PRESENCE -[macro.comp]: https://sof3.github.io/dynec/master/dynec/attr.comp.html -[simple-presence.required]: https://sof3.github.io/dynec/master/dynec/comp/enum.SimplePresence.html#variant.Required -[simple-presence.optional]: https://sof3.github.io/dynec/master/dynec/comp/enum.SimplePresence.html#variant.Optional -[must]: https://sof3.github.io/dynec/master/dynec/comp/trait.Must.html +By default, Dynec uses a compact bit vector to track the existence of components, +which only takes 1 bit per component. +Meanwhile, `Option` needs to preserve the alignment of `T`, +so a type like `Option` is 128 bits large +(1 bit for `None`, 63 bits for alignment padding, 64 bits for the actual data), +which is very wasteful of memory. + +### Heap-allocated types + +Minimize external (heap) memory referenced in entity components. +Heap allocation/deallocation is costly, +and the memory allocated is randomly located in the memory, +which means the CPU needs to keep loading new memory pages +into its memory cache layers +and greatly worsens performance. +Dynec stores component data in (almost) contiguous memory +and prefers processing adjacent entities in the same CPU, +so keeping all relevant data in the component structure is preferred. + +While this is inevitable for some component types like strings, +types like `Vec` can often be avoided: + +- If each entity has a similar structure of items + (i.e. `comp[0]` for entity 1 has the same logic as `comp[0]` for entity 2), + use isotope components instead. +- If the items in the vector are unstructured + (i.e. `comp[0]` for entity 1 has the same logic as `comp[1]` for entity 1), + consider turning each item into an entity and process the entity instead. 
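+
+For example, here is a minimal sketch of the first suggestion.
+It reuses the `Bullet`, `Element` and `Ingredient` names from the
+isotope component chapter that follows, and assumes `Element` is
+declared as a `Discrim` type there:
+
+```rust
+use dynec::comp;
+
+// Before: one heap allocation per entity and poor cache locality.
+#[comp(of = Bullet)]
+struct Ingredients(Vec<(Element, f64)>);
+
+// After: each `Element` discriminant gets its own contiguous storage,
+// and different discriminants can even be processed by parallel systems.
+#[comp(of = Bullet, isotope = Element)]
+struct Ingredient(f64);
+```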
+ +[comp::Simple]: ../dynec/comp/trait.Simple.html +[comp::SimpleOrIsotope::PRESENCE]: ../dynec/comp/trait.SimpleOrIsotope.html#associatedconstant.PRESENCE +[macro.comp]: ../dynec/attr.comp.html +[Must]: ../dynec/comp/trait.Must.html [k8s-finalizers]: https://kubernetes.io/docs/concepts/overview/working-with-objects/finalizers/ [option]: https://doc.rust-lang.org/std/option/enum.Option.html diff --git a/book/src/ch3.2-isotope-components.md b/book/src/ch3.2-isotope-components.md index 9096dea128..0c98a75985 100644 --- a/book/src/ch3.2-isotope-components.md +++ b/book/src/ch3.2-isotope-components.md @@ -3,48 +3,62 @@ Sometimes we want to store multiple components of the same type on an entity. For example, we want to store the ingredients that make up a bullet. The straightforward approach is to use -a `Vec<(Element, Weight)>`/`HashMap`, -but this is very bad for performance and memory due to many heap allocations, -making ECS almost as slow as OOP. - -Isotope components allow us to create components dynamically. -While simple components are identified by their type, -isotope components are identified by the type along with a "discriminant" value, -which is an (optionally newtyped) `usize` that distinguishes between isotopes. +a `Vec<(Element, Ingredient)>`/`HashMap`, +but this is very bad for performance and memory due to many heap allocations. +This is where isotope components come handy. + +An isotope component works like a component that stores +a map of "discriminants" to component values. For example, in the example above, `Element` can be used as the discriminant that distinguishes between different "weight" components, -such that each `Weight` component refers to a different element. +and an entity has a separate `Ingredient` for each `Element`. Like simple components, isotope components are also archetyped, -but they implement [`comp::Isotope`][comp.isotope] instead, +but they implement [`comp::Isotope`][comp::Isotope] instead, which can also be achieved through the `#[comp]` macro: ```rust +#[derive(Discrim)] +struct Element(u16); + #[comp(of = Bullet, isotope = Element)] -struct Ingredient(Weight); +struct Ingredient(f64); ``` +Unlike vector/map simple components, +Dynec treats each discriminant as a different component +such that it has its own storage and lock mechanism, +so systems can execute in parallel +to process different discriminants of the same component. + ## Choosing the discriminant type -Since a new component storage is created for every new isotope discriminant, -the number of different discriminants must be kept finite. +Dynec creates a new component storage for every new isotope discriminant. +If you use the `storage::Vec` (the default) storage, +the space complexity is the product of +the number of entities and the number of possible discriminants. +Therefore, the number of possible discriminant values must be kept finite. + An example valid usage is to have each discriminant correspond to one item defined in the game config file, which is a realistically small number that does not grow with the game over time. +Ideally, the possible values of discriminant are generated from a 0-based auto-increment, +e.g. corresponding to the order of the item in the config file. ## Initializer -As mentioned above, isotope components are just like simple components with the type -`HashMap`. -Initializers for isotope components return -iterators of (discriminant, value) tuples instead. +Similar to simple components, isotope components can also have an auto-initializer. 
+However, new discriminants may be introduced after entity creation, +so isotopes cannot be exhaustively initialized during entity creation +but initialized when new discriminants are added instead. +Therefore, isotope auto-initializers cannot depend on any other values. + +## Presence -Since the returned iterator involves dynamic discriminant values, -it is not possible to implement [`comp::Must`][must] for isotope components automatically. -Nevertheless, if the user is sure that all discriminants are populated -in the initializer through exhausting the domain of discriminants, -they can implement this trait manually. +Isotope components can also have a `Required` presence like simple components. +However, since discriminants are dynamically introduced, +it is not possible to initialize an entity with all possible discriminants exhaustively. +An isotope component can be `Required` as long as it has an auto-initializer. -[comp.isotope]: https://sof3.github.io/dynec/master/dynec/comp/trait.Isotope.html -[must]: https://sof3.github.io/dynec/master/dynec/comp/trait.Must.html +[comp::Isotope]: ../dynec/comp/trait.Isotope.html diff --git a/book/src/ch4-systems.md b/book/src/ch4-systems.md index 0c1694728b..37dd6e866a 100644 --- a/book/src/ch4-systems.md +++ b/book/src/ch4-systems.md @@ -1,8 +1,7 @@ # Systems Systems contain the actual code that process components. - -Systems can be easily created using the [`#[system]`][macro.system] macro: +A system can be created using the [`#[system]`][macro.system] macro: ```rust use dynec::system; @@ -19,7 +18,7 @@ with the associated functions `hello_world::call()` and `hello_world.build()`. `call` calls the original function directly, while `build()` creates a system descriptor that can be passed to a world builder. -We can package this system into a "bundle" +We can package this system into a "bundle": ```rust use dynec::world; @@ -28,13 +27,13 @@ pub struct MyBundle; impl world::Bundle for Bundle { fn register(&mut self, builder: &mut world::Builder) { - builder.schedule(Box::new(hello_world.build())); + builder.schedule(hello_world.build()); // schedule more systems here } } ``` -Then users can add the whole bundle into their world: +Then users can add the bundle into their world: ```rust let mut world = dynec::new([ @@ -43,7 +42,7 @@ let mut world = dynec::new([ ]); ``` -Alternatively, for non-distribution scenarios (e.g. unit testing), +Alternatively, in unit tests, the [`system_test!`][system_test] macro can be used: ```rust @@ -61,10 +60,18 @@ event_loop.run(|| { }) ``` +## Ticking + Since dynec is just a platform-agnostic ECS framework, -it does not integrate with any GUI frameworks to execute the main loop directly. +it does not integrate with any GUI or scheduler frameworks to execute the main loop. Usually it is executed at the same rate as the world simulation, screen rendering or turns (for turn-based games), depending on your requirements. +It is advisable to keep latency-sensitive operations out of the main loop, +i.e. do not process them directly with the Dynec scheduler +so that the world tick rate does not become a necessary latency bottleneck. +Dynec systems are designed for ticked simulation, not event handling; +event handlers may interact with the ticked world through non-blocking channels. 
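+
+As a minimal sketch (the channel wiring and names here are illustrative
+assumptions, not part of the Dynec API), an event handler thread can push
+events into a channel, and a system drains it without blocking on every tick:
+
+```rust
+use std::sync::{mpsc, Mutex};
+
+use dynec::system;
+
+/// A hypothetical event produced outside the ECS world.
+pub enum InputEvent {
+    Fire,
+    Jump,
+}
+
+/// Global state owning the receiving end of the channel.
+/// No `initial` is given, so it must be assigned in `Bundle::register`
+/// after creating the channel; the sender half stays with the event loop.
+/// The `Mutex` is only there to make the `std` receiver shareable.
+#[dynec::global]
+pub struct InputQueue {
+    pub receiver: Mutex<mpsc::Receiver<InputEvent>>,
+}
+
+#[system]
+fn drain_input(#[dynec(global)] queue: &mut InputQueue) {
+    // `try_iter` returns immediately when no events are pending,
+    // so the tick rate is never held hostage by the event source.
+    let receiver = queue.receiver.get_mut().expect("event thread panicked");
+    for event in receiver.try_iter() {
+        match event {
+            InputEvent::Fire => { /* e.g. create a bullet entity */ }
+            InputEvent::Jump => { /* e.g. update velocity components */ }
+        }
+    }
+}
+```
+
+The event loop keeps the `mpsc::Sender` and may fire events at any rate;
+the system only ever sees whatever accumulated since the previous cycle.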
+ [macro.system]: https://sof3.github.io/dynec/master/dynec/attr.system.html [system_test]: https://sof3.github.io/dynec/master/dynec/macro.system_test.html diff --git a/book/src/ch4.1-states.md b/book/src/ch4.1-states.md index 3b45446065..98d589c16d 100644 --- a/book/src/ch4.1-states.md +++ b/book/src/ch4.1-states.md @@ -1,94 +1,120 @@ # Parameter, local and global states -## Local states +## Parameter states -Systems can persist values over multiple executions, -known as "local states": +A system may request parameters when building: ```rust #[system] fn hello_world( - #[dynec(local(initial = 0))] counter: &mut i32, + #[dynec(param)] counter: &mut i32, ) { *counter += 1; - println!("counter = {counter}"); + println!("{counter}"); } + +builder.schedule(hello_world.build(123)); +builder.schedule(hello_world.build(456)); + +// ... +world.execute(dynec::tracer::Noop); // prints 124 and 457 in unspecified order +world.execute(dynec::tracer::Noop); // prints 125 and 458 in unspecified order ``` -`0` is the initial value of `counter` before the system is run the first time. The parameter type must be a reference (`&T` or `&mut T`) to the actual stored type. -Calling `world.execute()` in a row will print the following: +Each `#[dynec(param)]` parameter in `hello_world` +must be a reference (`&T` or `&mut T`), +adds a new parameter of type `T` +to the generated `build()` method in the order they are specified, +with the reference part stripped. -```text -counter = 1 -counter = 2 -counter = 3 -... -``` +Parameter states, along with other states, may be mutated when the system is run. +Each system (each instance returned by `build()`) maintains its own states. -## Parameter states +## Local states -The initial value can be passed as a parameter instead: +Unlike parameter states, local states are defined by the system itself +and is not specified through the `build()` function. ```rust #[system] fn hello_world( - #[dynec(param)] counter: &mut i32, + #[dynec(local(initial = 0))] counter: &mut i32, ) { *counter += 1; + println!("{counter}"); } -// ... +builder.schedule(hello_world.build()); +builder.schedule(hello_world.build()); -builder.schedule(Box::new(hello_world.build(123))); +// ... +world.execute(dynec::tracer::Noop); // prints 1, 1 in unspecified order +world.execute(dynec::tracer::Noop); // prints 2, 2 in unspecified order ``` -The arguments to `.build()` are all `#[param]` parameters in the order they are defined. +`0` is the initial value of `counter` before the system is run the first time. +If parameter states are defined in the function, +the `initial` expression may use such parameters by name as well. ## Global states -States can be shared between multiple systems, identified by their type. -Such types must implement the [`Global`][trait.global] trait, +States can also be shared among multiple systems +using the type as the identifier. 
+Such types must implement the [`Global`][trait.Global] trait, which can be done through the [`#[global]`][attr.global] macro: ```rust -#[dynec::global(initial = Self::default())] #[derive(Default)] +#[dynec::global(initial = Self::default())] struct MyCounter { value: i32, } #[system] -fn hello_world( +fn add_counter( #[dynec(global)] counter: &mut MyCounter, ) { counter.value += 1; } + +#[system] +fn print_counter( + #[dynec(global)] counter: &MyCounter, +) { + println!("{counter}"); +} ``` -The initial value of a global state can also be assigned -in [`Bundle::register`][bundle.register] instead -if it is not specified in the `#[dynec::global]`: +If no `initial` value is specified in `#[global]`, +the initial value of a global state must be assigned +in [`Bundle::register`][Bundle::register]. ```rust impl world::Bundle for Bundle { fn register(&mut self, builder: &mut world::Builder) { - builder.schedule(Box::new(hello_world.build())); + builder.schedule(add_counter.build()); + builder.schedule(print_counter.build()); builder.global(MyCounter { value: 123 }); } } + +// ... +world.execute(dynec::tracer::Noop); // prints 123 or 124 based on unspecified order +world.execute(dynec::tracer::Noop); // prints 124 or 125 based on unspecified order ``` -The program panics if `Bundle::register` does not initialize all global states. +The program panics if some used global states do not have an `initial` +but `Bundle::register` does not initialize them. Note that `&T` and `&mut T` are semantically different for global states. Multiple systems requesting `&T` for the same `T` may run in parallel in a multi-threaded runtime, but when a system requesting `&mut T` is running, -all other `&T` and `&mut T`-requesting systems are unschedulable +all other systems requesting `&T` or `&mut T` cannot run until the system is complete (but other unrelated systems can still be scheduled). -[trait.global]: https://sof3.github.io/dynec/master/dynec/trait.Global.html -[attr.global]: https://sof3.github.io/dynec/master/dynec/attr.global.html -[bundle.register]: https://sof3.github.io/dynec/master/dynec/world/trait.Bundle.html#method.register +[trait.Global]: ../dynec/trait.Global.html +[attr.global]: ../dynec/attr.global.html +[Bundle::register]: ../dynec/world/trait.Bundle.html#method.register diff --git a/book/src/ch4.2-component-access.md b/book/src/ch4.2-component-access.md index 7fe4938b15..14fa438c87 100644 --- a/book/src/ch4.2-component-access.md +++ b/book/src/ch4.2-component-access.md @@ -1,11 +1,11 @@ # Component access As the name "ECS" implies, -the most important feature is to access the "E" and "C" from the "S". +the most important feature is to manipulate the "E" and "C" from the "S". ## Accessing simple components -Simple components can be accessed with [`ReadSimple`][read.simple] or [`WriteSimple`][write.simple]. +Simple components can be accessed through [`ReadSimple`][ReadSimple] or [`WriteSimple`][WriteSimple]. First we declare the components we need, similar to in the previous chapters: ```rust @@ -25,12 +25,124 @@ Therefore we request reading velocity and writing position: ```rust #[system] fn motion( - mut position_acc: impl system::WriteSimple, - velcity_acc: impl system::ReadSimple, + mut position_acc: system::WriteSimple, + velocity_acc: system::ReadSimple, ) { + // work with position_acc and velocity_acc +} +``` + +We will go through how to work with the data later. 
+ +When a system that requests `WriteSimple` is running for some `A` and `C`, +all other systems that request `ReadSimple` or `WriteSimple` +cannot run until the system is complete. +Therefore, if you only need to read the data, +use `ReadSimple` instead of `WriteSimple` even though +the latter provides all abilities that the former can provide. + +## Accessing isotope components + +Isotope components are slightly more complex. +A system may request access to +some ("partial access") or all ("full access") discriminants for an isotope component. + +Full access allows the system to read/write any discriminants for the isotope type, +and lazily initializes new discriminants if they were not encountered before. +Therefore, when a system using `WriteIsotopeFull` is running, +all other systems that access the same component in any way (read/write and full/partial) +cannot run until the system is complete; +when a system using `ReadIsotopeFull` is running, +all other systems that use `WriteIsotopeFull` or `WriteIsotopePartial` +on the same component cannot run until the system is complete. + +The usage syntax of full accessors is similar to simple accessors: + +```rust +#[system] +fn add( + weights: ReadIsotopeFull, + mut volumes: WriteIsotopeFull, +) { + // ... +} +``` + +Partial access only requests specific discriminants for the isotope type. +The requested discriminants are specified through an attribute: + +```rust +#[system] +fn add( + #[dynec(param)] &element: &Element, + #[dynec(isotope(discrim = [element]))] + weights: ReadIsotopePartial, + #[dynec(isotope(discrim = [element]))] + mut volumes: WriteIsotopePartial, +) { + // ... +} +``` + +The `discrim` attribute option lets us specify which discriminants to access. +The expression can reference the initial values of parameter states. +However, mutating parameter states will *not* change +the discriminants requested by the isotope. +The third type parameter to `ReadIsotopePartial`/`WriteIsotopePartial` +is the type of the expression passed to `discrim`. + +Since a partial accessor can only interact with specific discriminants, +multiple systems using `WriteIsotopePartial` on the same component type +can run concurrently if they request a disjoint set of discriminants. + +## Iterating over entities + +The recommended way to process all entities with accessors is +to use the [`EntityIterator`][EntityIterator] API. +`EntityIterator` contains the list of initialized entities +stored in an efficient lookup format, +useful for performing bulk operations over all entities. 
+ +An `EntityIterator` can be joined with multiple accessors +to execute code on each entity efficiently: + +```rust +#[system] +fn move_entities( + entities: system::EntityIterator, + position_acc: system::WriteSimple, + velocity_acc: system::WriteSimple, +) { + for (_entity, (position, velocity)) in entities.entities_with(( + &mut position_acc, + &velocity_acc, + )) { + *position += velocity; + } +} +``` +`entities_with` also supports isotope accessors, +but they must be split for a specific discriminant first: + +```rust +#[system] +fn move_entities( + #[dynec(param)] &element: &Element, + entities: system::EntityIterator, + velocity_acc: system::WriteSimple, + #[dynec(isotope(discrim = [element]))] + weights_acc: system::ReadIsotopePartial, +) { + for (_entity, (position, weight)) in entities.entities_with(( + &mut position_acc, + &weights_acc.split([element])[0], + )) { + *velocity += weight; + } } ``` -[read.simple]: https://sof3.github.io/dynec/master/dynec/system/trait.ReadSimple.html -[write.simple]: https://sof3.github.io/dynec/master/dynec/system/trait.WriteSimple.html +[ReadSimple]: ../dynec/system/type.ReadSimple.html +[WriteSimple]: ../dynec/system/type.WriteSimple.html +[EntityIterator]: ../dynec/system/iter/struct.EntityIterator.html diff --git a/codegen/src/accessors.rs b/codegen/src/accessors.rs deleted file mode 100644 index 1f92dbc75d..0000000000 --- a/codegen/src/accessors.rs +++ /dev/null @@ -1,138 +0,0 @@ -use proc_macro2::TokenStream; -use quote::quote; -use syn::parse::{Parse, ParseStream}; -use syn::punctuated::Punctuated; - -use crate::util::Result; - -pub(crate) fn imp(input: TokenStream) -> Result { - let Inputs(inputs) = syn::parse2(input)?; - - let mut output = TokenStream::new(); - - for Input { crate_name, meta, vis, ident, fields, .. } in inputs { - let crate_name = match crate_name { - Some((_, crate_name)) => crate_name, - None => quote!(::dynec), - }; - - let field_ty: Vec<_> = - (0..fields.len()).map(|i| quote::format_ident!("__T{}", i)).collect(); - let field_meta: Vec<_> = fields - .iter() - .map(|field| { - let meta = &field.meta; - quote!(#(#meta)*) - }) - .collect(); - let field_vis: Vec<_> = fields.iter().map(|field| &field.vis).collect(); - let field_ident: Vec<_> = fields.iter().map(|field| &field.ident).collect(); - - let item = quote! 
{ - #(#meta)* - #vis struct #ident<#(#field_ty),*> { - #(#field_meta #field_vis #field_ident: #field_ty,)* - } - - unsafe impl<__Arch: #crate_name::Archetype, #(#field_ty),*> - #crate_name::system::Accessor<__Arch> - for #ident<#(#field_ty,)*> - where - #(#field_ty: #crate_name::system::Accessor<__Arch>,)* - { - type Entity<'t> = #ident< - #(<#field_ty as #crate_name::system::Accessor<__Arch>>::Entity<'t>,)* - > where Self: 't; - unsafe fn entity<'this, 'e, 'ret>(this: &'this mut Self, entity: #crate_name::entity::TempRef<'e, __Arch>) -> Self::Entity<'ret> { - #ident { - #(#field_ident: <#field_ty as #crate_name::system::Accessor<__Arch>>::entity( - &mut this.#field_ident, - entity, - ),)* - } - } - } - - unsafe impl<__Arch: #crate_name::Archetype, #(#field_ty),*> - #crate_name::system::ChunkedAccessor<__Arch> - for #ident<#(#field_ty,)*> - where - #(#field_ty: #crate_name::system::ChunkedAccessor<__Arch>,)* - { - type Chunk<'t> = #ident< - #(<#field_ty as #crate_name::system::ChunkedAccessor<__Arch>>::Chunk<'t>,)* - > where Self: 't; - unsafe fn chunk<'this, 'e, 'ret>(this: &'this mut Self, chunk: #crate_name::entity::TempRefChunk<'e, __Arch>) -> Self::Chunk<'ret> { - #ident { - #(#field_ident: <#field_ty as #crate_name::system::ChunkedAccessor<__Arch>>::chunk( - &mut this.#field_ident, - chunk, - ),)* - } - } - } - }; - output.extend(item); - } - - Ok(output) -} - -struct Inputs(Vec); - -impl Parse for Inputs { - fn parse(input: ParseStream) -> Result { - let mut inputs = Vec::new(); - while !input.is_empty() { - inputs.push(input.parse()?); - } - Ok(Self(inputs)) - } -} - -struct Input { - crate_name: Option<(syn::Token![@], TokenStream)>, - meta: Vec, - vis: syn::Visibility, - ident: syn::Ident, - _braces: syn::token::Brace, - fields: Punctuated, -} - -impl Parse for Input { - fn parse(input: ParseStream) -> Result { - let crate_name = if input.peek(syn::Token![@]) { - let at = input.parse()?; - let crate_name = input.parse()?; - Some((at, crate_name)) - } else { - None - }; - - let meta = input.call(syn::Attribute::parse_outer)?; - let vis = input.parse()?; - let ident = input.parse()?; - - let inner; - let braces = syn::braced!(inner in input); - let fields = Punctuated::parse_terminated(&inner)?; - - Ok(Self { crate_name, meta, vis, ident, _braces: braces, fields }) - } -} - -struct Field { - meta: Vec, - vis: syn::Visibility, - ident: syn::Ident, -} - -impl Parse for Field { - fn parse(input: ParseStream) -> Result { - let meta = input.call(syn::Attribute::parse_outer)?; - let vis = input.parse()?; - let ident = input.parse()?; - - Ok(Self { meta, vis, ident }) - } -} diff --git a/codegen/src/comp.rs b/codegen/src/comp.rs index 1dfedc73bf..6d9ff9a326 100644 --- a/codegen/src/comp.rs +++ b/codegen/src/comp.rs @@ -40,12 +40,6 @@ pub(crate) fn imp(args: TokenStream, input: TokenStream) -> Result }; let presence = args.find_one(|arg| option_match!(arg, ItemOpt::Required => &()))?; - if let (Some((isotope_span, _)), Some((presence_span, _))) = (isotope, presence) { - return Err(Error::new( - isotope_span.join(presence_span).unwrap_or(presence_span), - "isotope components cannot be required because new isotopes may be created dynamically", - )); - } let presence_enum = match presence { Some(_) => quote!(#crate_name::comp::Presence::Required), None => quote!(#crate_name::comp::Presence::Optional), @@ -61,6 +55,13 @@ pub(crate) fn imp(args: TokenStream, input: TokenStream) -> Result let finalizer = finalizer.is_some(); let init = args.find_one(|arg| option_match!(arg, ItemOpt::Init(_, 
func) => func))?; + if let (Some((isotope_span, _)), Some((presence_span, _)), None) = (isotope, presence, init) { + return Err(Error::new( + isotope_span.join(presence_span).unwrap_or(presence_span), + "isotope components without an auto-initializer cannot be required because new \ + isotopes may be created dynamically", + )); + } let input: syn::DeriveInput = syn::parse2(input)?; let generics = util::parse_generics(&input); diff --git a/codegen/src/lib.rs b/codegen/src/lib.rs index bf123117a9..b705eb8c16 100644 --- a/codegen/src/lib.rs +++ b/codegen/src/lib.rs @@ -2,7 +2,8 @@ use proc_macro::TokenStream; -mod accessors; +mod util; + mod archetype; mod comp; mod comps; @@ -12,11 +13,11 @@ mod global; mod system; mod tracer; mod tracer_def; -mod util; +mod zip; #[proc_macro] -pub fn accessors(input: TokenStream) -> TokenStream { - accessors::imp(input.into()).unwrap_or_else(|err| err.to_compile_error()).into() +pub fn zip(input: TokenStream) -> TokenStream { + zip::imp(input.into()).unwrap_or_else(|err| err.to_compile_error()).into() } #[proc_macro] diff --git a/codegen/src/system.rs b/codegen/src/system.rs index 7cf17f0964..0e91743eff 100644 --- a/codegen/src/system.rs +++ b/codegen/src/system.rs @@ -40,8 +40,7 @@ pub(crate) fn imp(args: TokenStream, input: TokenStream) -> Result let mut initial_state_field_defaults: Vec> = Vec::new(); let mut isotope_discrim_idents: Vec = Vec::new(); - let mut isotope_discrim_ty_params: Vec = Vec::new(); - let mut isotope_discrim_type_bounds: Vec = Vec::new(); + let mut isotope_discrim_tys: Vec> = Vec::new(); let mut isotope_discrim_values: Vec> = Vec::new(); let mut input_types: Vec = Vec::new(); @@ -157,31 +156,21 @@ pub(crate) fn imp(args: TokenStream, input: TokenStream) -> Result false => quote!(components.read_simple_storage::<#arch, #comp>()), } } - ArgType::Isotope { mutable, arch, comp, discrim, discrim_key, maybe_uninit } => { + ArgType::Isotope { mutable, arch, comp, discrim, discrim_set, maybe_uninit } => { let discrim_field = if let Some(discrim) = discrim { - let discrim_key = match discrim_key { + let discrim_set = match discrim_set { Ok(ty) => ty, Err(span) => { - return Err(Error::new( - span, - "Type parameter `K` must be specified for \ - `ReadIsotope`/`WriteIsotope` if partial isotope access is used", - )) + let discrim_ty = + quote!(<#comp as #crate_name::comp::Isotope<#arch>>::Discrim); + syn::parse_quote_spanned! 
{ span => ::std::vec::Vec<#discrim_ty> } } }; let discrim_ident = quote::format_ident!("__dynec_isotope_discrim_{}", param_index); isotope_discrim_idents.push(discrim_ident.clone()); - isotope_discrim_ty_params.push(quote::format_ident!( - "__DynecDiscrimType{}", - isotope_discrim_ty_params.len() - )); - isotope_discrim_type_bounds.push(quote!( - #crate_name::comp::discrim::Set< - <#comp as #crate_name::comp::Isotope<#arch>>::Discrim, - Key = #discrim_key, - >)); + isotope_discrim_tys.push(discrim_set); isotope_discrim_values.push(discrim); Some(quote!(self.__dynec_isotope_discrim_idents.#discrim_ident)) @@ -228,19 +217,15 @@ pub(crate) fn imp(args: TokenStream, input: TokenStream) -> Result #no_partition_call }); - quote!(#crate_name::system::EntityCreatorImpl { - buffer: &offline_buffer, - ealloc: ealloc_shard_map.borrow::<#arch>(), - }) + quote!(#crate_name::system::EntityCreator::<#arch>::new(&offline_buffer, ealloc_shard_map.borrow::<#arch>())) } ArgType::EntityDeleter { arch } => { - quote!(#crate_name::system::EntityDeleterImpl::<#arch> { - buffer: &offline_buffer, - _ph: ::std::marker::PhantomData, - }) + quote!(#crate_name::system::EntityDeleter::<#arch>::new ( + &offline_buffer, + )) } ArgType::EntityIterator { arch } => { - quote!(#crate_name::system::entity_iterator::<#arch>(ealloc_shard_map.snapshot::<#arch>().clone())) + quote!(#crate_name::system::EntityIterator::<#arch>::new(ealloc_shard_map.snapshot::<#arch>().clone())) } }; system_run_args.push(run_arg); @@ -269,20 +254,16 @@ pub(crate) fn imp(args: TokenStream, input: TokenStream) -> Result let isotope_discrim_idents_struct = quote! { #[allow(non_camel_case_types)] - struct __dynec_isotope_discrim_idents<#( - #isotope_discrim_ty_params: #isotope_discrim_type_bounds, - )*> { - #(#isotope_discrim_idents: #isotope_discrim_ty_params,)* + struct __dynec_isotope_discrim_idents { + #(#isotope_discrim_idents: #isotope_discrim_tys,)* } }; let mut local_state_struct = syn::parse2(quote! { #[allow(non_camel_case_types)] - struct __dynec_local_state<#( - #isotope_discrim_ty_params: #isotope_discrim_type_bounds, - )*> { + struct __dynec_local_state { #(#local_state_entity_attrs #local_state_field_idents: #local_state_field_tys,)* #[not_entity = "no entities can be assigned in discriminants because the world is not created yet."] - __dynec_isotope_discrim_idents: __dynec_isotope_discrim_idents<#(#isotope_discrim_ty_params,)*>, + __dynec_isotope_discrim_idents: __dynec_isotope_discrim_idents, } }).expect("invalid struct expression"); let impl_referrer_for_local_state = entity_ref::entity_ref( @@ -349,9 +330,7 @@ pub(crate) fn imp(args: TokenStream, input: TokenStream) -> Result let impl_descriptor = quote! { #[automatically_derived] - impl<#( - #isotope_discrim_ty_params: #isotope_discrim_type_bounds, - )*> #crate_name::system::Descriptor for __dynec_local_state<#(#isotope_discrim_ty_params,)*> { + impl #crate_name::system::Descriptor for __dynec_local_state { fn get_spec(&self) -> #crate_name::system::Spec { #destructure_local_states @@ -382,9 +361,7 @@ pub(crate) fn imp(args: TokenStream, input: TokenStream) -> Result }; let impl_system = quote! 
{ #[automatically_derived] - impl<#( - #isotope_discrim_ty_params: #isotope_discrim_type_bounds, - )*> #crate_name::system::#system_trait for __dynec_local_state<#(#isotope_discrim_ty_params,)*> { + impl #crate_name::system::#system_trait for __dynec_local_state { fn run(&mut self, #system_run_params) { let offline_buffer = ::std::cell::RefCell::new(offline_buffer); diff --git a/codegen/src/system/arg.rs b/codegen/src/system/arg.rs index 658b36d4be..eebac350eb 100644 --- a/codegen/src/system/arg.rs +++ b/codegen/src/system/arg.rs @@ -26,7 +26,7 @@ pub(super) enum ArgType { arch: Box, comp: Box, discrim: Option>, - discrim_key: Result, Span>, + discrim_set: Result, Span>, maybe_uninit: Vec, }, EntityCreator { @@ -53,8 +53,8 @@ fn simple_partial_builder(mutable: bool, maybe_uninit: Vec) -> Partia Error::new( args_span, "Cannot infer archetype and component for component access. Specify explicitly \ - with `#[dynec(simple(arch = X, comp = Y))]`, or use `impl ReadSimple`/`impl WriteSimple`.", + with `#[dynec(simple(arch = X, comp = Y))]`, or use `ReadSimple`/`WriteSimple`.", ) })?; @@ -77,21 +77,21 @@ fn isotope_partial_builder( return Err(Error::new( args_span, "Cannot infer archetype and component for component access. Specify explicitly \ - with `#[dynec(isotope(arch = X, comp = Y))]`, or use `impl ReadIsotope`/`impl WriteIsotope`.", + with `#[dynec(isotope(arch = X, comp = Y, [discrim_set = Z]))]`, or use \ + `(Read|Write)Isotope(Full|Isotope)`.", )); } let &arch = args.get(0).expect("args.len() >= 2"); let &comp = args.get(1).expect("args.len() >= 2"); - let discrim_key = args.get(2).map(|&ty| Box::new(ty.clone())).ok_or(args_span); + let discrim_set = args.get(2).map(|&ty| Box::new(ty.clone())).ok_or(args_span); Ok(ArgType::Isotope { - mutable: mutable || ident == "WriteIsotope", + mutable: mutable || ident.starts_with("WriteIsotope"), arch: Box::new(arch.clone()), comp: Box::new(comp.clone()), discrim, - discrim_key, + discrim_set, maybe_uninit, }) }) @@ -165,30 +165,21 @@ pub(super) fn infer_arg_type(param: &mut syn::PatType) -> Result { let arg_type = match maybe_partial { Some(MaybePartial::Full(arg_type)) => arg_type, arg_type => { - let impl_ty = match &*param.ty { - syn::Type::ImplTrait(ty) => ty, - _ => return Err(Error::new_spanned(¶m, USAGE_INFERENCE_ERROR)), + let syn::Type::Path(ty) = &*param.ty else { + return Err(Error::new_spanned(&*param.ty, USAGE_INFERENCE_ERROR)); }; - if impl_ty.bounds.len() != 1 { - return Err(Error::new_spanned(&impl_ty.bounds, USAGE_INFERENCE_ERROR)); - } - - let bound = match impl_ty.bounds.first().expect("bounds.len() == 1") { - syn::TypeParamBound::Trait(bound) => bound, - bound => return Err(Error::new_spanned(bound, USAGE_INFERENCE_ERROR)), - }; - - let trait_name = bound.path.segments.last().expect("path should not be empty"); + let trait_name = ty.path.segments.last().expect("path should not be empty"); let trait_name_string = trait_name.ident.to_string(); let builder = match arg_type { Some(MaybePartial::Partial(builder)) => builder, None => match trait_name_string.as_str() { "ReadSimple" | "WriteSimple" => simple_partial_builder(false, Vec::new()), - "ReadIsotope" | "WriteIsotope" => { - isotope_partial_builder(false, None, Vec::new()) - } + "ReadIsotopePartial" + | "WriteIsotopePartial" + | "ReadIsotopeFull" + | "WriteIsotopeFull" => isotope_partial_builder(false, None, Vec::new()), "EntityCreator" => entity_creator_partial_builder(false), "EntityDeleter" => entity_deleter_partial_builder(), "EntityIterator" => 
entity_iterator_partial_builder(), @@ -297,8 +288,8 @@ fn try_attr_to_arg_type(arg: opt::Arg, attr_span: Span, param_span: Span) -> Res let discrim = opts.find_one( |opt| option_match!(opt, opt::IsotopeArg::Discrim(_, discrim) => discrim), )?; - let discrim_key = opts - .find_one(|opt| option_match!(opt, opt::IsotopeArg::DiscrimKey(_, ty) => ty))? + let discrim_set = opts + .find_one(|opt| option_match!(opt, opt::IsotopeArg::DiscrimSet(_, ty) => ty))? .ok_or(param_span); let maybe_uninit = opts.merge_all(|opt| option_match!(opt, opt::IsotopeArg::MaybeUninit(_, tys) => tys.iter().cloned())); @@ -309,7 +300,7 @@ fn try_attr_to_arg_type(arg: opt::Arg, attr_span: Span, param_span: Span) -> Res arch: arch.clone(), comp: comp.clone(), discrim: discrim.map(|(_, discrim)| discrim.clone()), - discrim_key: discrim_key.map(|(_, ty)| ty.clone()), + discrim_set: discrim_set.map(|(_, ty)| ty.clone()), maybe_uninit, }) } diff --git a/codegen/src/system/opt.rs b/codegen/src/system/opt.rs index b4bb2f5c99..1725c4c400 100644 --- a/codegen/src/system/opt.rs +++ b/codegen/src/system/opt.rs @@ -154,7 +154,7 @@ pub(super) enum IsotopeArg { Arch(syn::Token![=], Box), Comp(syn::Token![=], Box), Discrim(syn::Token![=], Box), - DiscrimKey(syn::Token![=], Box), + DiscrimSet(syn::Token![=], Box), MaybeUninit(syn::token::Paren, Punctuated), } @@ -180,10 +180,10 @@ impl Parse for Named { let discrim = input.parse::()?; IsotopeArg::Discrim(eq, Box::new(discrim)) } - "discrim_key" => { + "discrim_set" => { let eq = input.parse::()?; let ty = input.parse::()?; - IsotopeArg::DiscrimKey(eq, Box::new(ty)) + IsotopeArg::DiscrimSet(eq, Box::new(ty)) } "maybe_uninit" => parse_maybe_uninit(input, IsotopeArg::MaybeUninit)?, _ => return Err(Error::new_spanned(&name, "Unknown option for #[dynec(isotope)]")), diff --git a/codegen/src/zip.rs b/codegen/src/zip.rs new file mode 100644 index 0000000000..d61b4727b8 --- /dev/null +++ b/codegen/src/zip.rs @@ -0,0 +1,212 @@ +use std::iter; + +use proc_macro2::{Span, TokenStream}; +use quote::quote; +use syn::parse::{Parse, ParseStream}; +use syn::punctuated::Punctuated; + +use crate::util::Result; + +pub(crate) fn imp(input: TokenStream) -> Result { + let Inputs(inputs) = syn::parse2(input)?; + + let mut output = TokenStream::new(); + + for Input { debug_print, crate_name, meta, vis, ident, fields, .. 
} in inputs { + let debug_print = debug_print.is_some(); + + if fields.is_empty() { + return Err(syn::Error::new( + Span::call_site(), + "zip structs must have at least one field", + )); + } + + let crate_name = match crate_name { + Some((_, _, _, crate_name)) => crate_name, + None => quote!(::dynec), + }; + + let field_tys: Vec<_> = + (0..fields.len()).map(|i| quote::format_ident!("__T{}", i)).collect(); + let field_meta: Vec<_> = fields + .iter() + .map(|field| { + let meta = &field.meta; + quote!(#(#meta)*) + }) + .collect(); + let field_vis: Vec<_> = fields.iter().map(|field| &field.vis).collect(); + let field_idents: Vec<_> = fields.iter().map(|field| &field.ident).collect(); + + let first_field_ident = field_idents.first().expect("struct checked to be non empty"); + let first_field_ty = field_tys.first().expect("struct checked to be non empty"); + let mut stacked_zip = quote!(<#first_field_ty as #crate_name::system::ZipChunked::<__Arch>>::chunk_to_entities(#first_field_ident)); + let mut stacked_tuples = quote!(#first_field_ident); + for (&field_ident, field_ty) in iter::zip(&field_idents, &field_tys).skip(1) { + stacked_zip = quote!(::core::iter::zip(#stacked_zip, <#field_ty as #crate_name::system::ZipChunked::<__Arch>>::chunk_to_entities(#field_ident))); + stacked_tuples = quote!((#stacked_tuples, #field_ident)); + } + + let item = quote! { + #(#meta)* + #vis struct #ident<#(#field_tys),*> { + #(#field_meta #field_vis #field_idents: #field_tys,)* + } + + impl<__Arch: #crate_name::Archetype, #(#field_tys),*> + #crate_name::system::IntoZip<__Arch> + for #ident<#(#field_tys,)*> + where + #(#field_tys: #crate_name::system::IntoZip<__Arch>,)* + { + type IntoZip = #ident<#(<#field_tys as #crate_name::system::IntoZip<__Arch>>::IntoZip,)*>; + + fn into_zip(self) -> Self::IntoZip { + let Self { #(#field_idents,)* } = self; + #ident { #( + #field_idents: <#field_tys as #crate_name::system::IntoZip<__Arch>>::into_zip(#field_idents), + )* } + } + } + + impl<__Arch: #crate_name::Archetype, #(#field_tys),*> + #crate_name::system::Zip<__Arch> + for #ident<#(#field_tys,)*> + where + #(#field_tys: #crate_name::system::Zip<__Arch>,)* + { + fn split(&mut self, offset: __Arch::RawEntity) -> Self { + let Self { #(#field_idents,)* } = self; + #ident { #( + #field_idents: <#field_tys as #crate_name::system::Zip<__Arch>>::split(#field_idents, offset), + )* } + } + + type Item = #ident< + #(<#field_tys as #crate_name::system::Zip<__Arch>>::Item,)* + >; + fn get>(self, __dynec_entity: E) -> Self::Item { + let Self { #(#field_idents,)* } = self; + let __dynec_entity = #crate_name::entity::TempRef::<__Arch>::new(__dynec_entity.id()); + #ident { #( + #field_idents: <#field_tys as #crate_name::system::Zip<__Arch>>::get( + #field_idents, + __dynec_entity, + ), + )* } + } + } + + impl<__Arch: #crate_name::Archetype, #(#field_tys),*> + #crate_name::system::ZipChunked<__Arch> + for #ident<#(#field_tys,)*> + where + #(#field_tys: #crate_name::system::ZipChunked<__Arch>,)* + { + type Chunk = #ident< + #(<#field_tys as #crate_name::system::ZipChunked<__Arch>>::Chunk,)* + >; + fn get_chunk(self, __dynec_chunk: #crate_name::entity::TempRefChunk<__Arch>) -> Self::Chunk { + let Self { #(#field_idents,)* } = self; + #ident { #( + #field_idents: <#field_tys as #crate_name::system::ZipChunked<__Arch>>::get_chunk( + #field_idents, + __dynec_chunk, + ), + )* } + } + + fn chunk_to_entities(chunk: Self::Chunk) -> impl Iterator>::Item,)* + >> { + let #ident { #(#field_idents,)* } = chunk; + #stacked_zip.map(|#stacked_tuples| 
#ident { #(#field_idents,)* }) + } + } + }; + + if debug_print { + println!("{item}"); + } + output.extend(item); + } + + Ok(output) +} + +struct Inputs(Vec); + +impl Parse for Inputs { + fn parse(input: ParseStream) -> Result { + let mut inputs = Vec::new(); + while !input.is_empty() { + inputs.push(input.parse()?); + } + Ok(Self(inputs)) + } +} + +struct Input { + debug_print: Option<(syn::Token![@], kw::__debug_print)>, + crate_name: Option<(syn::Token![@], kw::dynec_as, syn::token::Paren, TokenStream)>, + meta: Vec, + vis: syn::Visibility, + ident: syn::Ident, + _braces: syn::token::Brace, + fields: Punctuated, +} + +impl Parse for Input { + fn parse(input: ParseStream) -> Result { + let mut debug_print = None; + let mut crate_name = None; + + while input.peek(syn::Token![@]) { + let at = input.parse()?; + let lh = input.lookahead1(); + if lh.peek(kw::__debug_print) { + debug_print = Some((at, input.parse()?)); + } else if lh.peek(kw::dynec_as) { + let kw = input.parse()?; + let inner; + let paren = syn::parenthesized!(inner in input); + let crate_name_ts = inner.parse()?; + crate_name = Some((at, kw, paren, crate_name_ts)); + } else { + return Err(lh.error()); + } + } + + let meta = input.call(syn::Attribute::parse_outer)?; + let vis = input.parse()?; + let ident = input.parse()?; + + let inner; + let braces = syn::braced!(inner in input); + let fields = Punctuated::parse_terminated(&inner)?; + + Ok(Self { debug_print, crate_name, meta, vis, ident, _braces: braces, fields }) + } +} + +struct Field { + meta: Vec, + vis: syn::Visibility, + ident: syn::Ident, +} + +impl Parse for Field { + fn parse(input: ParseStream) -> Result { + let meta = input.call(syn::Attribute::parse_outer)?; + let vis = input.parse()?; + let ident = input.parse()?; + + Ok(Self { meta, vis, ident }) + } +} + +mod kw { + syn::custom_keyword!(dynec_as); + syn::custom_keyword!(__debug_print); +} diff --git a/examples/graph/main.rs b/examples/graph/main.rs index 6b399586a0..551253c4b1 100644 --- a/examples/graph/main.rs +++ b/examples/graph/main.rs @@ -28,11 +28,9 @@ fn main() { // because we cannot iterate and get at the same time. // This limitation only applies to offline mode; // in an online system, use the EntityIterator API. - let nodes: HashMap<_, _> = world - .components - .iter_simple::() - .map(|(entity, which)| (*which, world.rctrack.to_strong(entity))) - .collect(); + let storage = world.components.get_simple_storage::(); + let nodes: HashMap<_, _> = + storage.iter().map(|(entity, which)| (*which, world.rctrack.to_strong(entity))).collect(); let crops_in_farm = world .components diff --git a/src/comp/any.rs b/src/comp/any.rs index a91eb18f83..d7df090504 100644 --- a/src/comp/any.rs +++ b/src/comp/any.rs @@ -106,7 +106,7 @@ pub(crate) trait DepGetterInner { } macro_rules! impl_simple_init_fn { - ($($deps:ident),* $(,)?) => { + ($($deps:ident)*) => { impl< A: Archetype, C: comp::SimpleOrIsotope, $($deps: comp::Simple,)* @@ -142,17 +142,37 @@ macro_rules! impl_simple_init_fn { } macro_rules! 
impl_simple_init_fn_accumulate {
- () => {
- impl_simple_init_fn!();
+ ($feature:literal $first:ident $($rest:tt)*) => {
+ impl_simple_init_fn_accumulate!($feature $($rest)*);
+ #[cfg(feature = $feature)]
+ impl_simple_init_fn_accumulate!(@MIXED $first $($rest)*);
+ };
+ ($outer_feature:literal $inner_feature:literal $($rest:tt)*) => {
+ impl_simple_init_fn_accumulate!($inner_feature $($rest)*);
+ };
+ ($outer_feature:literal @ALWAYS $($rest:tt)*) => {
+ impl_simple_init_fn_accumulate!(@ALWAYS $($rest)*);
+ };
+ (@ALWAYS $first:ident $($rest:tt)*) => {
+ impl_simple_init_fn_accumulate!(@ALWAYS $($rest)*);
+ impl_simple_init_fn!($first $($rest)*);
+ };
+ (@ALWAYS) => {
+ #[allow(unused_variables)]
+ const _: () = {
+ impl_simple_init_fn!();
+ };
+ };
+ (@MIXED $($idents_front:ident)* $($feature:literal $($idents_feature:ident)*)* @ALWAYS $($idents_always:ident)*) => {
+ impl_simple_init_fn!($($idents_front)* $($($idents_feature)*)* $($idents_always)*);
+ };
- ($first:ident $(, $rest:ident)* $(,)?) => {
- impl_simple_init_fn_accumulate!($($rest),*);
- impl_simple_init_fn!($first $(, $rest)*);
- }
}
impl_simple_init_fn_accumulate!(
- P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, P11, P12, P13, P14, P15, P16, P17, P18, P19, P20, P21,
- P22, P23, P24, P25, P26, P27, P28, P29, P30, P31, P32,
+ "tuple-impl-32-init-fn" T1 T2 T3 T4 T5 T6 T7 T8
+ "tuple-impl-24-init-fn" T9 T10 T11 T12 T13 T14 T15 T16
+ "tuple-impl-16-init-fn" T17 T18 T19 T20 T21 T22 T23 T24
+ "tuple-impl-8-init-fn" T25 T26 T27 T28
+ @ALWAYS T29 T30 T31 T32
);
#[cfg(test)]
diff --git a/src/comp/discrim.rs b/src/comp/discrim.rs
index 4a2bc89812..2ead120c34 100644
--- a/src/comp/discrim.rs
+++ b/src/comp/discrim.rs
@@ -140,7 +140,7 @@ pub trait Mapped {
///
/// This is only used when storages of all isotopes are read/written in the same accessor
/// (through [`Components::read_full_isotope_storage`][read_full_isotope_storage],
-/// or `impl system::ReadIsotope` without `#[dynec(isotope(discrim = xxx))]`).
+/// or [`system::ReadIsotopeFull`](crate::system::ReadIsotopeFull)).
///
/// [read_full_isotope_storage]: crate::world::Components::read_full_isotope_storage
pub trait FullMap
diff --git a/src/entity.rs b/src/entity.rs
index c49c98097d..8ce71c223a 100644
--- a/src/entity.rs
+++ b/src/entity.rs
@@ -57,8 +57,8 @@ pub struct TempRef<'t, A: Archetype> {
}
impl<'t, A: Archetype> TempRef<'t, A> {
- /// Creates a new TemporaryRef with a lifetime.
- pub(crate) fn new(value: A::RawEntity) -> Self { Self { value, _ph: PhantomData } }
+ /// Creates a new temporary reference with a lifetime.
+ pub fn new(value: A::RawEntity) -> Self { Self { value, _ph: PhantomData } }
}
impl<'t, A: Archetype> sealed::Sealed for TempRef<'t, A> {}
@@ -74,6 +74,8 @@ impl<'t, A: Archetype> Clone for TempRef<'t, A> {
impl<'t, A: Archetype> Copy for TempRef<'t, A> {}
/// A chunk of continuous [`TempRef`]s.
+///
+/// Methods on this type imitate the API of a [`&[TempRef]`] slice.
// Instantiations of this struct must guarantee that all entities in `start..end`
// satisfy the presence invariants for the duration of the lifetime `'t`.
pub struct TempRefChunk<'t, A: Archetype> {
@@ -88,6 +90,17 @@ impl<'t, A: Archetype> TempRefChunk<'t, A> {
Self { start, end, _ph: PhantomData }
}
+ /// Gets the entity by a 0-based offset in the chunk.
+ ///
+ /// Returns `None` if the offset is out of bounds.
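For orientation, a minimal sketch of how the new accessor could be called from user code; the helper function, and the assumption that `get` returns a `TempRef` bound to the chunk's lifetime `'t`, are illustrative rather than part of the patch:

```rust
use dynec::entity::{TempRef, TempRefChunk};
use dynec::Archetype;

/// Returns the entity at `offset` within the chunk, if the chunk is long enough.
fn nth_in_chunk<'t, A: Archetype>(
    chunk: &TempRefChunk<'t, A>,
    offset: usize,
) -> Option<TempRef<'t, A>> {
    // Delegates to the slice-like accessor added above; `None` past the end.
    chunk.get(offset)
}
```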
+ pub fn get(&self, offset: usize) -> Option> { + if self.start.add(offset) < self.end { + Some(TempRef::new(self.start.add(offset))) + } else { + None + } + } + /// Iterates over all entities in the chunk. pub fn iter(&self) -> impl Iterator> + '_ { iter::successors(Some(self.start), |prev| { diff --git a/src/entity/ealloc.rs b/src/entity/ealloc.rs index ceb27aac83..e7b0b58e49 100644 --- a/src/entity/ealloc.rs +++ b/src/entity/ealloc.rs @@ -1,6 +1,6 @@ //! Manages entity ID allocation and deallocation. -use std::any::{Any, TypeId}; +use std::any::{type_name, Any, TypeId}; use std::cell::{self, RefCell}; use std::collections::HashMap; use std::{iter, ops}; @@ -60,8 +60,14 @@ pub trait Ealloc: 'static { /// Flushes the queued operations after joining. fn flush(&mut self); + + /// Marks that the allocator needs to flush before executing the next system. + fn mark_need_flush(&mut self); + /// Flush and reset the mark if `mark_need_flush` was called since the last flush. + fn flush_if_marked(&mut self); } +// Object-safe version of [`Ealloc`]. pub(crate) trait AnyEalloc { fn as_any_mut(&mut self) -> &mut dyn Any; @@ -70,6 +76,9 @@ pub(crate) trait AnyEalloc { fn snapshot(&self) -> Box; fn flush(&mut self); + + fn mark_need_flush(&mut self); + fn flush_if_marked(&mut self); } impl AnyEalloc for T { @@ -82,6 +91,9 @@ impl AnyEalloc for T { fn snapshot(&self) -> Box { Box::new(Ealloc::snapshot(self)) } fn flush(&mut self) { Ealloc::flush(self); } + + fn mark_need_flush(&mut self) { Ealloc::mark_need_flush(self) } + fn flush_if_marked(&mut self) { Ealloc::flush_if_marked(self) } } // Default allocator @@ -181,6 +193,22 @@ impl Map { pub fn snapshot(&mut self) -> Snapshot { Ealloc::snapshot(self.get::()) } + + /// Marks that an archetype has been modified between ticks and shall be flushed. + pub(crate) fn mark_need_flush(&mut self) { + let Some(ealloc) = self.map.get_mut(&TypeId::of::()) else { + panic!("Archetype {} is not used in any systems", type_name::()) + }; + + ealloc.mark_need_flush(); + } + + /// Flush all archetypes that have been [marked for flush](Self::mark_need_flush). + pub(crate) fn flush_if_marked(&mut self) { + for ealloc in self.map.values_mut() { + ealloc.flush_if_marked(); + } + } } struct ShardMapEntry { @@ -195,6 +223,10 @@ pub struct ShardMap { map: HashMap, } +/// Return value of [`ShardMap::borrow`]. +pub type BorrowedShard<'t, A: Archetype> = impl ops::DerefMut::AllocHint>> + + 't; + impl ShardMap { /// Gets the mutable shard reference. pub fn get( @@ -213,11 +245,7 @@ impl ShardMap { } /// Borrows the shard for an archetype through a [`RefCell`]. - pub fn borrow( - &self, - ) -> impl ops::DerefMut< - Target = impl Shard::AllocHint>, - > + '_ { + pub fn borrow(&self) -> BorrowedShard { let shard = self.map.get(&TypeId::of::()).expect("Use of unregistered archetype"); let shard = shard .cell diff --git a/src/entity/ealloc/recycling.rs b/src/entity/ealloc/recycling.rs index 0cf2557805..8e30defe59 100644 --- a/src/entity/ealloc/recycling.rs +++ b/src/entity/ealloc/recycling.rs @@ -15,28 +15,31 @@ type MutableShards = Vec>>; /// The default allocator supporting atomically-allocated new IDs and arbitrary recycler. #[derive(Debug)] -pub struct Recycling, S: ShardAssigner> { +pub struct Recycling, S: ShardAssigner> { + /// Whether `mark_need_flush` was called. + flush_mark: bool, /// The next ID to allocate into shards. - global_gauge: Arc, + global_gauge: Arc, /// A sorted list of recycled IDs during the last join. 
- recyclable: Arc>, + recyclable: Arc>, /// The actual IDs assigned to different shards. recycler_shards: MutableShards, /// The assigned shard. shard_assigner: S, /// The queue of deallocated IDs to distribute. - dealloc_queue: Vec, + dealloc_queue: Vec, /// The queue of allocated IDs during online, to be synced to recyclable after join. - reuse_queue_shards: MutableShards>, + reuse_queue_shards: MutableShards>, } -impl, S: ShardAssigner> Recycling { +impl, S: ShardAssigner> Recycling { /// Creates a new recycling allocator with a custom shard assigner. /// This can only be used for unit testing since the Archetype API does not support dynamic /// shard assigners. pub(crate) fn new_with_shard_assigner(num_shards: usize, shard_assigner: S) -> Self { - let global_gauge = E::new(); + let global_gauge = RawT::new(); Self { + flush_mark: false, global_gauge: Arc::new(global_gauge), recyclable: Arc::default(), recycler_shards: (0..num_shards).map(|_| Arc::default()).collect(), @@ -52,24 +55,24 @@ impl, S: ShardAssigner> Recycling { } fn get_reuse_queue_offline( - reuse_queues: &mut MutableShards>, + reuse_queues: &mut MutableShards>, index: usize, - ) -> &mut Vec { + ) -> &mut Vec { let arc = reuse_queues.get_mut(index).expect("index out of bounds"); Arc::get_mut(arc).expect("shards are dropped in offline mode").get_mut() } fn iter_allocated_chunks_offline( &mut self, - ) -> impl iter::FusedIterator> + '_ { + ) -> impl iter::FusedIterator> + '_ { iter_gaps(self.global_gauge.load(), self.recyclable.iter().copied()) } } -impl, S: ShardAssigner> Ealloc for Recycling { - type Raw = E; +impl, S: ShardAssigner> Ealloc for Recycling { + type Raw = RawT; type AllocHint = T::Hint; - type Shard = impl Shard; + type Shard = impl Shard; fn new(num_shards: usize) -> Self { Self::new_with_shard_assigner(num_shards, S::default()) } @@ -106,9 +109,11 @@ impl, S: ShardAssigner> Ealloc for Recycling { shard.allocate(hint) } - fn queue_deallocate(&mut self, id: E) { self.dealloc_queue.push(id); } + fn queue_deallocate(&mut self, id: RawT) { self.dealloc_queue.push(id); } fn flush(&mut self) { + self.flush_mark = false; + let mut ids = &self.dealloc_queue[..]; { let recyclable = Arc::get_mut(&mut self.recyclable) @@ -156,6 +161,13 @@ impl, S: ShardAssigner> Ealloc for Recycling { self.dealloc_queue.clear(); } + + fn mark_need_flush(&mut self) { self.flush_mark = true; } + fn flush_if_marked(&mut self) { + if self.flush_mark { + self.flush(); + } + } } fn distribute_sorted(sizes: &mut [usize], total: usize) { @@ -193,18 +205,18 @@ pub struct RecyclingShard { reuse_queue: ReuseQueueRef, } -impl Shard +impl Shard for RecyclingShard where - GaugeRef: ops::Deref + Send + 'static, + GaugeRef: ops::Deref + Send + 'static, RecyclerRef: ops::DerefMut + Send + 'static, - ::Target: Recycler, - ReuseQueueRef: ops::DerefMut> + Send + 'static, + ::Target: Recycler, + ReuseQueueRef: ops::DerefMut> + Send + 'static, { - type Raw = E; - type Hint = >::Hint; + type Raw = RawT; + type Hint = >::Hint; - fn allocate(&mut self, hint: Self::Hint) -> E { + fn allocate(&mut self, hint: Self::Hint) -> RawT { if let Some(id) = self.recycler.poll(hint) { id } else { @@ -213,14 +225,14 @@ where } } -impl, GaugeRef, RecyclerRef, ReuseQueueRef> +impl, GaugeRef, RecyclerRef, ReuseQueueRef> RecyclingShard where - GaugeRef: ops::Deref, + GaugeRef: ops::Deref, RecyclerRef: ops::DerefMut, - ReuseQueueRef: ops::DerefMut>, + ReuseQueueRef: ops::DerefMut>, { - fn allocate(&mut self, hint: T::Hint) -> E { + fn allocate(&mut self, hint: T::Hint) -> RawT 
{ if let Some(id) = self.recycler.poll(hint) { self.reuse_queue.push(id); id diff --git a/src/entity/ealloc/snapshot.rs b/src/entity/ealloc/snapshot.rs index bd600e1ef2..f33c60fc06 100644 --- a/src/entity/ealloc/snapshot.rs +++ b/src/entity/ealloc/snapshot.rs @@ -11,7 +11,7 @@ use crate::entity::Raw; // TODO change this into a trait to allow non-recycling ealloc. // TODO make this a trait so that offline access does not need to clone the entire recyclable set. /// A snapshot of the allocated entities during offline. -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct Snapshot { pub(super) gauge: E, pub(super) recyclable: Arc>, @@ -32,7 +32,7 @@ impl Snapshot { } } -#[derive(Clone, Copy)] +#[derive(Debug, Clone, Copy)] pub(crate) struct Slice<'t, E> { pub(crate) start: E, pub(crate) end: E, @@ -46,7 +46,7 @@ impl<'t, E: Raw> Slice<'t, E> { // For now, we just take the assumption that the holes are uniformly distributed. let midpt = self.start.approx_midpoint(self.end); - let is_far = self.end.sub(midpt) < 8; + let is_far = self.end.sub(midpt) >= 8; is_far.then_some(midpt) } diff --git a/src/entity/raw.rs b/src/entity/raw.rs index eba0ff1772..7d4879be74 100644 --- a/src/entity/raw.rs +++ b/src/entity/raw.rs @@ -1,5 +1,4 @@ -use std::num::NonZeroU32; -use std::sync::atomic::{AtomicU32, Ordering}; +use std::sync::atomic; use std::{fmt, ops}; use crate::util::UnsafeEqOrd; @@ -52,39 +51,6 @@ pub trait Raw: Sized + Send + Sync + Copy + fmt::Debug + UnsafeEqOrd + 'static { fn range(range: ops::Range) -> Self::Range; } -impl Raw for NonZeroU32 { - type Atomic = AtomicU32; - - fn new() -> Self::Atomic { AtomicU32::new(1) } - - fn add(self, count: usize) -> Self { - let count: u32 = count.try_into().expect("count is too large"); - NonZeroU32::new(self.get() + count).expect("integer overflow") - } - - fn sub(self, other: Self) -> usize { - (self.get() - other.get()).try_into().expect("usize >= u32") - } - - fn approx_midpoint(self, other: Self) -> Self { - NonZeroU32::new((self.get() + other.get()) / 2) - .expect("get() >= 1, get() + get() >= 2, half >= 1") - } - - fn from_primitive(i: Primitive) -> Self { - i.try_into().ok().and_then(Self::new).expect("Invalid usize") - } - - fn to_primitive(self) -> Primitive { self.get().try_into().expect("Too many entities") } - - type Range = impl Iterator; - fn range(range: ops::Range) -> Self::Range { - (range.start.get()..range.end.get()).map(|v| { - NonZeroU32::new(v).expect("zero does not appear between two non-zero unsigned integers") - }) - } -} - /// An atomic variant of [`Raw`]. pub trait Atomic: Send + Sync + 'static { /// Equivalent to `AtomicUsize::fetch_add(self, count, Ordering::SeqCst)` @@ -100,26 +66,68 @@ pub trait Atomic: Send + Sync + 'static { fn load_mut(&mut self) -> E; } -impl Atomic for AtomicU32 { - fn fetch_add(&self, count: usize) -> NonZeroU32 { - let original = AtomicU32::fetch_add( - self, - count.try_into().expect("count is too large"), - Ordering::SeqCst, - ); - NonZeroU32::new(original).expect("integer overflow") - } - - fn load(&self) -> NonZeroU32 { - let original = AtomicU32::load(self, Ordering::SeqCst); - NonZeroU32::new(original).expect("invalid state") - } - - fn load_mut(&mut self) -> NonZeroU32 { - let original = *AtomicU32::get_mut(self); - NonZeroU32::new(original).expect("invalid state") - } +macro_rules! 
impl_raw { + ($base:ty, $atomic:ty, $primitive:ty) => { + impl Raw for $base { + type Atomic = $atomic; + + fn new() -> Self::Atomic { <$atomic>::new(1) } + + fn add(self, count: usize) -> Self { + let count: $primitive = count.try_into().expect("count is too large"); + <$base>::new(self.get() + count).expect("integer overflow") + } + + fn sub(self, other: Self) -> usize { + (self.get() - other.get()).try_into().expect("usize should be sufficiently large") + } + + fn approx_midpoint(self, other: Self) -> Self { + <$base>::new((self.get() + other.get()) / 2) + .expect("get() >= 1, get() + get() >= 2, half >= 1") + } + + fn from_primitive(i: Primitive) -> Self { + i.try_into().ok().and_then(Self::new).expect("Invalid usize") + } + + fn to_primitive(self) -> Primitive { self.get().try_into().expect("Too many entities") } + + type Range = impl Iterator; + fn range(range: ops::Range) -> Self::Range { + (range.start.get()..range.end.get()).map(|v| { + <$base>::new(v) + .expect("zero does not appear between two non-zero unsigned integers") + }) + } + } + + impl Atomic<$base> for $atomic { + fn fetch_add(&self, count: usize) -> $base { + let original = <$atomic>::fetch_add( + self, + count.try_into().expect("count is too large"), + atomic::Ordering::SeqCst, + ); + <$base>::new(original).expect("integer overflow") + } + + fn load(&self) -> $base { + let original = <$atomic>::load(self, atomic::Ordering::SeqCst); + <$base>::new(original).expect("invalid state") + } + + fn load_mut(&mut self) -> $base { + let original = *<$atomic>::get_mut(self); + <$base>::new(original).expect("invalid state") + } + } + }; } +impl_raw!(std::num::NonZeroU16, std::sync::atomic::AtomicU16, u16); +impl_raw!(std::num::NonZeroU32, std::sync::atomic::AtomicU32, u32); +impl_raw!(std::num::NonZeroU64, std::sync::atomic::AtomicU64, u64); + /// The primitive scalar type. pub type Primitive = usize; diff --git a/src/lib.rs b/src/lib.rs index 4bb78e6948..14460bd67b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -163,10 +163,12 @@ allow(dead_code) )] #![cfg_attr(doc, warn(missing_docs))] -#![feature(impl_trait_in_assoc_type)] +#![feature(return_position_impl_trait_in_trait)] +#![feature(type_alias_impl_trait, impl_trait_in_assoc_type)] #![feature(maybe_uninit_uninit_array, maybe_uninit_array_assume_init)] #![feature(never_type)] #![feature(sync_unsafe_cell)] +#![feature(slice_take)] /// Internal re-exports used in macros. #[doc(hidden)] diff --git a/src/macros.rs b/src/macros.rs index 2cf62d4537..fbe29bd23a 100644 --- a/src/macros.rs +++ b/src/macros.rs @@ -1,30 +1,10 @@ -/// Declares a composite struct that implements -/// [`Accessor`](crate::system::Accessor) and [`Chunked`](crate::system::ChunkedAccessor) -/// by delegation to all fields and reconstructing the same struct with different types. -/// -/// # Example -/// ``` -/// dynec::accessors! { -/// /// This is an example accessor set struct. -/// /// We can document it and apply attributes on it. -/// #[allow(dead_code)] -/// pub Foo { -/// /// This documents the field. -/// pub(crate) bar, -/// qux, -/// } -/// } -/// ``` -#[doc(inline)] -pub use dynec_codegen::accessors; - -#[cfg(test)] -mod accessors_tests {} - /// Declares archetypes. /// /// # Example /// ``` +/// use std::collections::BTreeSet; +/// use std::num::NonZeroU16; +/// /// dynec::archetype! { /// /// This is an example archetype. /// /// We can document it and apply attributes on it. @@ -35,6 +15,9 @@ mod accessors_tests {} /// /// separated by semicolons. /// /// The trailing semicolon is optional. 
/// pub(crate) Bar;
+///
+/// /// Options can be applied in parentheses.
+/// pub Qux(raw_entity = NonZeroU16, recycler = BTreeSet);
/// }
///
/// static_assertions::assert_impl_all!(Foo: dynec::Archetype);
@@ -42,11 +25,30 @@ mod accessors_tests {}
/// ```
///
/// Since documentation, attributes, visibility and the trailing semicolon are optional,
-/// private undocumented archetypes can be declared in a single line as well:
+/// a private undocumented archetype can be declared in a single line as well:
+///
/// ```
/// dynec::archetype!(Foo);
/// static_assertions::assert_impl_all!(Foo: dynec::Archetype);
/// ```
+///
+/// # Options
+/// Options are applied in parentheses after an archetype identifier.
+/// Multiple options are separated by commas.
+///
+/// ## `raw_entity = $ty`
+/// Selects the [backing type for entity ID](crate::entity::Raw) for entities of this archetype.
+/// The default value is [`NonZeroU32`](std::num::NonZeroU32).
+///
+/// ## `recycler = $ty`
+/// Selects the data structure used in the recycling entity allocator to
+/// [recycle](crate::entity::ealloc::Recycler) freed IDs.
+/// The default value is [`Vec<#raw_entity>`](Vec).
+///
+/// ## `shard_assigner = $ty`
+/// Selects the [strategy to assign](crate::entity::ealloc::ShardAssigner) available entity IDs
+/// to different shards.
+/// The default value is [`ThreadRngShardAssigner`](crate::entity::ealloc::ThreadRngShardAssigner).
#[doc(inline)]
pub use dynec_codegen::archetype;
@@ -238,7 +240,7 @@ mod comps_tests {}
/// fn test_system(#[dynec(global)] _qux: &Qux) {}
///
/// let mut builder = dynec::world::Builder::new(1);
-/// builder.schedule(Box::new(test_system.build()));
+/// builder.schedule(test_system.build());
/// builder.build();
/// ```
#[doc(inline)]
@@ -364,22 +366,24 @@ mod global_tests {}
/// ```
///
/// ## Simple components
-/// Parameters in the form `impl ReadSimple` or `impl WriteSimple`,
+/// Parameters of type `ReadSimple` or `WriteSimple`,
/// request access to a [simple component](crate::comp::Simple) of type `C`
-/// from entities of the [archetype](crate::Archetype) `A`,
-/// exposed through a type that implements [`system::ReadSimple`](crate::system::ReadSimple)
-/// or [`system::WriteSimple`](crate::system::WriteSimple).
+/// from entities of the [archetype](crate::Archetype) `A`.
/// The latter provides mutable and exclusive access to the component storages.
///
-/// ### Using other bounds
-/// Other trait bounds for the parameter are also allowed,
-/// but the macro would not be able to infer type parameters and mutability.
+/// ### Using other aliases
+/// Using type aliases/renamed imports for the types is also allowed,
+/// but the macro would be unable to infer type parameters and mutability.
/// In such cases, they must be indicated explicitly in the attribute.
/// See the syntax reference below for details.
///
-/// ### Uninitialized entity references.
-/// Entity creation ordering is automatically enforced if `C` contains entity references,
-/// Use the `maybe_uninit` attribute to remove this ordering.
+/// ### Uninitialized entity references
+/// If `C` contains [references](crate::entity::Referrer) to entities of some archetype `T`,
+/// the scheduler automatically enforces that the system runs before
+/// any systems that create entities of archetype `T`,
+/// because components for entities created through [`EntityCreator`](crate::system::EntityCreator)
+/// are uninitialized until the current cycle completes.
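For illustration, a system written against the concrete accessor types described above might look as follows; the `Ball` archetype and the `Pos`/`Vel` components are made up for this sketch and the body is elided:

```rust
use dynec::system;

dynec::archetype!(Ball);

#[dynec::comp(of = Ball, required)]
struct Pos(f64);

#[dynec::comp(of = Ball, required)]
struct Vel(f64);

#[system]
fn motion(
    pos: system::WriteSimple<Ball, Pos>, // exclusive access to the `Pos` storage
    vel: system::ReadSimple<Ball, Vel>,  // shared access to the `Vel` storage
) {
    // e.g. zip the two storages here and advance `Pos` by `Vel`.
    let _ = (pos, vel);
}
```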
+/// Use the `maybe_uninit` attribute to remove this ordering limitation. /// /// See [`EntityCreationPartition`](crate::system::partition::EntityCreationPartition#component-accessors) /// for more information. @@ -389,10 +393,10 @@ mod global_tests {} /// # /* /// #[dynec(simple( /// // Optional, specifies the archetype and component explicitly. -/// // Only required when the parameter type is not `impl ReadSimple`/`impl WriteSimple`. +/// // Only required when the parameter type is not `ReadSimple`/`WriteSimple`. /// arch = $ty, comp = $ty, /// // Optional, indicates that the component access is exclusive explicitly. -/// // Only required when the parameter type is not `impl WriteSimple`. +/// // Only required when the parameter type is not `WriteSimple`. /// mut, /// // Optional, acknowledges that the entities of the specified archetypes /// // contained in the simple components may be uninitialized. @@ -402,17 +406,16 @@ mod global_tests {} /// ``` /// /// ## Isotope components -/// Parameters in the form `impl ReadIsotope` or `impl WriteIsotope`, +/// Parameters of type [`(Read|Write)Isotope(Full|Partial)`](mod@crate::system#types) /// request access to an [isotope component](crate::comp::Isotope) of type `C` -/// from entities of the [archetype](crate::Archetype) `A`, -/// exposed through a type that implements [`system::ReadIsotope`](crate::system::ReadIsotope) -/// or [`system::WriteIsotope`](crate::system::WriteIsotope). -/// The latter provides mutable and exclusive access to the component storages. +/// from entities of the [archetype](crate::Archetype) `A`. +/// The `Write` variants provide mutable and exclusive access to the component storages. /// /// ### Partial isotope access -/// By default, all discriminants of the isotope component are requested, -/// such that writes are exclusive with all systems that read any part of the discriminants. -/// The accessor can be made partial instead: +/// If [`ReadIsotopePartial`](crate::system::ReadIsotopePartial) or +/// [`WriteIsotopePartial`](crate::system::WriteIsotopePartial) is used, +/// the system only requests access to specific discriminants of the isotope component. +/// The actual discriminants are specified with an attribute: /// /// ``` /// # /* @@ -420,26 +423,31 @@ mod global_tests {} /// # */ /// ``` /// -/// The expression `discrim_set` implements -/// [discrim::Set](crate::comp::discrim::Set), -/// which is the set of discriminants that this system uses. -/// The expression can reference local and param states directly. -/// However, since it is only evaluated once before the first run of the system, -/// subsequent writes to the states have no effect on the resolved discriminant set. +/// The expression `discrim_set` contains the set of discriminants requested by this system +/// contained in an implementation of +/// [discrim::Set](crate::comp::discrim::Set)<C::[Discrim](crate::comp::Discrim)>, +/// which is typically an array or a [`Vec`]. +/// The expression may reference param states directly. +/// The expression is only evaluated once before the first run of the system, +/// so it will not react to subsequent changes to the param states. /// /// `K` is the type of the [key](crate::comp::discrim::Set::Key) to index the discriminant set. /// /// See the documentation of [`discrim::Set`](crate::comp::discrim::Set) for more information. /// -/// ### Using other bounds -/// Other trait bounds for the parameter are also allowed, -/// but the macro would not be able to infer type parameters and mutability. 
+/// ### Using other aliases +/// Using type aliases/renamed imports for the types is also allowed, +/// but the macro would be unable to infer type parameters and mutability. /// In such cases, they must be indicated explicitly in the attribute. /// See the syntax reference below for details. /// -/// ### Uninitialized entity references. -/// Entity creation ordering is automatically enforced if `C` contains entity references, -/// Use the `maybe_uninit` attribute to remove this ordering. +/// ### Uninitialized entity references +/// If `C` contains [references](crate::entity::Referrer) to entities of some archetype `T`, +/// the scheduler automatically enforces that the system runs before +/// any systems that create entities of archetype `T`, +/// because components for entities created through [`EntityCreator`](crate::system::EntityCreator) +/// are uninitialized until the current cycle completes. +/// Use the `maybe_uninit` attribute to remove this ordering limitation. /// /// See [`EntityCreationPartition`](crate::system::partition::EntityCreationPartition#component-accessors) /// for more information. @@ -448,13 +456,16 @@ mod global_tests {} /// ``` /// # /* /// #[dynec(isotope( -/// // Optional, indicates that this accessor only uses the given subset of discriminants. +/// // Required if and only if the type is ReadIsotopePartial or WriteIsotopePartial. /// discrim = $expr, -/// // Optional, must be the same as the `Key` associated type of the `discrim` expression. -/// // Only required when the parameter type is not `impl ReadIsotope`/`impl WriteIsotope`. -/// discrim_key = $ty, +/// // Optional, must be the same as the type of the `discrim` expression. +/// // Only required when the parameter type is not `ReadIsotopePartial`/`WriteIsotopePartial`. +/// // Note that `ReadIsotopePartial`/`WriteIsotopePartial` have an optional third type parameter +/// // that expects the same type as `discrim_set`, +/// // which is `Vec` by default. +/// discrim_set = $ty, /// // Optional, specifies the archetype and component explicitly. -/// // Only required when the parameter type is not `impl ReadIsotope`/`impl WriteIsotope`. +/// // Only required when the parameter type is not `(Read|Write)Isotope(Full|Partial)`. /// arch = $ty, comp = $ty, /// // Optional, indicates that the component access is exclusive explicitly. /// // Only required when the parameter type is not `impl WriteSimple`. @@ -467,7 +478,7 @@ mod global_tests {} /// ``` /// /// ## Entity creation -/// Parameters that require an implementation of [`EntityCreator`](crate::system::EntityCreator) +/// Parameters that require an [`EntityCreator`](crate::system::EntityCreator) /// can be used to create entities. /// The archetype of created entities is specified in the type bounds. /// Note that entity creation is asynchronous to ensure synchronization, @@ -501,7 +512,7 @@ mod global_tests {} /// ``` /// /// ## Entity deletion -/// Parameters that require an implementation of [`EntityDeleter`](crate::system::EntityDeleter) +/// Parameters that require an [`EntityDeleter`](crate::system::EntityDeleter) /// can be used to delete entities. /// The archetype of deleted entities is specified in the type bounds. /// Note that `EntityDeleter` can only be used to mark entities as "deleting"; @@ -511,9 +522,11 @@ mod global_tests {} /// It is advisable to execute finalizer-removing systems /// after systems that mark entities for deletion finish executing. 
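As a sketch of the creator/deleter plumbing discussed in these sections, a system can declare both parameters with the archetype in the type arguments; the `Bullet` archetype is hypothetical and the actual queueing calls are omitted:

```rust
use dynec::system;

dynec::archetype!(Bullet);

#[system]
fn manage_bullets(
    creator: system::EntityCreator<Bullet>, // queues creation of `Bullet` entities
    deleter: system::EntityDeleter<Bullet>, // marks `Bullet` entities as deleting
) {
    // Requests issued through `creator`/`deleter` are buffered and applied
    // asynchronously after the current cycle joins.
    let _ = (creator, deleter);
}
```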
/// This allows deletion to happen in the same cycle, -/// thus slightly improving entity deletion performance +/// thus slightly reducing entity deletion latency /// (but this is not supposed to be critical anyway). -/// Nevertheless, unlike entity creation, entity deletion does not have an automatic partition. +/// Nevertheless, unlike entity creation, +/// the scheduler does not automatically enforce ordering between +/// finalizer-manipulating systems and entity-deleting systems. /// /// ### Syntax reference /// ``` @@ -526,6 +539,22 @@ mod global_tests {} /// # */ /// ``` /// +/// ## Entity iterator +/// Parameters that require an [`EntityIterator`](crate::system::EntityIterator) +/// can be used to iterate over entities and zip multiple component iterators. +/// See the documentation for `EntityIterator` for details. +/// +/// ### Syntax reference +/// ``` +/// # /* +/// /// This attribute is not required unless `EntityIterator` is aliased. +/// #[dynec(entity_iterator( +/// // Optional, specifies the archetype if `EntityIterator` is aliased. +/// arch = $ty, +/// ))] +/// # */ +/// ``` +/// /// # Example /// ``` /// use dynec::system; @@ -560,13 +589,13 @@ mod global_tests {} /// #[dynec(local(initial = 0))] counter: &mut u16, /// #[dynec(param)] &skill_id: &SkillType, /// #[dynec(global)] title: &mut Title, -/// x: impl system::WriteSimple, -/// y: impl system::WriteSimple, -/// dir: impl system::ReadSimple, -/// #[dynec(isotope(discrim = [skill_id]))] skill: impl system::ReadIsotope< +/// x: system::WriteSimple, +/// y: system::WriteSimple, +/// dir: system::ReadSimple, +/// #[dynec(isotope(discrim = [skill_id]))] skill: system::ReadIsotopePartial< /// Player, /// SkillLevel, -/// usize, +/// [SkillType; 1], /// >, /// ) { /// *counter += 1; @@ -786,7 +815,7 @@ macro_rules! system_test { ) => {{ let mut builder = $crate::world::Builder::new(0); $( - builder.schedule(Box::new($systems)); + builder.schedule($systems); )* #[allow(unused_mut)] @@ -802,6 +831,34 @@ macro_rules! system_test { }} } +/// Similar to [`system_test`], but returns the entities in the form +/// `(world, (ent1, ent2, ...))` +#[macro_export] +macro_rules! system_test_exported { + ( + $($systems:expr),* ; + $( + $(let $var:ident :)? $arch:ty = ($($components:tt)*); + )* + ) => {{ + let mut builder = $crate::world::Builder::new(0); + $( + builder.schedule($systems); + )* + + #[allow(unused_mut)] + let mut world = builder.build(); + + $( + $(let $var = )? world.create::<$arch>( + $crate::comps![@($crate) $arch => $($components)*] + ); + )* + + (world, ($($($var,)?)*)) + }} +} + /// Asserts that a type can be used as a partition. /// /// # Example @@ -823,3 +880,32 @@ macro_rules! assert_partition { }; }; } + +/// Declares a composite struct that implements +/// [`IntoZip`](crate::system::iter::IntoZip), [`Zip`](crate::system::iter::Zip) +/// and [`ZipChunked`](crate::system::iter::ZipChunked) +/// by delegation to all fields and reconstructing the same struct with different types. +/// +/// All fields accept arbitrary types, similar to a tuple, +/// and are projected to the corresponding storages upon entity iteration. +/// +/// # Example +/// ``` +/// #![feature(return_position_impl_trait_in_trait)] +/// +/// dynec::zip! { +/// /// This is an example zip struct. +/// /// We can document it and apply attributes on it. +/// #[allow(dead_code)] +/// pub Foo { +/// /// This documents the field. 
+/// pub(crate) bar, +/// qux, +/// } +/// } +/// ``` +#[doc(inline)] +pub use dynec_codegen::zip; + +#[cfg(test)] +mod zip_tests {} diff --git a/src/storage.rs b/src/storage.rs index 507d283902..cf0d5056b3 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -17,34 +17,10 @@ pub(crate) use isotope::{AnyMap as AnyIsotopeMap, Map as IsotopeMap, MapInner as mod tests; /// A storage for storing component data. -/// -/// # Safety -/// Implementors of this trait must ensure that -/// [`get`](Self::get) and [`get_mut`](Self::get_mut) are consistent and [injective][injective]. -/// In other words, for any `a: Self::RawEntity`, -/// `get(a)` and `get_mut(a)` return the same value (only differing by mutability), -/// and for any `b: Self::RawEntity` where `a != b`, `get(a)` must not alias `get(b)`. -/// -/// This implies the implementation is not safe if -/// [`Eq`] and [`Ord`] are incorrectly implemented for `Self::RawEntity`, -/// which is why [`entity::Raw`] is an unsafe trait -/// that strictly requires complete equivalence and ordering. -/// (Imagine if `RawEntity` is [`f64`], and `a` and `b` are both [`f64::NAN`]; -/// then `a != b` but `get_mut(a)` would still alias `get_mut(b)`) -/// -/// [injective]: https://en.wikipedia.org/wiki/Injective_function -pub unsafe trait Storage: Default + Send + Sync + 'static { - /// The type of entity ID used for identification. - type RawEntity: entity::Raw; - /// The component type stored. - type Comp: Send + Sync; - +pub trait Storage: Access + Default + Send + Sync + 'static { /// Gets a shared reference to the component for a specific entity if it is present. fn get(&self, id: Self::RawEntity) -> Option<&Self::Comp>; - /// Gets a mutable reference to the component for a specific entity if it is present. - fn get_mut(&mut self, id: Self::RawEntity) -> Option<&mut Self::Comp>; - /// Sets or removes the component for a specific entity, /// returning the original value if it was present. fn set(&mut self, id: Self::RawEntity, value: Option) -> Option; @@ -65,13 +41,10 @@ pub unsafe trait Storage: Default + Send + Sync + 'static { /// where `slice` is the slice of components in the chunk, /// and `index` is the entity index of `slice[0]`. /// `slice` is always nonempty. + /// + /// Non-chunked storages should implement this function by returning a chunk for each entity. fn iter_chunks(&self) -> Self::IterChunks<'_>; - /// Return value of [`iter_mut`](Self::iter_mut). - type IterMut<'t>: Iterator + 't; - /// Returns a mutable iterator over the storage, ordered by entity index order. - fn iter_mut(&mut self) -> Self::IterMut<'_>; - /// Return value of [`iter_chunks_mut`](Self::iter_chunks_mut). type IterChunksMut<'t>: Iterator> + 't; /// Returns a mutable iterator of slices over the storage, ordered by entity index order. @@ -80,21 +53,16 @@ pub unsafe trait Storage: Default + Send + Sync + 'static { /// where `slice` is the slice of components in the chunk, /// and `index` is the entity index of `slice[0]`. /// `slice` is always nonempty. + /// + /// Non-chunked storages should implement this function by returning a chunk for each entity. fn iter_chunks_mut(&mut self) -> Self::IterChunksMut<'_>; - /// Return value of [`partition_at`](Self::partition_at). - type Partition<'u>: Partition<'u, Self::RawEntity, Self::Comp> + /// Return value of [`as_partition`](Self::as_partition). 
+ type Partition<'u>: Partition<'u, RawEntity = Self::RawEntity, Comp = Self::Comp> where Self: 'u; /// Converts the storage to a [`Partition`] that covers the whole storage (similar to `slice[..]`). fn as_partition(&mut self) -> Self::Partition<'_>; - /// Splits the storage into two partitions for parallel iterable access. - fn partition_at( - &mut self, - offset: Self::RawEntity, - ) -> (Self::Partition<'_>, Self::Partition<'_>) { - self.as_partition().partition_at(offset) - } } /// Borrows a slice of a storage, analogously `&'t mut Storage[..]`. @@ -102,48 +70,65 @@ pub unsafe trait Storage: Default + Send + Sync + 'static { /// This trait does not provide `set` because /// adding/removing items may cause rebalances in the tree implementation /// and result in dangling references in other partitions that are not `&mut`-locked. -pub trait Partition<'t, E: entity::Raw, C: Send + Sync + 'static>: Send + Sync + Sized { +pub trait Partition<'t>: Access + Send + Sync + Sized + 't { /// Return value of [`by_ref`](Self::by_ref). - type ByRef<'u>: Partition<'u, E, C> + type ByRef<'u>: Partition<'u, RawEntity = Self::RawEntity, Comp = Self::Comp> where Self: 'u; /// Re-borrows the partition with reduced lifetime. /// - /// This is useful for calling [`iter_mut`](Self::iter_mut) - /// and [`partition_at`](Self::partition_at), + /// This is useful for calling [`into_iter_mut`](Self::into_iter_mut) + /// and [`split_at`](Self::split_at), /// which take `self` as receiver to preserve the lifetime. fn by_ref(&mut self) -> Self::ByRef<'_>; - /// Gets a mutable reference to the component for a specific entity if it is present. - fn get_mut(&mut self, entity: E) -> Option<&mut C>; - - /// Return value of [`iter_mut`](Self::iter_mut). - type IterMut: Iterator; - /// Returns a mutable iterator over the storage, ordered by entity index order. - fn iter_mut(self) -> Self::IterMut; - /// Splits the partition further into two subpartitions. /// `entity` must be `> 0` and `< partition_length`, /// i.e. the expected key ranges of both partitions must be nonempty. /// (It is allowed to have a nonempty range which does not contain any existing keys) - fn partition_at(self, entity: E) -> (Self, Self); + fn split_at(mut self, entity: Self::RawEntity) -> (Self, Self) { + let right = self.split_out(entity); + (self, right) + } + + /// Splits the partition further into two subpartitions, + /// replacing `self` with the left partition. + /// + /// `entity` must be `> 0` and `< partition_length`, + /// i.e. the expected key ranges of both partitions must be nonempty. + /// (It is allowed to have a nonempty range which does not contain any existing keys) + fn split_out(&mut self, entity: Self::RawEntity) -> Self; + + /// Return value of [`into_iter_mut`](Self::into_iter_mut). + type IntoIterMut: Iterator; + /// Same as [`iter_mut`](Access::iter_mut), but moves the partition object into the iterator. + fn into_iter_mut(self) -> Self::IntoIterMut; + + /// Same as [`get_mut`](Access::get_mut), but returns a reference with lifetime `'t`. + fn into_mut(self, entity: Self::RawEntity) -> Option<&'t mut Self::Comp>; +} + +/// Mutable access functions for a storage, generalizing [`Storage`] and [`Partition`]. +pub trait Access { + /// The type of entity ID used for identification. + type RawEntity: entity::Raw; + /// The component type stored. + type Comp: Send + Sync + 'static; + + /// Gets a mutable reference to the component for a specific entity if it is present. 
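Since `get_mut` and `iter_mut` now live on this shared `Access` trait, a helper can be written once and reused for whole storages and for partitions alike. A small sketch, assuming `dynec::storage::Access` is publicly exported and its iterator yields `(entity, &mut component)` pairs:

```rust
use dynec::storage::Access;

/// Resets every present component to its default value,
/// regardless of whether `access` is a full storage or a partition of one.
fn reset_all<S: Access>(access: &mut S)
where
    S::Comp: Default,
{
    for (_entity, comp) in access.iter_mut() {
        *comp = S::Comp::default();
    }
}
```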
+ fn get_mut(&mut self, entity: Self::RawEntity) -> Option<&mut Self::Comp>; + + /// Return value of [`iter_mut`](Self::iter_mut). + type IterMut<'u>: Iterator + 'u + where + Self: 'u; + /// Returns a mutable iterator over the storage, ordered by entity index order. + fn iter_mut(&mut self) -> Self::IterMut<'_>; } /// Provides chunked access capabilities, /// i.e. the storage can always return a slice for contiguous present components. -/// -/// # Safety -/// Implementors of this trait must ensure that -/// [`get_chunk`](Self::get_chunk) and [`get_chunk_mut`](Self::get_chunk_mut) are consistent, -/// and non-overlapping ranges map to non-overlapping slices. -/// In other words, for any `a, b: Self::RawEntity` where `a < b`, -/// `get_chunk(a, b)` and `get_chunk_mut(a, b)` return the same slice -/// (only differing by mutability), -/// and for any `c, d: Self::RawEntity` where `b <= c` `c < d`, -/// `get_chunk(a, b)` must not alias `get_chunk(c, d)`. -/// -/// [injective]: https://en.wikipedia.org/wiki/Injective_function -pub unsafe trait Chunked: Storage { +pub trait Chunked: Storage + AccessChunked { /// Gets a shared reference to a slice of components. /// /// Returns `None` if any of the components in the range is missing. @@ -151,6 +136,14 @@ pub unsafe trait Chunked: Storage { /// Panics if `start > end`. fn get_chunk(&self, start: Self::RawEntity, end: Self::RawEntity) -> Option<&[Self::Comp]>; + /// Return value of [`as_partition_chunk`](Self::as_partition_chunk). + type PartitionChunked<'u>: PartitionChunked<'u, RawEntity = Self::RawEntity, Comp = Self::Comp>; + /// Converts the storage to a [`PartitionChunked`] that covers the whole storage (similar to `slice[..]`). + fn as_partition_chunk(&mut self) -> Self::PartitionChunked<'_>; +} + +/// Mutable chunk access functions for a storage, generalizing [`Chunked`] and [`PartitionChunked`]. +pub trait AccessChunked: Access { /// Gets a mutable reference to a slice of components. /// /// Returns `None` if any of the components in the range is missing. @@ -161,11 +154,6 @@ pub unsafe trait Chunked: Storage { start: Self::RawEntity, end: Self::RawEntity, ) -> Option<&mut [Self::Comp]>; - - /// Return value of [`as_partition_chunk`](Self::as_partition_chunk). - type PartitionChunked<'u>: PartitionChunked<'u, Self::RawEntity, Self::Comp>; - /// Converts the storage to a [`PartitionChunked`] that covers the whole storage (similar to `slice[..]`). - fn as_partition_chunk(&mut self) -> Self::PartitionChunked<'_>; } /// Borrows a slice of a chunked storage, analogously `&'t mut Chunked[..]`. @@ -173,18 +161,17 @@ pub unsafe trait Chunked: Storage { /// This trait does not provide `set` because /// adding/removing items may cause rebalances in the tree implementation /// and result in dangling references in other partitions that are not `&mut`-locked. -pub trait PartitionChunked<'t, E: entity::Raw, C: Send + Sync + 'static>: - Partition<'t, E, C> -{ - /// Gets a mutable reference to a slice of components. - /// - /// Returns `None` if any of the components in the range is missing. - /// - /// Panics if `start > end`. - fn get_chunk_mut(&mut self, start: E, end: E) -> Option<&mut [C]>; +pub trait PartitionChunked<'t>: Partition<'t> + AccessChunked { + /// Gets a mutable reference to a slice of components, + /// preserving the lifetime `'t` of this partition object. 
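The by-value partition API above is what makes disjoint parallel mutation possible: `split_at` consumes a partition and hands back two halves that can be moved into separate tasks. A hedged sketch, assuming the paths are publicly exported and the iterators yield `(entity, &mut component)` pairs (`rayon::join` is used only for illustration):

```rust
use dynec::storage::{Partition, Storage};

/// Doubles every component in `storage`, working on the two halves in parallel.
fn par_double<S: Storage<Comp = u32>>(storage: &mut S, midpoint: S::RawEntity) {
    let (left, right) = storage.as_partition().split_at(midpoint);
    rayon::join(
        || {
            for (_entity, value) in left.into_iter_mut() {
                *value *= 2;
            }
        },
        || {
            for (_entity, value) in right.into_iter_mut() {
                *value *= 2;
            }
        },
    );
}
```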
+ fn into_chunk_mut( + self, + start: Self::RawEntity, + end: Self::RawEntity, + ) -> Option<&'t mut [Self::Comp]>; /// Return value of [`into_iter_chunks_mut`](Self::into_iter_chunks_mut). - type IntoIterChunksMut: Iterator; + type IntoIterChunksMut: Iterator; /// Returns a mutable iterator over the storage, ordered by entity index order. fn into_iter_chunks_mut(self) -> Self::IntoIterChunksMut; } diff --git a/src/storage/tests.rs b/src/storage/tests.rs index e9c923bd93..2b7861b52f 100644 --- a/src/storage/tests.rs +++ b/src/storage/tests.rs @@ -3,7 +3,7 @@ use std::marker::PhantomData; use std::num::NonZeroU32; -use crate::storage::Partition; +use crate::storage::{Access, Partition}; use crate::Storage; macro_rules! test_storage { @@ -12,16 +12,10 @@ macro_rules! test_storage { test_single_small_hole test_single_big_hole_with_reinsertion test_partition_no_panic - #[should_panic = "Entity 5 is not in the partition ..4"] test_partition_panic_left_some - #[should_panic = "Entity 4 is not in the partition ..4"] test_partition_panic_left_none #[should_panic = "Entity 3 is not in the partition 5.."] test_partition_panic_right_some #[should_panic = "Entity 4 is not in the partition 5.."] test_partition_panic_right_none - #[should_panic = "Entity 4 is not in the partition ..3"] test_repartition_panic_ll_lr - #[should_panic = "Entity 8 is not in the partition ..3"] test_repartition_panic_ll_r #[should_panic = "Entity 2 is not in the partition 3.."] test_repartition_panic_lr_ll - #[should_panic = "Entity 8 is not in the partition ..5"] test_repartition_panic_lr_r #[should_panic = "Entity 2 is not in the partition 5.."] test_repartition_panic_rl_l - #[should_panic = "Entity 8 is not in the partition ..7"] test_repartition_panic_rl_rr #[should_panic = "Entity 3 is not in the partition 7.."] test_repartition_panic_rr_l #[should_panic = "Entity 6 is not in the partition 7.."] test_repartition_panic_rr_rl } @@ -163,7 +157,7 @@ where { let mut storage: S = setup_partition_storage(); { - let (mut left, mut right) = storage.partition_at(NonZeroU32::new(4).unwrap()); + let (mut left, mut right) = storage.as_partition().split_at(NonZeroU32::new(4).unwrap()); assert_eq!(left.get_mut(NonZeroU32::new(1).unwrap()), Some(&mut 1)); assert_eq!(left.get_mut(NonZeroU32::new(3).unwrap()), Some(&mut 3)); assert_eq!(right.get_mut(NonZeroU32::new(4).unwrap()), None); @@ -171,7 +165,7 @@ where assert_eq!(right.get_mut(NonZeroU32::new(9).unwrap()), Some(&mut 9)); } { - let (mut left, mut right) = storage.partition_at(NonZeroU32::new(5).unwrap()); + let (mut left, mut right) = storage.as_partition().split_at(NonZeroU32::new(5).unwrap()); assert_eq!(left.get_mut(NonZeroU32::new(1).unwrap()), Some(&mut 1)); assert_eq!(left.get_mut(NonZeroU32::new(3).unwrap()), Some(&mut 3)); assert_eq!(left.get_mut(NonZeroU32::new(4).unwrap()), None); @@ -185,7 +179,7 @@ where S: Storage, { let mut storage: S = setup_partition_storage(); - let (mut left, _) = storage.partition_at(NonZeroU32::new(4).unwrap()); + let (mut left, _) = storage.as_partition().split_at(NonZeroU32::new(4).unwrap()); left.get_mut(NonZeroU32::new(5).unwrap()); } @@ -194,7 +188,7 @@ where S: Storage, { let mut storage: S = setup_partition_storage(); - let (mut left, _) = storage.partition_at(NonZeroU32::new(4).unwrap()); + let (mut left, _) = storage.as_partition().split_at(NonZeroU32::new(4).unwrap()); left.get_mut(NonZeroU32::new(4).unwrap()); } @@ -203,7 +197,7 @@ where S: Storage, { let mut storage: S = setup_partition_storage(); - let (_, mut right) = 
storage.partition_at(NonZeroU32::new(5).unwrap()); + let (_, mut right) = storage.as_partition().split_at(NonZeroU32::new(5).unwrap()); right.get_mut(NonZeroU32::new(3).unwrap()); } @@ -212,7 +206,7 @@ where S: Storage, { let mut storage: S = setup_partition_storage(); - let (_, mut right) = storage.partition_at(NonZeroU32::new(5).unwrap()); + let (_, mut right) = storage.as_partition().split_at(NonZeroU32::new(5).unwrap()); right.get_mut(NonZeroU32::new(4).unwrap()); } @@ -232,10 +226,10 @@ macro_rules! repartition_panic_test { { let mut storage: S = setup_partition_storage(); - let first_pair = storage.partition_at(NonZeroU32::new($first_cut).unwrap()); + let first_pair = storage.as_partition().split_at(NonZeroU32::new($first_cut).unwrap()); let first = repartition_panic_test!(@take $first_half of first_pair); - let second_pair = first.partition_at(NonZeroU32::new($second_cut).unwrap()); + let second_pair = first.split_at(NonZeroU32::new($second_cut).unwrap()); let mut second = repartition_panic_test!(@take $second_half of second_pair); second.get_mut(NonZeroU32::new($probe).unwrap()); diff --git a/src/storage/tree.rs b/src/storage/tree.rs index 0590ad540c..c7c677059e 100644 --- a/src/storage/tree.rs +++ b/src/storage/tree.rs @@ -2,29 +2,37 @@ use std::cell::SyncUnsafeCell; use std::collections::BTreeMap; use std::slice; -use super::{ChunkMut, ChunkRef, Storage}; +use super::{Access, ChunkMut, ChunkRef, Partition, Storage}; use crate::entity; /// A storage based on [`BTreeMap`]. -pub struct Tree { - // `SyncUnsafeCell` here must be treaeted as a normal `C` +pub struct Tree { + // `SyncUnsafeCell` here must be treated as a normal `C` // unless the whole storage is mutably locked, // which means the current function exclusively manages this map. // `&Tree` must not be used to access the cells mutably. - data: BTreeMap>, + data: BTreeMap>, } -impl Default for Tree { +impl Default for Tree { fn default() -> Self { Self { data: BTreeMap::new() } } } -// Safety: the backend of `get`/`get_mut` is a BTreeSet, -// which is defined to be injective -// assuming correct implementation of Eq + Ord. -unsafe impl Storage for Tree { - type RawEntity = E; +impl Access for Tree { + type RawEntity = RawT; type Comp = C; + fn get_mut(&mut self, id: Self::RawEntity) -> Option<&mut C> { + self.data.get_mut(&id).map(|cell| cell.get_mut()) + } + + type IterMut<'t> = impl Iterator + 't; + fn iter_mut(&mut self) -> Self::IterMut<'_> { + Box::new(self.data.iter_mut().map(|(&entity, cell)| (entity, cell.get_mut()))) + } +} + +impl Storage for Tree { fn get(&self, id: Self::RawEntity) -> Option<&C> { self.data.get(&id).map(|cell| unsafe { // Safety: `&self` implies that nobody else can mutate the values. 
@@ -32,10 +40,6 @@ unsafe impl Storage for Tree { }) } - fn get_mut(&mut self, id: Self::RawEntity) -> Option<&mut C> { - self.data.get_mut(&id).map(|cell| cell.get_mut()) - } - fn set(&mut self, id: Self::RawEntity, new: Option) -> Option { match new { Some(new) => self.data.insert(id, SyncUnsafeCell::new(new)), @@ -61,32 +65,27 @@ unsafe impl Storage for Tree { self.iter().map(|(entity, item)| ChunkRef { slice: slice::from_ref(item), start: entity }) } - type IterMut<'t> = impl Iterator + 't; - fn iter_mut(&mut self) -> Self::IterMut<'_> { - Box::new(self.data.iter_mut().map(|(&entity, cell)| (entity, cell.get_mut()))) - } - type IterChunksMut<'t> = impl Iterator> + 't; fn iter_chunks_mut(&mut self) -> Self::IterChunksMut<'_> { self.iter_mut() .map(|(entity, item)| ChunkMut { slice: slice::from_mut(item), start: entity }) } - type Partition<'t> = StoragePartition<'t, E, C>; + type Partition<'t> = StoragePartition<'t, RawT, C>; fn as_partition(&mut self) -> Self::Partition<'_> { StoragePartition { data: &self.data, lower_bound: None, upper_bound: None } } } -/// Return value of [`Tree::partition_at`]. -pub struct StoragePartition<'t, E: entity::Raw, C> { - data: &'t BTreeMap>, - lower_bound: Option, - upper_bound: Option, +/// Return value of [`Tree::split_at`]. +pub struct StoragePartition<'t, RawT: entity::Raw, C> { + data: &'t BTreeMap>, + lower_bound: Option, + upper_bound: Option, } -impl<'t, E: entity::Raw, C> StoragePartition<'t, E, C> { - fn assert_bounds(&self, entity: E) { +impl<'t, RawT: entity::Raw, C> StoragePartition<'t, RawT, C> { + fn assert_bounds(&self, entity: RawT) { if let Some(bound) = self.lower_bound { assert!(entity >= bound, "Entity {entity:?} is not in the partition {bound:?}.."); } @@ -96,19 +95,11 @@ impl<'t, E: entity::Raw, C> StoragePartition<'t, E, C> { } } -impl<'t, E: entity::Raw, C: Send + Sync + 'static> super::Partition<'t, E, C> - for StoragePartition<'t, E, C> -{ - type ByRef<'u> = StoragePartition<'u, E, C> where Self: 'u; - fn by_ref(&mut self) -> Self::ByRef<'_> { - StoragePartition { - data: self.data, - lower_bound: self.lower_bound, - upper_bound: self.upper_bound, - } - } +impl<'t, RawT: entity::Raw, C: Send + Sync + 'static> Access for StoragePartition<'t, RawT, C> { + type RawEntity = RawT; + type Comp = C; - fn get_mut(&mut self, entity: E) -> Option<&mut C> { + fn get_mut(&mut self, entity: RawT) -> Option<&mut C> { self.assert_bounds(entity); let cell = self.data.get(&entity)?; @@ -120,11 +111,27 @@ impl<'t, E: entity::Raw, C: Send + Sync + 'static> super::Partition<'t, E, C> } } - type IterMut = impl Iterator; - fn iter_mut(self) -> Self::IterMut { + type IterMut<'u> = impl Iterator + 'u where Self: 'u; + fn iter_mut(&mut self) -> Self::IterMut<'_> { self.by_ref().into_iter_mut() } +} + +impl<'t, RawT: entity::Raw, C: Send + Sync + 'static> Partition<'t> + for StoragePartition<'t, RawT, C> +{ + type ByRef<'u> = StoragePartition<'u, RawT, C> where Self: 'u; + fn by_ref(&mut self) -> Self::ByRef<'_> { + StoragePartition { + data: self.data, + lower_bound: self.lower_bound, + upper_bound: self.upper_bound, + } + } + + type IntoIterMut = impl Iterator; + fn into_iter_mut(self) -> Self::IntoIterMut { let iter = match (self.lower_bound, self.upper_bound) { (Some(lower), Some(upper)) => Box::new(self.data.range(lower..upper)) - as Box)>>, + as Box)>>, (Some(lower), None) => Box::new(self.data.range(lower..)), (None, Some(upper)) => Box::new(self.data.range(..upper)), (None, None) => Box::new(self.data.iter()), @@ -139,25 +146,33 @@ impl<'t, E: 
entity::Raw, C: Send + Sync + 'static> super::Partition<'t, E, C> }) } - fn partition_at(self, entity: E) -> (Self, Self) { + fn into_mut(self, entity: Self::RawEntity) -> Option<&'t mut Self::Comp> { self.assert_bounds(entity); + let cell = self.data.get(&entity)?; + unsafe { + // Safety: StoragePartition locks all keys under `self.cmp` exclusively, + // and our key is under `self.cmp`. + // We already have `&mut self`, so no other threads are accessing this range. + Some(&mut *cell.get()) + } + } + + fn split_out(&mut self, entity: RawT) -> Self { + self.assert_bounds(entity); + + let right = Self { + data: self.data, + lower_bound: Some(entity), + upper_bound: self.upper_bound, + }; + self.upper_bound = Some(entity); + // Safety: `entity` is between lower_bound and upper_bound, // so the resultant bound will be non-overlapping. // We already have `&mut self`, so this range cannot be used until the partitions are // dropped. - ( - Self { - data: self.data, - lower_bound: self.lower_bound, - upper_bound: Some(entity), - }, - Self { - data: self.data, - lower_bound: Some(entity), - upper_bound: self.upper_bound, - }, - ) + right } } diff --git a/src/storage/vec.rs b/src/storage/vec.rs index 627ca8b921..69cb06929a 100644 --- a/src/storage/vec.rs +++ b/src/storage/vec.rs @@ -5,18 +5,20 @@ use std::mem::{self, MaybeUninit}; use bitvec::prelude::BitVec; use bitvec::slice::BitSlice; -use super::{ChunkMut, ChunkRef, Chunked, Storage}; +use super::{ + Access, AccessChunked, ChunkMut, ChunkRef, Chunked, Partition, PartitionChunked, Storage, +}; use crate::{entity, util}; /// The basic storage indexed by entity IDs directly. -pub struct VecStorage { +pub struct VecStorage { cardinality: usize, bits: BitVec, data: Vec>, - _ph: PhantomData, + _ph: PhantomData, } -impl VecStorage { +impl VecStorage { fn bit(&self, index: usize) -> bool { match self.bits.get(index) { Some(bit) => *bit, @@ -45,7 +47,7 @@ impl VecStorage { } } -impl Default for VecStorage { +impl Default for VecStorage { fn default() -> Self { Self { cardinality: 0, @@ -56,38 +58,40 @@ impl Default for VecStorage { } } -// Safety: the backend of `get`/`get_mut` is a slice. -// Assuming `E` implements Eq + Ord correctly, -// slices are injective because they are simply memory mapping. 
-unsafe impl Storage for VecStorage { - type RawEntity = E; +impl Access for VecStorage { + type RawEntity = RawT; type Comp = C; - fn get(&self, id: E) -> Option<&C> { + fn get_mut(&mut self, id: RawT) -> Option<&mut C> { let index = id.to_primitive(); if self.bit(index) { - let value = self.data.get(index).expect("bits mismatch"); - let value = unsafe { value.assume_init_ref() }; + let value = self.data.get_mut(index).expect("bits mismatch"); + let value = unsafe { value.assume_init_mut() }; Some(value) } else { None } } - fn get_mut(&mut self, id: E) -> Option<&mut C> { + type IterMut<'t> = impl Iterator + 't; + fn iter_mut(&mut self) -> Self::IterMut<'_> { iter_mut(0, &self.bits, &mut self.data) } +} + +impl Storage for VecStorage { + fn get(&self, id: RawT) -> Option<&C> { let index = id.to_primitive(); if self.bit(index) { - let value = self.data.get_mut(index).expect("bits mismatch"); - let value = unsafe { value.assume_init_mut() }; + let value = self.data.get(index).expect("bits mismatch"); + let value = unsafe { value.assume_init_ref() }; Some(value) } else { None } } - fn set(&mut self, id: E, new: Option) -> Option { + fn set(&mut self, id: RawT, new: Option) -> Option { let index = id.to_primitive(); let old = if self.bit(index) { @@ -119,13 +123,13 @@ unsafe impl Storage for VecStorage usize { self.cardinality } - type Iter<'t> = impl Iterator + 't; + type Iter<'t> = impl Iterator + 't; fn iter(&self) -> Self::Iter<'_> { let indices = self.bits.iter_ones(); let data = &self.data; indices.map(move |index| { - let entity = E::from_primitive(index); + let entity = RawT::from_primitive(index); let value = data.get(index).expect("bits mismatch"); let value = unsafe { value.assume_init_ref() }; (entity, value) @@ -136,34 +140,31 @@ unsafe impl Storage for VecStorage Self::IterChunks<'_> { new_iter_chunks_ref(&self.bits, &self.data[..]).map(|(start_index, chunk)| ChunkRef { slice: unsafe { slice_assume_init_ref(chunk) }, - start: E::from_primitive(start_index), + start: RawT::from_primitive(start_index), }) } - type IterMut<'t> = impl Iterator + 't; - fn iter_mut(&mut self) -> Self::IterMut<'_> { iter_mut(0, &self.bits, &mut self.data) } - type IterChunksMut<'t> = impl Iterator> + 't; fn iter_chunks_mut(&mut self) -> Self::IterChunksMut<'_> { new_iter_chunks_mut(&self.bits, &mut self.data[..]).map(|(start_index, chunk)| ChunkMut { slice: unsafe { slice_assume_init_mut(chunk) }, - start: E::from_primitive(start_index), + start: RawT::from_primitive(start_index), }) } - type Partition<'t> = StoragePartition<'t, E, C>; + type Partition<'t> = StoragePartition<'t, RawT, C>; fn as_partition(&mut self) -> Self::Partition<'_> { self.as_partition_chunk() } } -fn iter_mut<'storage, E: entity::Raw, C: 'static>( +fn iter_mut<'storage, RawT: entity::Raw, C: 'static>( start_offset: usize, bits: &'storage bitvec::slice::BitSlice, data: &'storage mut [MaybeUninit], -) -> impl Iterator + 'storage { +) -> impl Iterator + 'storage { let indices = bits.iter_ones(); indices.map(move |index| { - let entity = E::from_primitive(start_offset + index); + let entity = RawT::from_primitive(start_offset + index); let value = data.get_mut(index).expect("bits mismatch"); let value = unsafe { value.assume_init_mut() }; let value = unsafe { mem::transmute::<&mut C, &mut C>(value) }; @@ -171,18 +172,28 @@ fn iter_mut<'storage, E: entity::Raw, C: 'static>( }) } -/// Return value of [`VecStorage::partition_at`]. -pub struct StoragePartition<'t, E: entity::Raw, C> { +/// Return value of [`VecStorage::split_at`]. 
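// A minimal round-trip sketch through the dense storage (not part of this patch);
// the `VecStorage` module path and the `f64` component type are assumptions for illustration.
use std::num::NonZeroU32;

use dynec::storage::vec::VecStorage; // assumed path
use dynec::Storage as _;

fn round_trip() {
    let mut storage: VecStorage<NonZeroU32, f64> = VecStorage::default();
    let id = NonZeroU32::new(1).unwrap();

    // `set` returns the previous value; passing `None` removes the component.
    assert_eq!(storage.set(id, Some(1.5)), None);
    assert_eq!(storage.get(id), Some(&1.5));
    assert_eq!(storage.set(id, None), Some(1.5));
    assert_eq!(storage.get(id), None);
}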
+pub struct StoragePartition<'t, RawT: entity::Raw, C> { bits: &'t BitSlice, data: &'t mut [MaybeUninit], offset: usize, - _ph: PhantomData, + _ph: PhantomData, } -impl<'t, E: entity::Raw, C: Send + Sync + 'static> super::Partition<'t, E, C> - for StoragePartition<'t, E, C> +impl<'t, RawT: entity::Raw, C: Send + Sync + 'static> Access for StoragePartition<'t, RawT, C> { + type RawEntity = RawT; + type Comp = C; + + fn get_mut(&mut self, entity: RawT) -> Option<&mut C> { self.by_ref().into_mut(entity) } + + type IterMut<'u> = impl Iterator + 'u where Self: 'u; + fn iter_mut(&mut self) -> Self::IterMut<'_> { self.by_ref().into_iter_mut() } +} + +impl<'t, RawT: entity::Raw, C: Send + Sync + 'static> Partition<'t> + for StoragePartition<'t, RawT, C> { - type ByRef<'u> = StoragePartition<'u, E, C> where Self: 'u; + type ByRef<'u> = StoragePartition<'u, RawT, C> where Self: 'u; fn by_ref(&mut self) -> Self::ByRef<'_> { StoragePartition { bits: self.bits, @@ -192,7 +203,10 @@ impl<'t, E: entity::Raw, C: Send + Sync + 'static> super::Partition<'t, E, C> } } - fn get_mut(&mut self, entity: E) -> Option<&mut C> { + type IntoIterMut = impl Iterator; + fn into_iter_mut(self) -> Self::IntoIterMut { iter_mut(self.offset, self.bits, self.data) } + + fn into_mut(self, entity: RawT) -> Option<&'t mut C> { let index = match entity.to_primitive().checked_sub(self.offset) { Some(index) => index, None => panic!("Entity {entity:?} is not in the partition {:?}..", self.offset), @@ -202,42 +216,45 @@ impl<'t, E: entity::Raw, C: Send + Sync + 'static> super::Partition<'t, E, C> let value = self.data.get_mut(index).expect("bits mismatch"); Some(unsafe { value.assume_init_mut() }) } - Some(_) => None, - None => panic!( - "Entity {entity:?} is not in the partition ..{:?}", - self.offset + self.bits.len() - ), + _ => None, } } - type IterMut = impl Iterator; - fn iter_mut(self) -> Self::IterMut { iter_mut(self.offset, self.bits, self.data) } - - fn partition_at(self, entity: E) -> (Self, Self) { + fn split_out(&mut self, entity: RawT) -> Self { let index = entity.to_primitive().checked_sub(self.offset).expect("parameter out of bounds"); - assert!(index < self.bits.len()); - let bits = self.bits.split_at(index); - let data = self.data.split_at_mut(index); - ( - StoragePartition { - bits: bits.0, - data: data.0, - offset: self.offset, - _ph: PhantomData, - }, - StoragePartition { - bits: bits.1, - data: data.1, + + if index > self.bits.len() { + return Self { + bits: BitSlice::empty(), + data: &mut [], offset: self.offset + index, _ph: PhantomData, - }, - ) + }; + } + assert!( + index <= self.bits.len(), + "split at {index} for partition {}..{}", + self.offset, + self.offset + self.bits.len() + ); + + let (bits_left, bits_right) = self.bits.split_at(index); + self.bits = bits_left; + + let data_right = self.data.take_mut(index..).expect("index < self.data.len()"); + + Self { + bits: bits_right, + data: data_right, + offset: self.offset + index, + _ph: PhantomData, + } } } -unsafe impl Chunked for VecStorage { - fn get_chunk(&self, start: Self::RawEntity, end: Self::RawEntity) -> Option<&[Self::Comp]> { +impl AccessChunked for VecStorage { + fn get_chunk_mut(&mut self, start: RawT, end: RawT) -> Option<&mut [C]> { let range = start.to_primitive()..end.to_primitive(); let bits = match self.bits.get(range.clone()) { Some(bits) => bits, @@ -247,16 +264,16 @@ unsafe impl Chunked for VecStorage Option<&mut [Self::Comp]> { +impl Chunked for VecStorage { + fn get_chunk(&self, start: RawT, end: RawT) -> Option<&[C]> { let range 
= start.to_primitive()..end.to_primitive(); let bits = match self.bits.get(range.clone()) { Some(bits) => bits, @@ -266,11 +283,9 @@ unsafe impl Chunked for VecStorage = Self::Partition<'u>; @@ -284,10 +299,18 @@ unsafe impl Chunked for VecStorage super::PartitionChunked<'t, E, C> - for StoragePartition<'t, E, C> +impl<'t, RawT: entity::Raw, C: Send + Sync + 'static> AccessChunked + for StoragePartition<'t, RawT, C> +{ + fn get_chunk_mut(&mut self, start: RawT, end: RawT) -> Option<&mut [C]> { + self.by_ref().into_chunk_mut(start, end) + } +} + +impl<'t, RawT: entity::Raw, C: Send + Sync + 'static> PartitionChunked<'t> + for StoragePartition<'t, RawT, C> { - fn get_chunk_mut(&mut self, start: E, end: E) -> Option<&mut [C]> { + fn into_chunk_mut(self, start: RawT, end: RawT) -> Option<&'t mut [C]> { let (start, end) = (start.to_primitive() - self.offset, end.to_primitive() - self.offset); let range = start..end; @@ -306,14 +329,14 @@ impl<'t, E: entity::Raw, C: Send + Sync + 'static> super::PartitionChunked<'t, E Some(unsafe { slice_assume_init_mut(data) }) } - type IntoIterChunksMut = impl Iterator; + type IntoIterChunksMut = impl Iterator; fn into_iter_chunks_mut(self) -> Self::IntoIterChunksMut { // check correctness: // `bits[i]` corresponds to `self.data[i]`, of which the index `i` matches `(last_zero ?? -1) + 1 + i` let iter = new_iter_chunks_mut(self.bits, self.data); let offset = self.offset; iter.map(move |(start_index, chunk)| { - (E::from_primitive(start_index + offset), unsafe { slice_assume_init_mut(chunk) }) + (RawT::from_primitive(start_index + offset), unsafe { slice_assume_init_mut(chunk) }) }) } } diff --git a/src/system.rs b/src/system.rs index d752c47b92..51a8c3b5e6 100644 --- a/src/system.rs +++ b/src/system.rs @@ -13,23 +13,23 @@ use std::any::TypeId; use crate::entity::{ealloc, referrer}; use crate::world; use crate::world::offline; +pub use crate::world::rw::isotope::read::full::ReadIsotopeFull; +pub use crate::world::rw::isotope::read::partial::ReadIsotopePartial; +pub use crate::world::rw::isotope::write::full::WriteIsotopeFull; +pub use crate::world::rw::isotope::write::partial::WriteIsotopePartial; +pub use crate::world::rw::simple::{ReadSimple, WriteSimple}; -mod rw; -pub use rw::{ - Mut, MutChunk, MutFull, MutFullChunk, MutPartition, MutPartitionChunk, Read, ReadChunk, - ReadIsotope, ReadIsotopeRef, ReadSimple, Write, WriteIsotope, WriteSimple, -}; +pub mod access; +pub use access::{Isotope as AccessIsotope, Single as AccessSingle}; -pub(crate) mod accessor; -pub use accessor::{Accessor, Chunked as ChunkedAccessor}; +pub mod iter; +pub use iter::{EntityIterator, IntoZip, Try, Zip, ZipChunked}; pub mod partition; pub use partition::{EntityCreationPartition, Partition}; -mod entity; -#[doc(hidden)] -pub use entity::{entity_iterator, EntityCreatorImpl, EntityDeleterImpl}; -pub use entity::{EntityCreator, EntityDeleter, EntityIterator}; +mod offline_buffer; +pub use offline_buffer::{EntityCreator, EntityDeleter}; pub mod spec; #[doc(inline)] @@ -62,7 +62,7 @@ pub trait Descriptor { /// /// There may be multiple instances of the same implementor type. /// This is meaningful as they may have different states. -pub trait Sendable: Send + Descriptor { +pub trait Sendable: Send + Descriptor + 'static { /// Runs the system. fn run( &mut self, @@ -79,7 +79,7 @@ pub trait Sendable: Send + Descriptor { /// A variant of [`Sendable`] that runs on the main thread only, /// but allows storing [`Send`] states /// and accessing non-[Send] + [Sync] global states. 
-pub trait Unsendable: Descriptor { +pub trait Unsendable: Descriptor + 'static { /// Runs the system. fn run( &mut self, diff --git a/src/system/access.rs b/src/system/access.rs new file mode 100644 index 0000000000..9231b127d1 --- /dev/null +++ b/src/system/access.rs @@ -0,0 +1,8 @@ +//! Access component storages in the world. + +pub mod single; +pub use single::Single; + +pub mod isotope; +pub use isotope::Isotope; +pub(crate) use isotope::{PartialStorageMap, StorageMap, StorageMapMut}; diff --git a/src/system/access/isotope.rs b/src/system/access/isotope.rs new file mode 100644 index 0000000000..7e70a0d3c3 --- /dev/null +++ b/src/system/access/isotope.rs @@ -0,0 +1,351 @@ +//! Traits for accessing a single component storage. +//! +//! See [`AccessIsotope`](Isotope) for documentation. + +use std::marker::PhantomData; +use std::{any, fmt, ops}; + +use derive_trait::derive_trait; + +use crate::storage::Access as _; +use crate::system::AccessSingle; +use crate::{comp, entity, Archetype, Storage as _}; + +/// Accesses multiple storages for the same isotope. +pub struct Isotope { + storages: StorageMapT, + _ph: PhantomData<(A, C)>, +} + +impl Isotope { + pub(crate) fn new(storages: StorageMapT) -> Self { Self { storages, _ph: PhantomData } } +} + +/// Implements the access pattern for multiple isotope storages. +pub trait StorageMap +where + A: Archetype, + C: comp::Isotope, +{ + /// The key from the user, equivalent to [`comp::discrim::Set::Key`] + type Key: fmt::Debug + Copy + 'static; + + /// Retrieves a storage by key. + /// Panics if the key is not supported. + /// + /// For partial accessors, this should return the storage + /// for the discriminant indexed by the key, + /// or panic if the key is out of bounds. + /// + /// For full accessors, this should return the storage for the given discriminant, + /// or initialize the storage lazily. + fn get_storage(&mut self, key: Self::Key) -> &C::Storage; + + /// Equivalent to calling [`Self::get_storage`] for each key. + /// + /// Duplicate keys are allowed because the return type is immutable. + /// The mutability is only used for lazy initialization. + fn get_storage_many(&mut self, keys: [Self::Key; N]) -> [&C::Storage; N]; + + /// Return value of [`iter_keys`](Self::iter_keys). + type IterKeys<'t>: Iterator + 't + where + Self: 't; + /// Iterates over all keys currently accessible from this accessor. + /// + /// For partial accessors, this is the set of keys to the discriminants provided by the user. + /// + /// For full accessors, this is the set of discriminants that have been initialized. + fn iter_keys(&self) -> Self::IterKeys<'_>; + + /// Storage type yielded by [`iter_values`](Self::iter_values). + type IterValue: ops::Deref; + /// Return value of [`iter_values`](Self::iter_values). + type IterValues<'t>: Iterator + 't + where + Self: 't; + /// Iterates over all storages currently accessible from this accessor. + /// + /// For partial accessors, this is the set of keys to the discriminants provided by the user. + /// + /// For full accessors, this is the set of discriminants that have been initialized. + fn iter_values(&self) -> Self::IterValues<'_>; +} + +/// Like [`StorageMap`] but can access a storage without `&mut self`. +/// +/// Only available for partial accessors, +/// because a full accessor needs to mutate its local copy of storage map. 
+pub trait PartialStorageMap: StorageMap +where + A: Archetype, + C: comp::Isotope, +{ + /// Retrieves a storage by key like [`get_storage`](StorageMap::get_storage), + /// but without exclusively borrowing the accessor. + fn get_storage_ref(&self, key: Self::Key) -> &C::Storage; +} + +/// Implements the access pattern for multiple isotope storages. +pub trait StorageMapMut: StorageMap +where + A: Archetype, + C: comp::Isotope, +{ + /// Retrieves a storage by key. + /// Panics if the key is not supported. + /// + /// For partial accessors, this should return the storage + /// for the discriminant indexed by the key, + /// or panic if the key is out of bounds. + /// + /// For full accessors, this should return the storage for the given discriminant, + /// or initialize the storage lazily. + fn get_storage_mut(&mut self, key: Self::Key) -> &mut C::Storage; + + /// Retrieves storages by disjoint keys. + /// Panics if any key is not supported or is equal to another key. + fn get_storage_mut_many( + &mut self, + keys: [Self::Key; N], + ) -> [&mut C::Storage; N]; +} + +#[derive_trait(pub Get{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::Isotope = C; + /// The key for the discriminant set, `Comp::Discrim` for full accessors, typically `usize` for partial accessors + type Key: fmt::Debug + Copy + 'static = KeyT; +})] +impl Isotope +where + A: Archetype, + C: comp::Isotope, + KeyT: fmt::Debug + Copy + 'static, + StorageMapT: StorageMap, +{ + /// Retrieves the component for the given entity and discriminant. + /// + /// This method is infallible for correctly implemented `comp::Must`, + /// which returns the auto-initialized value for missing components. + pub fn get(&mut self, entity: impl entity::Ref, discrim: KeyT) -> &C + where + C: comp::Must, + { + match self.try_get(entity, discrim) { + Some(value) => value, + None => panic!( + "{}: comp::Must<{}> but has no default initializer", + any::type_name::(), + any::type_name::() + ), + } + } + + /// Returns an immutable reference to the component for the specified entity and discriminant, + /// or the default value for isotopes with a default initializer or `None` + /// if the component is not present in the entity. + pub fn try_get(&mut self, entity: impl entity::Ref, key: KeyT) -> Option<&C> { + let storage = self.storages.get_storage(key); + storage.get(entity.id()) + } + + /// Iterates over all known discriminants of the component type. + /// + /// The yielded discriminants are not in any guaranteed order. + pub fn known_discrims<'t>( + &'t self, + ) -> impl Iterator>::Discrim> + 't { + self.storages.iter_keys().map(|(_key, discrim)| discrim) + } + + /// Iterates over all known isotopes for a specific entity. + pub fn get_all<'t, E: entity::Ref>( + &'t self, + entity: E, + ) -> impl Iterator>::Discrim, &'t C)> + 't { + // workaround for https://github.com/rust-lang/rust/issues/65442 + fn without_e( + getter: &impl StorageMap, + id: ::RawEntity, + ) -> impl Iterator + '_ + where + A: Archetype, + C: comp::Isotope, + { + getter + .iter_values() + .filter_map(move |(_key, discrim, storage)| Some((discrim, storage.get(id)?))) + } + + without_e(&self.storages, entity.id()) + } + + /// Iterates over all components of a specific discriminant. + /// + /// Note that the initializer is not called for lazy-initialized isotope components. + /// To avoid confusing behavior, do not use this function if [`C: comp::Must`](comp::Must). 
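// A hedged snippet contrasting the two key flavours (not part of this patch).
// Assumed bindings inside a system body: `ent` is an entity reference,
// `full` is a mutable `system::ReadIsotopeFull<TestArch, IsoNoInit>` binding and
// `partial` is a mutable `system::ReadIsotopePartial<TestArch, IsoNoInit>` binding
// requested with `#[dynec(isotope(discrim = ...))]` covering `[TestDiscrim1(11), TestDiscrim1(13)]`.

// Full accessor: keyed by the discriminant value itself.
let _ = full.try_get(ent, TestDiscrim1(11));

// Partial accessor: keyed by the index into the requested discriminant list,
// so key `0` refers to `TestDiscrim1(11)` here.
let _ = partial.try_get(ent, 0);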
+ pub fn iter<'t>( + &'t mut self, + key: KeyT, + ) -> impl Iterator, &'t C)> { + let storage = self.storages.get_storage(key); + storage.iter().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) + } + + /// Splits the accessor into multiple mmutable [`AccessSingle`] accessors + /// so that they can be used independently. + pub fn split<'t, const N: usize>( + &'t mut self, + keys: [KeyT; N], + ) -> [AccessSingle>::Storage> + 't>; + N] { + let storages = self.storages.get_storage_many(keys); + storages.map(|storage| AccessSingle::new(storage)) + } +} + +#[derive_trait(pub GetRef{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::Isotope = C; + /// The key for the discriminant set, `Comp::Discrim` for full accessors, typically `usize` for partial accessors + type Key: fmt::Debug + Copy + 'static = KeyT; +})] +impl Isotope +where + A: Archetype, + C: comp::Isotope, + KeyT: fmt::Debug + Copy + 'static, + StorageMapT: PartialStorageMap, +{ + /// Retrieves the component for the given entity and discriminant. + /// + /// Identical to [`get`](Isotope::get) but does not require a mutable receiver. + pub fn get_ref(&self, entity: impl entity::Ref, key: KeyT) -> &C + where + C: comp::Must, + { + match self.try_get_ref(entity, key) { + Some(value) => value, + None => panic!( + "{}: comp::Must<{}> but has no default initializer", + any::type_name::(), + any::type_name::() + ), + } + } + + /// Returns an immutable reference to the component for the specified entity and discriminant, + /// or the default value for isotopes with a default initializer or `None` + /// if the component is not present in the entity. + /// + /// Identical to [`try_get`](Isotope::try_get) but does not require a mutable receiver. + pub fn try_get_ref>(&self, entity: E, key: KeyT) -> Option<&C> { + let storage = self.storages.get_storage_ref(key); + storage.get(entity.id()) + } + + /// Iterates over all components of a specific discriminant. + /// + /// Identical to [`iter`](Isotope::iter) but does not require a mutable receiver. + pub fn iter_ref<'t>( + &'t self, + key: KeyT, + ) -> impl Iterator, &'t C)> { + let storage = self.storages.get_storage_ref(key); + storage.iter().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) + } +} + +#[derive_trait(pub GetMut{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::Isotope = C; + /// The key for the discriminant set, `Comp::Discrim` for full accessors, typically `usize` for partial accessors + type Key: fmt::Debug + Copy + 'static = KeyT; +})] +impl Isotope +where + A: Archetype, + C: comp::Isotope, + KeyT: fmt::Debug + Copy + 'static, + StorageMapT: StorageMapMut, +{ + /// Retrieves the component for the given entity and discriminant. + /// + /// This method is infallible for correctly implemented `comp::Must`, + /// which returns the auto-initialized value for missing components. 
+ pub fn get_mut(&mut self, entity: impl entity::Ref, discrim: KeyT) -> &mut C + where + C: comp::Must, + { + match self.try_get_mut(entity, discrim) { + Some(value) => value, + None => panic!( + "{}: comp::Must<{}> but has no default initializer", + any::type_name::(), + any::type_name::() + ), + } + } + + /// Returns a mutable reference to the component for the specified entity and discriminant, + /// automatically initialized with the default initializer if present, + /// or `None` if the component is unset and has no default initializer. + /// + /// Note that this method returns `Option<&mut C>`, not `&mut Option`. + /// This means setting the Option itself to `Some`/`None` will not modify any stored value. + /// Use [`set`](Isotope::set) to add/remove a component. + pub fn try_get_mut( + &mut self, + entity: impl entity::Ref, + key: KeyT, + ) -> Option<&mut C> { + let storage = self.storages.get_storage_mut(key); + storage.get_mut(entity.id()) + } + + /// Overwrites the component for the specified entity and discriminant. + /// + /// Passing `None` to this method removes the component from the entity. + pub fn set>( + &mut self, + entity: E, + key: KeyT, + value: Option, + ) -> Option { + let storage = self.storages.get_storage_mut(key); + storage.set(entity.id(), value) + } + + /// Iterates over mutable references to all components of a specific discriminant. + pub fn iter_mut<'t>( + &'t mut self, + key: KeyT, + ) -> impl Iterator, &'t mut C)> { + let storage = self.storages.get_storage_mut(key); + storage.iter_mut().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) + } + + /// Splits the accessor into multiple mutable [`AccessSingle`] accessors + /// so that they can be used in entity iteration independently. + pub fn split_isotopes<'t, const N: usize>( + &'t mut self, + keys: [KeyT; N], + ) -> [AccessSingle< + A, + C, + impl ops::DerefMut>::Storage> + 't, + >; N] { + let storages = self.storages.get_storage_mut_many(keys); + storages.map(|storage| AccessSingle::new(storage)) + } +} + +#[cfg(test)] +mod tests; diff --git a/src/system/access/isotope/tests.rs b/src/system/access/isotope/tests.rs new file mode 100644 index 0000000000..a672bb7b0d --- /dev/null +++ b/src/system/access/isotope/tests.rs @@ -0,0 +1,208 @@ +//! Tests isotope storage access. 
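// A hedged sketch of the mutable isotope API above (not part of this patch), with
// assumed bindings: `iso` is a mutable full write accessor such as
// `system::WriteIsotopeFull<TestArch, IsoNoInit>` and `ent` is an entity reference,
// mirroring the tests in this new file.

// Mutate an existing isotope in place, if present.
if let Some(value) = iso.try_get_mut(ent, TestDiscrim1(11)) {
    value.0 += 1;
}

// `set` with `Some` inserts or overwrites; `None` removes the isotope from the entity.
iso.set(ent, TestDiscrim1(19), Some(IsoNoInit(29)));
let removed = iso.set(ent, TestDiscrim1(11), None);
assert!(removed.is_some());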
+ +#![allow(clippy::ptr_arg)] + +use crate::test_util::*; +use crate::{system, system_test, tracer, world}; + +fn isotope_discrim_read_test_system( + mut iso1: impl system::access::isotope::Get, + mut iso2: impl system::access::isotope::Get, + initials: &InitialEntities, +) { + let ent = initials.strong.as_ref().expect("initials.strong is None"); + + { + let iso = iso1.try_get(ent, TestDiscrim1(11)); + assert_eq!(iso, Some(&IsoNoInit(3))); + } + + // should not panic on nonexistent storages + { + let iso = iso1.try_get(ent, TestDiscrim1(17)); + assert_eq!(iso, None); + } + + // should return default value for autoinit isotopes + { + let iso = iso2.try_get(ent, TestDiscrim2(71)); + assert_eq!(iso, Some(&IsoWithInit(73))); + } + + let map = iso1.get_all(ent); + let mut map_vec: Vec<(TestDiscrim1, &IsoNoInit)> = map.collect(); + map_vec.sort_by_key(|(TestDiscrim1(discrim), _)| *discrim); + assert_eq!(map_vec, vec![(TestDiscrim1(11), &IsoNoInit(3)), (TestDiscrim1(13), &IsoNoInit(5))]); +} + +fn isotope_discrim_test_world(system: impl system::Sendable) -> world::World { + let mut world = system_test!(system;); + + let ent = world.create(crate::comps![@(crate) TestArch => + @(TestDiscrim1(11), IsoNoInit(3)), + @(TestDiscrim1(13), IsoNoInit(5)), + ]); + world.get_global::().strong = Some(ent); + + world +} + +#[test] +fn test_full_isotope_discrim_write() { + #[system(dynec_as(crate))] + fn test_sys( + iso1: system::WriteIsotopeFull, + iso2: system::WriteIsotopeFull, + #[dynec(global)] initials: &InitialEntities, + ) { + isotope_discrim_read_test_system(iso1, iso2, initials); + } + + let mut world = isotope_discrim_test_world(test_sys.build()); + + world.execute(&tracer::Log(log::Level::Trace)); +} + +#[test] +fn test_full_isotope_discrim_read() { + #[system(dynec_as(crate))] + fn test_system( + iso1: system::ReadIsotopeFull, + iso2: system::ReadIsotopeFull, + #[dynec(global)] initials: &InitialEntities, + ) { + isotope_discrim_read_test_system(iso1, iso2, initials) + } + + let mut world = isotope_discrim_test_world(test_system.build()); + world.execute(&tracer::Log(log::Level::Trace)); +} + +#[test] +fn test_partial_isotope_discrim_write() { + partial_isotope_discrim_write( + vec![TestDiscrim1(7), TestDiscrim1(11), TestDiscrim1(17), TestDiscrim1(19)], + vec![ + (0, Some(IsoNoInit(2)), Some(None)), + (1, Some(IsoNoInit(3)), Some(Some(IsoNoInit(23)))), + (2, None, None), + (3, None, Some(Some(IsoNoInit(29)))), + ], + vec![(TestDiscrim1(11), IsoNoInit(23)), (TestDiscrim1(19), IsoNoInit(29))], + ); +} + +#[test] +fn test_partial_isotope_discrim_read() { + partial_isotope_discrim_read( + vec![TestDiscrim1(11), TestDiscrim1(17)], + vec![(0, Some(IsoNoInit(3))), (1, None)], + vec![(TestDiscrim1(11), IsoNoInit(3))], + ); +} + +#[test] +#[should_panic = "The index 42 is not available in the isotope request for \ + dynec::test_util::TestArch/dynec::test_util::isotope_comps::IsoNoInit"] +fn test_partial_isotope_discrim_read_panic() { + partial_isotope_discrim_read(vec![TestDiscrim1(11)], vec![(42, None)], vec![]); +} + +fn partial_isotope_discrim_read( + req_discrims: Vec, + single_expects: Vec<(usize, Option)>, + expect_all: Vec<(TestDiscrim1, IsoNoInit)>, +) { + #[system(dynec_as(crate))] + fn test_system( + #[dynec(param)] _req_discrims: &Vec, + #[dynec(param)] single_expects: &Vec<(usize, Option)>, + #[dynec(param)] expect_all: &Vec<(TestDiscrim1, IsoNoInit)>, + #[dynec(isotope(discrim = _req_discrims))] mut iso1: system::ReadIsotopePartial< + TestArch, + IsoNoInit, + >, + #[dynec(global)] initials: 
&InitialEntities, + ) { + let ent = initials.strong.as_ref().expect("initials.strong is None"); + + for (discrim, expect) in single_expects { + let iso = iso1.try_get(ent, *discrim); + assert_eq!(iso, expect.as_ref()); + } + + // should only include requested discriminants + let map = iso1.get_all(ent); + let mut map_vec: Vec<(TestDiscrim1, &IsoNoInit)> = map.collect(); + map_vec.sort_by_key(|(TestDiscrim1(discrim), _)| *discrim); + let expect_all = + expect_all.iter().map(|(discrim, iso)| (*discrim, iso)).collect::>(); + assert_eq!(map_vec, expect_all); + } + + let mut world = system_test!(test_system.build(req_discrims, single_expects, expect_all);); + + let ent = world.create(crate::comps![@(crate) TestArch => + @(TestDiscrim1(11), IsoNoInit(3)), + @(TestDiscrim1(13), IsoNoInit(5)), + ]); + world.get_global::().strong = Some(ent); + + world.execute(&tracer::Log(log::Level::Trace)); +} + +#[test] +#[should_panic = "The index 42 is not available in the isotope request for \ + dynec::test_util::TestArch/dynec::test_util::isotope_comps::IsoNoInit"] +fn test_partial_isotope_discrim_write_panic() { + partial_isotope_discrim_write(vec![TestDiscrim1(11)], vec![(42, None, None)], vec![]); +} + +type SingleExpectUpdate = (usize, Option, Option>); + +fn partial_isotope_discrim_write( + req_discrims: Vec, + single_expect_updates: Vec, + expect_all: Vec<(TestDiscrim1, IsoNoInit)>, +) { + #[system(dynec_as(crate))] + fn test_system( + #[dynec(param)] _req_discrims: &Vec, + #[dynec(param)] single_expect_updates: &mut Vec, + #[dynec(param)] expect_all: &Vec<(TestDiscrim1, IsoNoInit)>, + #[dynec(isotope(discrim = _req_discrims))] mut iso1: system::WriteIsotopePartial< + TestArch, + IsoNoInit, + >, + #[dynec(global)] initials: &InitialEntities, + ) { + let ent = initials.strong.as_ref().expect("initials.strong is None"); + + for (discrim, mut expect, update) in single_expect_updates.drain(..) { + let iso = iso1.try_get_mut(ent, discrim); + assert_eq!(iso, expect.as_mut()); + if let Some(update) = update { + iso1.set(ent, discrim, update); + } + } + + // should only include requested discriminants + let map = iso1.get_all(ent); + let map_vec: Vec<(TestDiscrim1, &IsoNoInit)> = map.collect(); + let expect_all = + expect_all.iter().map(|(discrim, iso)| (*discrim, iso)).collect::>(); + assert_eq!(map_vec, expect_all); + } + + let mut world = + system_test!(test_system.build(req_discrims, single_expect_updates, expect_all);); + + let ent = world.create(crate::comps![@(crate) TestArch => + @(TestDiscrim1(7), IsoNoInit(2)), + @(TestDiscrim1(11), IsoNoInit(3)), + @(TestDiscrim1(13), IsoNoInit(5)), + ]); + world.get_global::().strong = Some(ent); + + world.execute(&tracer::Log(log::Level::Trace)); +} diff --git a/src/system/access/single.rs b/src/system/access/single.rs new file mode 100644 index 0000000000..9cc4bb1d5b --- /dev/null +++ b/src/system/access/single.rs @@ -0,0 +1,441 @@ +//! Traits for accessing a single component storage. +//! +//! See [`AccessSingle`](Single) for documentation. + +use std::marker::PhantomData; +use std::{any, ops}; + +use derive_trait::derive_trait; +use rayon::prelude::ParallelIterator; + +use crate::entity::{self, ealloc, Raw as _}; +use crate::storage::{self, Access as _, Chunked as _}; +use crate::{comp, util, Archetype, Storage}; + +/// Access a single component storage, i.e. a simple archetyped component +/// or an isotope archetyped component for a single discriminant. 
+pub struct Single { + storage: StorageRef, + _ph: PhantomData<(A, C)>, +} + +impl Single { + pub(crate) fn new(storage: StorageRef) -> Self { Self { storage, _ph: PhantomData } } +} + +#[derive_trait(pub Get{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope, + StorageRef: ops::Deref + Sync, + StorageRef::Target: Storage::RawEntity, Comp = C>, +{ + /// Returns an immutable reference to the component for the specified entity, + /// or `None` if the component is not present in the entity. + pub fn try_get(&self, entity: impl entity::Ref) -> Option<&C> { + self.storage.get(entity.id()) + } + + /// Iterates over all initialized components in this storage. + pub fn iter<'t>(&'t self) -> impl Iterator, &'t C)> + 't { + self.storage.iter().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) + } +} + +#[derive_trait(pub MustGet{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope + comp::Must = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + StorageRef: ops::Deref + Sync, + StorageRef::Target: Storage::RawEntity, Comp = C>, +{ + /// Returns an immutable reference to the component for the specified entity. + /// + /// This function is infallible, assuming [`comp::Must`] is only implemented + /// for components with [`Required`](comp::Presence::Required) presence. + /// + /// # Panics + /// This function panics if the entity is not fully initialized yet. + /// This happens when an entity is newly created and the cycle hasn't joined yet. + pub fn get(&self, entity: impl entity::Ref) -> &C { + match self.try_get(entity) { + Some(comp) => comp, + None => panic!( + "Component {}/{} implements comp::Must but is not present", + any::type_name::(), + any::type_name::() + ), + } + } + + /// Iterates over chunks of entities in parallel. + /// + /// This returns a [rayon `ParallelIterator`](rayon::iter::ParallelIterator) + /// that processes different chunks of entities + /// + /// Requires [`comp::Must`] because this iterator assumes that + /// existence in `snapshot` implies existence in storage. + pub fn par_iter<'t>( + &'t self, + snapshot: &'t ealloc::Snapshot<::RawEntity>, + ) -> impl ParallelIterator, &'t C)> { + rayon::iter::split(snapshot.as_slice(), |slice| slice.split()).flat_map_iter(|slice| { + slice.iter_chunks().flat_map(<::RawEntity as entity::Raw>::range).map( + |id| { + let entity = entity::TempRef::new(id); + let data = self.get(entity); + (entity, data) + }, + ) + }) + } +} + +#[derive_trait(pub GetChunked { + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope, + StorageRef: ops::Deref + Sync, + StorageRef::Target: storage::Chunked::RawEntity, Comp = C>, +{ + /// Returns the chunk of components as a slice. + /// + /// # Panics + /// This function panics if any component in the chunk is missing. + /// In general, users should not get an [`entity::TempRefChunk`] + /// that includes an uninitialized entity, + /// so panic is basically impossible if [`comp::Must`] was implemented correctly. 
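// A hedged read-side counterpart to the write test in `single/tests.rs` below
// (not part of this patch); the `ReadSimple` type parameters and the availability of
// `get`/`iter` on it are assumptions mirroring `WriteSimple` in that test.
#[system(dynec_as(crate))]
fn read_simple(
    comp5: system::ReadSimple<TestArch, Simple5RequiredNoInit>,
    #[dynec(global)] initials: &InitialEntities,
) {
    let ent = initials.strong.as_ref().expect("initials.strong is None");

    // Infallible because `Simple5RequiredNoInit` has `Required` presence (`comp::Must`).
    let value = comp5.get(ent);
    assert_eq!(value.0, 7);

    // Iterate all initialized components in entity index order.
    for (_entity, comp) in comp5.iter() {
        assert_eq!(comp.0, 7);
    }
}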
+ pub fn get_chunk(&self, chunk: entity::TempRefChunk) -> &[C] { + self.storage.get_chunk(chunk.start, chunk.end).expect("chunk is not completely filled") + } +} + +#[derive_trait(pub MustGetChunked{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope + comp::Must = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + StorageRef: ops::Deref + Sync, + StorageRef::Target: storage::Chunked::RawEntity, Comp = C>, +{ + /// Iterates over chunks of entities in parallel. + /// + /// This returns a [rayon `ParallelIterator`](rayon::iter::ParallelIterator) + /// that processes different chunks of entities + pub fn par_iter_chunks<'t>( + &'t self, + snapshot: &'t ealloc::Snapshot<::RawEntity>, + ) -> impl ParallelIterator, &'t [C])> { + rayon::iter::split(snapshot.as_slice(), |slice| slice.split()).flat_map_iter(|slice| { + // we don't need to split over the holes in parallel, + // because splitting the total space is more important than splitting the holes + slice.iter_chunks().map(|chunk| { + let chunk = entity::TempRefChunk::new(chunk.start, chunk.end); + let data = self.get_chunk(chunk); + (chunk, data) + }) + }) + } +} + +#[derive_trait(pub GetMut{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope, + StorageRef: ops::DerefMut + Sync, + StorageRef::Target: storage::Access::RawEntity, Comp = C>, +{ + /// Returns a mutable reference to the component for the specified entity, + /// or `None` if the component is not present in the entity. + /// + /// Note that this function returns `Option<&mut C>`, not `&mut Option`. + /// This means setting the Option itself to `Some`/`None` will not modify any stored value. + /// Use [`set`](Single::set) to add/remove a component. + pub fn try_get_mut(&mut self, entity: impl entity::Ref) -> Option<&mut C> { + self.storage.get_mut(entity.id()) + } + + /// Iterates over mutable references to all initialized components in this storage. + pub fn iter_mut<'t>( + &'t mut self, + ) -> impl Iterator, &'t mut C)> + 't { + self.storage.iter_mut().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) + } +} + +#[derive_trait(pub MustGetMut{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope + comp::Must = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + StorageRef: ops::DerefMut + Sync, + StorageRef::Target: storage::Access::RawEntity, Comp = C>, +{ + /// Returns a mutable reference to the component for the specified entity. + /// + /// This function is infallible, assuming [`comp::Must`] is only implemented + /// for components with [`Required`](comp::Presence::Required) presence. + /// + /// # Panics + /// This function panics if the entity is not fully initialized yet. + /// This happens when an entity is newly created and the cycle hasn't joined yet. 
+ pub fn get_mut(&mut self, entity: impl entity::Ref) -> &mut C { + match self.try_get_mut(entity) { + Some(comp) => comp, + None => panic!( + "Component {}/{} implements comp::Must but is not present", + any::type_name::(), + any::type_name::(), + ), + } + } +} + +#[derive_trait(pub Set{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope, + StorageRef: ops::DerefMut + Sync, + StorageRef::Target: Storage::RawEntity, Comp = C>, +{ + /// Overwrites the component for the specified entity. + /// + /// Passing `None` to this function removes the component from the entity. + /// This leads to a panic for components with [`comp::Presence::Required`] presence. + pub fn set(&mut self, entity: impl entity::Ref, value: Option) -> Option { + self.storage.set(entity.id(), value) + } +} + +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope, + StorageRef: ops::DerefMut + Sync, + StorageRef::Target: Storage, +{ + /// Converts the accessor to a mutably borrowed partition that covers all entities. + /// + /// The actual splitting partitions can be obtained + /// by calling [`split_at`](Single::split_at) on the returned value. + pub fn as_partition( + &mut self, + ) -> Single::Partition<'_>>> { + Single { storage: util::OwnedDeref(self.storage.as_partition()), _ph: PhantomData } + } +} + +#[derive_trait(pub MustSet{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope + comp::Must = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + StorageRef: ops::DerefMut + Sync, + StorageRef::Target: Storage::RawEntity, Comp = C>, +{ + /// Iterates over all entities in parallel. + /// + /// This returns a rayon [`ParallelIterator`] that processes different entities. + pub fn par_iter_mut<'t>( + &'t mut self, + snapshot: &'t ealloc::Snapshot<::RawEntity>, + ) -> impl ParallelIterator, &'t mut C)> { + rayon::iter::split((self.as_partition(), snapshot.as_slice()), |(partition, slice)| { + let Some(midpt) = slice.midpoint_for_split() else { return ((partition, slice), None) }; + let (slice_left, slice_right) = slice.split_at(midpt); + let (partition_left, partition_right) = partition.split_at(midpt); + ((partition_left, slice_left), Some((partition_right, slice_right))) + }) + .flat_map_iter(|(partition, _slice)| partition.into_iter_mut()) + } +} + +impl<'t, A, C, StorageT> Single> +where + A: Archetype, + C: comp::SimpleOrIsotope, + StorageT: storage::Partition<'t, RawEntity = A::RawEntity, Comp = C>, +{ + /// Splits the accessor into two partitions. + /// + /// The first partition accesses all entities less than `entity`; + /// the second partition accesses all entities greater than or equal to `entity`. + pub fn split_at(mut self, entity: A::RawEntity) -> (Self, Self) { + let right = self.split_out(entity); + (self, right) + } + + /// Splits the accessor into two partitions without moving ownership. + /// + /// Entities less than `entity` are retained in `self`, + /// while entities greater than or equal to `entity` + /// are accessible through the returned partition. 
+ pub fn split_out(&mut self, entity: A::RawEntity) -> Self { + let right = self.storage.0.split_out(entity); + Self { storage: util::OwnedDeref(right), _ph: PhantomData } + } + + /// Gets the component value of an entity accessible by this partition, + /// preserving the lifetime `'t` of this partition object. + pub fn try_into_mut(self, entity: impl entity::Ref) -> Option<&'t mut C> { + self.storage.0.into_mut(entity.id()) + } +} + +impl<'t, A, C, StorageT> Single> +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + StorageT: storage::Partition<'t, RawEntity = A::RawEntity, Comp = C>, +{ + /// Gets the component value of an entity accessible by this partition, + /// preserving the lifetime `'t` of this partition object. + /// + /// This function is infallible, assuming [`comp::Must`] is only implemented + /// for components with [`Required`](comp::Presence::Required) presence. + /// + /// # Panics + /// This function panics if the entity is not fully initialized yet. + /// This happens when an entity is newly created and the cycle hasn't joined yet. + pub fn into_mut(self, entity: impl entity::Ref) -> &'t mut C { + match self.try_into_mut(entity) { + Some(comp) => comp, + None => panic!( + "Component {}/{} implements comp::Must but is not present", + any::type_name::(), + any::type_name::(), + ), + } + } + + /// Iterates over mutable references to all initialized components in this partition. + pub fn into_iter_mut(self) -> impl Iterator, &'t mut C)> { + self.storage.0.into_iter_mut().map(|(entity, data)| (entity::TempRef::new(entity), data)) + } +} + +#[derive_trait(pub GetMutChunked{ + /// The archetype that this accessor retrieves for. + type Arch: Archetype = A; + /// The component that this accessor retrieves. + type Comp: comp::SimpleOrIsotope + comp::Must = C; +})] +impl Single +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + StorageRef: ops::DerefMut + Sync, + StorageRef::Target: storage::Chunked::RawEntity, Comp = C>, + for<'u> ::Partition<'u>: storage::PartitionChunked<'u>, +{ + /// Returns the chunk of components as a mutable slice. + /// + /// # Panics + /// This function panics if any component in the chunk is missing. + /// In general, if [`comp::Must`] is implemented correctly, + /// users should not obtain an [`entity::TempRefChunk`] that includes an uninitialized entity, + /// so panic is practically impossible. + pub fn get_chunk_mut(&mut self, chunk: entity::TempRefChunk) -> &[C] { + self.storage.get_chunk(chunk.start, chunk.end).expect("chunk is not completely filled") + } + + /// Iterates over all entity chunks in parallel. + /// + /// This returns a rayon [`ParallelIterator`] that processes different chunks of entities. 
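// A hedged sketch of driving manual parallelism with the splitting primitive above
// (not part of this patch). Assumptions: `comp` is a mutable
// `system::WriteSimple<TestArch, Simple5RequiredNoInit>` accessor, `mid` is a raw
// entity ID inside the populated range, and the partition halves are `Send`
// as required by the `Partition` trait.
let part = comp.as_partition();
let (left, right) = part.split_at(mid);

// Each half covers a disjoint entity range, so both can be mutated concurrently.
rayon::join(
    || {
        for (_entity, value) in left.into_iter_mut() {
            value.0 += 1;
        }
    },
    || {
        for (_entity, value) in right.into_iter_mut() {
            value.0 += 1;
        }
    },
);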
+ pub fn par_iter_chunks_mut<'t>( + &'t mut self, + snapshot: &'t ealloc::Snapshot<::RawEntity>, + ) -> impl ParallelIterator, &'t mut [C])> { + rayon::iter::split((self.as_partition(), snapshot.as_slice()), |(partition, slice)| { + let Some(midpt) = slice.midpoint_for_split() else { return ((partition, slice), None) }; + let (slice_left, slice_right) = slice.split_at(midpt); + let (partition_left, partition_right) = partition.split_at(midpt); + ((partition_left, slice_left), Some((partition_right, slice_right))) + }) + .flat_map_iter(|(partition, _slice)| partition.into_iter_chunks_mut()) + } +} + +impl<'t, A, C, StorageT> Single> +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + StorageT: storage::PartitionChunked<'t, RawEntity = A::RawEntity, Comp = C>, +{ + /// Returns the chunk of components as a mutable slice, + /// preserving the lifetime `'t` of this partition object. + /// + /// # Panics + /// This function panics if any component in the chunk is missing. + /// In general, if [`comp::Must`] is implemented correctly, + /// users should not obtain an [`entity::TempRefChunk`] that includes an uninitialized entity, + /// so panic is practically impossible. + pub fn into_chunk_mut(self, chunk: entity::TempRefChunk) -> &'t mut [C] { + match self.storage.0.into_chunk_mut(chunk.start, chunk.end) { + Some(comp) => comp, + None => panic!( + "Component {}/{} implements comp::Must but is not present", + any::type_name::(), + any::type_name::() + ), + } + } + + /// Iterates over mutable references to all initialized components in this storage. + pub fn into_iter_chunks_mut( + self, + ) -> impl Iterator, &'t mut [C])> { + self.storage + .0 + .into_iter_chunks_mut() + .map(|(entity, data)| (entity::TempRefChunk::new(entity, entity.add(data.len())), data)) + } +} + +#[cfg(test)] +mod tests; diff --git a/src/system/access/single/tests.rs b/src/system/access/single/tests.rs new file mode 100644 index 0000000000..d7511d8981 --- /dev/null +++ b/src/system/access/single/tests.rs @@ -0,0 +1,30 @@ +//! Tests simple storage access. + +use crate::test_util::*; +use crate::{system, system_test, tracer}; + +#[test] +fn test_simple_fetch() { + #[system(dynec_as(crate))] + fn test_system( + mut comp5: system::WriteSimple, + #[dynec(global)] initials: &InitialEntities, + ) { + let ent = initials.strong.as_ref().expect("initials.strong is None"); + + let comp = comp5.get_mut(ent); + assert_eq!(comp.0, 7); + comp.0 += 13; + } + + let mut world = system_test!(test_system.build();); + + let ent = world.create(crate::comps![@(crate) TestArch => Simple5RequiredNoInit(7)]); + world.get_global::().strong = Some(ent.clone()); + + world.execute(&tracer::Log(log::Level::Trace)); + + let storage = world.components.get_simple_storage::(); + let comp = storage.try_get(ent); + assert_eq!(comp, Some(&Simple5RequiredNoInit(20))); +} diff --git a/src/system/accessor.rs b/src/system/accessor.rs deleted file mode 100644 index a5eb52041f..0000000000 --- a/src/system/accessor.rs +++ /dev/null @@ -1,217 +0,0 @@ -//! Abstraction of entity storages for iteration. - -use std::marker::PhantomData; -use std::ops; - -use super::{rw, Read}; -use crate::storage::Chunked as _; -use crate::{comp, entity, storage, Archetype}; - -/// An accessor that can be used in an entity iteration. -/// -/// # Safety -/// Implementors must ensure that [`entity`](Self::entity) is deterministic and [one-to-one][injective]. 
-/// -/// Multiplexing implementors (such as tuples or composite accessors) -/// preserve this invariant automatically since they are just destructuring to independent storages. -/// Storage delegations preserve this invariant automatically -/// since [`Storage::get_mut`](crate::storage::Storage::get_mut) -/// has the same safety invariants -/// (see [`Storage` § Safety](crate::storage::Storage#safety)). -/// -/// [injective]: https://en.wikipedia.org/wiki/Injective_function -pub unsafe trait Accessor { - /// Return value of [`entity`](Self::entity). - type Entity<'t>: 't - where - Self: 't; - /// Accesses this storage for a specific entity. - /// - /// # Safety - /// The lifetime of the return value is arbitrarily defined by the caller. - /// This effectively disables the borrow checker for return values. - /// The caller must ensure that return values do not outlive `self`, - /// and the function result is dropped before it is called again with the same `id`. - unsafe fn entity<'this, 'e, 'ret>( - this: &'this mut Self, - id: entity::TempRef<'e, A>, - ) -> Self::Entity<'ret> - where - Self: 'ret; -} - -/// An accessor that can be used in chunked entity iteration. -/// -/// # Safety -/// Implementors must ensure that [`chunk`](Self::chunk) is deterministic, -/// and non-overlapping entity chunks return non-overlapping values. -/// This is equivalent to (and should delegate to) -/// [`crate::storage::Chunked::get_chunk`]/[`crate::storage::Chunked::get_chunk_mut`]. -/// -/// Multiplexing implementors (such as tuples or composite accessors) -/// preserve this invariant automatically since they are just destructuring to independent storages. -/// Storage delegations preserve this invariant automatically -/// since [`crate::storage::Chunked::get_chunk_mut`] has the same safety invariants -/// (see [`Chunked` § Safety](crate::storage::Chunked#safety)). -/// -/// [injective]: https://en.wikipedia.org/wiki/Injective_function -pub unsafe trait Chunked { - /// Return value of [`chunk`](Self::chunk). - type Chunk<'t>: 't - where - Self: 't; - /// Accesses this storage for a specific chunk of entities. - /// - /// # Safety - /// The lifetime of the return value is arbitrarily defined by the caller. - /// This effectively disables the borrow checker for return values. - /// The caller must ensure that return values do not outlive `self`, - /// and the function result is dropped before it is called again with an overlapping `chunk`. - unsafe fn chunk<'ret>(this: &mut Self, chunk: entity::TempRefChunk<'_, A>) - -> Self::Chunk<'ret>; -} - -/// Return value of [`Read::try_access`]. -pub struct TryRead(pub(super) T, pub(super) PhantomData<(A, C)>); - -unsafe impl Accessor for TryRead -where - A: Archetype, - C: 'static, - T: ops::Deref, - T::Target: rw::Read, -{ - type Entity<'ret> = Option<&'ret C> where Self: 'ret; - - unsafe fn entity<'ret>(this: &mut Self, id: entity::TempRef<'_, A>) -> Self::Entity<'ret> - where - Self: 'ret, - { - Some(&*(this.0.try_get(id)? as *const C)) - } -} - -/// Return value of [`Read::access`]. 
-pub struct MustRead(pub(super) T, pub(super) PhantomData<(A, C)>); - -unsafe impl<'t, A: Archetype, C: comp::Must + 'static, T: rw::Read> Accessor - for MustRead -{ - type Entity<'ret> = &'ret C where Self: 'ret; - - unsafe fn entity<'this, 'e, 'ret>( - this: &'this mut Self, - id: entity::TempRef<'e, A>, - ) -> Self::Entity<'ret> - where - Self: 'ret, - { - &*(this.0.get(id) as *const C) - } -} - -pub struct MustReadChunkSimple<'t, A: Archetype, C: comp::Simple> { - pub(crate) storage: &'t C::Storage, -} - -unsafe impl<'t, A: Archetype, C: comp::Simple + comp::Must + 'static> Chunked - for MustReadChunkSimple<'t, A, C> -where - C::Storage: storage::Chunked, -{ - type Chunk<'ret> = &'ret [C] where Self: 'ret; - - unsafe fn chunk<'this, 'e, 'ret>( - this: &'this mut Self, - chunk: entity::TempRefChunk<'e, A>, - ) -> Self::Chunk<'ret> - where - Self: 'ret, - { - &*(this - .storage - .get_chunk(chunk.start, chunk.end) - .expect("TempRefChunk points to missing entities") as *const [C]) - } -} - -pub struct MustWriteChunkSimple<'t, A: Archetype, C: comp::Simple> { - pub(crate) storage: &'t mut C::Storage, -} - -unsafe impl<'t, A: Archetype, C: comp::Simple + comp::Must + 'static> Chunked - for MustWriteChunkSimple<'t, A, C> -where - C::Storage: storage::Chunked, -{ - type Chunk<'ret> = &'ret mut [C] where Self: 'ret; - - unsafe fn chunk<'this, 'e, 'ret>( - this: &'this mut Self, - chunk: entity::TempRefChunk<'e, A>, - ) -> Self::Chunk<'ret> - where - Self: 'ret, - { - &mut *(this - .storage - .get_chunk_mut(chunk.start, chunk.end) - .expect("TempRefChunk points to missing entities") as *mut [C]) - } -} - -/// Return value of [`system::Write::try_access_mut`](crate::system::Write::try_access_mut). -pub struct TryWrite(pub(super) T, pub(super) PhantomData<(A, C)>); - -unsafe impl<'t, A: Archetype, C: 'static, T: rw::Write> Accessor - for TryWrite -{ - type Entity<'ret> = Option<&'ret mut C> where Self: 'ret; - - unsafe fn entity<'this, 'e, 'ret>( - this: &'this mut Self, - id: entity::TempRef<'e, A>, - ) -> Self::Entity<'ret> - where - Self: 'ret, - { - Some(&mut *(this.0.try_get_mut(id)? as *mut C)) - } -} - -/// Return value of [`system::Write::access_mut`](crate::system::Write::access_mut). -pub struct MustWrite(pub(super) T, pub(super) PhantomData<(A, C)>); - -unsafe impl<'t, A: Archetype, C: comp::Must + 'static, T: rw::Write> Accessor - for MustWrite -{ - type Entity<'ret> = &'ret mut C where Self: 'ret; - - unsafe fn entity<'this, 'e, 'ret>( - this: &'this mut Self, - id: entity::TempRef<'e, A>, - ) -> Self::Entity<'ret> - where - Self: 'ret, - { - &mut *(this.0.get_mut(id) as *mut C) - } -} - -unsafe impl<'t, A: Archetype, C: comp::Must + 'static, T: rw::MutChunk> Chunked - for MustWrite -{ - type Chunk<'ret> = &'ret mut [C] where Self: 'ret; - - unsafe fn chunk<'this, 'e, 'ret>( - this: &'this mut Self, - chunk: entity::TempRefChunk<'e, A>, - ) -> Self::Chunk<'ret> - where - Self: 'ret, - { - &mut *(this.0.get_chunk_mut(chunk) as *mut [C]) - } -} - -mod tuple_impl; diff --git a/src/system/accessor/tuple_impl.rs b/src/system/accessor/tuple_impl.rs deleted file mode 100644 index 8729c46ffc..0000000000 --- a/src/system/accessor/tuple_impl.rs +++ /dev/null @@ -1,63 +0,0 @@ -use crate::system::accessor::{self, Accessor}; -use crate::{entity, Archetype}; - -macro_rules! impl_accessor_set_for_tuple { - ($($ty:ident $var:ident,)*) => { - // Safety: accessor::Accessor documentation justified this. 
- unsafe impl Accessor for ($($ty,)*) - where $( - $ty: Accessor, - )* - { - type Entity<'t> = ($(<$ty as Accessor>::Entity<'t>,)*) - where - Self: 't; - unsafe fn entity<'ret>(this: &mut Self, #[allow(unused_variables)] entity: entity::TempRef<'_, A>) -> Self::Entity<'ret> - { - #[allow(non_snake_case)] - let ($($var,)*) = this; - #[allow(clippy::unused_unit)] - { - ( - $(<$ty as Accessor>::entity($var, entity),)* - ) - } - } - } - - // Safety: accessor::Chunked documentation justified this. - unsafe impl accessor::Chunked for ($($ty,)*) - where $( - $ty: accessor::Chunked, - )* - { - type Chunk<'t> = ($(<$ty as accessor::Chunked>::Chunk<'t>,)*) - where - Self: 't; - unsafe fn chunk<'ret>(this: &mut Self, #[allow(unused_variables)] chunk: entity::TempRefChunk<'_, A>) -> Self::Chunk<'ret> { - #[allow(non_snake_case)] - let ($($var,)*) = this; - - #[allow(clippy::unused_unit)] - { - ( - $(<$ty as accessor::Chunked>::chunk($var, chunk),)* - ) - } - } - } - } -} - -macro_rules! impl_accessor_set_for_tuple_accumulate { - () => { - impl_accessor_set_for_tuple!(); - }; - ($first_ty:ident $first_var:ident, $($rest_ty:ident $rest_var:ident,)*) => { - impl_accessor_set_for_tuple_accumulate!($($rest_ty $rest_var,)*); - impl_accessor_set_for_tuple!($first_ty $first_var, $($rest_ty $rest_var,)*); - } -} -impl_accessor_set_for_tuple_accumulate!( - P0 p0, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9, P10 p10, P11 p11, P12 p12, P13 p13, P14 p14, P15 p15, P16 p16, P17 p17, P18 p18, P19 p19, P20 p20, P21 p21, P22 p22, P23 p23, P24 p24, P25 p25, P26 p26, P27 p27, P28 p28, P29 p29, P30 p30, P31 p31, -); diff --git a/src/system/entity.rs b/src/system/entity.rs deleted file mode 100644 index ff73f3eb56..0000000000 --- a/src/system/entity.rs +++ /dev/null @@ -1,186 +0,0 @@ -use std::cell::RefCell; -use std::marker::PhantomData; -use std::{mem, ops}; - -use super::{accessor, Accessor}; -use crate::entity::{self, ealloc, Ref as _}; -use crate::world::offline; -use crate::{comp, Archetype}; - -/// Allows creating entities of an archetype. -pub trait EntityCreator { - /// Queues to create an entity. - fn create(&mut self, comps: comp::Map) -> entity::Entity { - self.with_hint(comps, Default::default()) - } - - /// Queues to create an entity with hint. - fn with_hint( - &mut self, - comps: comp::Map, - hint: ::AllocHint, - ) -> entity::Entity; -} - -/// An implementation of [`EntityCreator`], used in macros. -/// -/// Semver-exempt. -#[doc(hidden)] -pub struct EntityCreatorImpl<'t, R: ops::DerefMut + 't> -where - ::Target: ealloc::Shard, -{ - pub buffer: &'t RefCell<&'t mut offline::BufferShard>, - pub ealloc: R, -} - -impl<'t, A: Archetype, R: ops::DerefMut> EntityCreator for EntityCreatorImpl<'t, R> -where - ::Target: - ealloc::Shard::AllocHint>, -{ - fn with_hint( - &mut self, - comps: comp::Map, - hint: <::Target as ealloc::Shard>::Hint, - ) -> entity::Entity { - let mut buffer = self.buffer.borrow_mut(); - let ealloc = &mut *self.ealloc; - buffer.create_entity_with_hint_and_shard(comps, &mut *ealloc, hint) - } -} - -/// Allows deleting entities of an archetype. -pub trait EntityDeleter { - /// Queues to mark an entity for deletion. - fn queue>(&mut self, entity: E); -} - -/// An implementation of [`EntityDeleter`], used in macros. -/// -/// Semver-exempt. 
-#[doc(hidden)] -pub struct EntityDeleterImpl<'t, A: Archetype> { - pub buffer: &'t RefCell<&'t mut offline::BufferShard>, - pub _ph: PhantomData, -} - -impl<'t, A: Archetype> EntityDeleter for EntityDeleterImpl<'t, A> { - fn queue>(&mut self, entity: E) { - let mut buffer = self.buffer.borrow_mut(); - buffer.delete_entity::(entity); - } -} - -/// Allows iterating all entities of an archetype. -pub trait EntityIterator { - /// Return value of [`entities`](Self::entities). - type Entities<'t>: Iterator> - where - Self: 't; - /// Iterates over all entity IDs in this archetype. - fn entities(&self) -> Self::Entities<'_>; - - /// Return value of [`chunks`](Self::chunks). - type Chunks<'t>: Iterator> - where - Self: 't; - /// Iterates over all contiguous chunks of entity IDs. - fn chunks(&self) -> Self::Chunks<'_>; - - /// Return value of [`entities_with`](Self::entities_with). - type EntitiesWith<'t, T: Accessor + 't>: Iterator< - Item = (entity::TempRef<'t, A>, T::Entity<'t>), - > - where - Self: 't; - /// Iterates over all entities, yielding the components requested. - fn entities_with>(&self, accessors: T) -> Self::EntitiesWith<'_, T>; - - /// Return value of [`chunks_with`](Self::chunks_with). - type ChunksWith<'t, T: accessor::Chunked + 't>: Iterator< - Item = (entity::TempRefChunk<'t, A>, T::Chunk<'t>), - > - where - Self: 't; - /// Iterates over all entities, - /// yielding the components requested in contiguous chunks. - fn chunks_with>(&self, accessors: T) -> Self::ChunksWith<'_, T>; -} - -pub struct EntityIteratorImpl { - ealloc: ealloc::Snapshot, -} - -impl EntityIterator for EntityIteratorImpl { - type Entities<'t> = impl Iterator> - where - Self: 't; - fn entities(&self) -> Self::Entities<'_> { - self.ealloc - .iter_allocated_chunks() - .flat_map(::range) - .map(entity::TempRef::new) - } - - type Chunks<'t> = impl Iterator> + 't - where - Self: 't; - fn chunks(&self) -> Self::Chunks<'_> { - self.ealloc - .iter_allocated_chunks() - .map(|range| entity::TempRefChunk::new(range.start, range.end)) - } - - type EntitiesWith<'t, T: Accessor + 't> = impl Iterator, T::Entity<'t>)> - where - Self: 't; - fn entities_with>(&self, mut accessor: T) -> Self::EntitiesWith<'_, T> { - let mut previous = None; - self.entities().map(move |entity| { - if let Some(previous) = mem::replace(&mut previous, Some(entity)) { - assert!(previous.id() < entity.id()); - } - let projected = unsafe { T::entity(&mut accessor, entity) }; - (entity, projected) - }) - } - - type ChunksWith<'t, T: accessor::Chunked + 't> = impl Iterator, T::Chunk<'t>)> + 't - where - Self: 't; - fn chunks_with>(&self, mut accessor: T) -> Self::ChunksWith<'_, T> { - let mut previous = None; - self.chunks().map(move |chunk: entity::TempRefChunk| { - assert!(chunk.start <= chunk.end); - if let Some(previous) = mem::replace(&mut previous, Some(chunk)) { - assert!(previous.end < chunk.start); - } - let projected = unsafe { T::chunk(&mut accessor, chunk) }; - (chunk, projected) - }) - } -} - -/// Constructs an instance of [`EntityIterator`] that reads from the given allocator. -/// -/// Although this function accepts an allocator shard, -/// it actually reads the global buffer shared between shards, -/// which is independent of the changes in the current shard. -/// Hence, the iterator describe the state after the previous tick completes, -/// which does not include newly initialized entities -/// and includes those queued for deletion. 
-/// This behavior is reasonable, because newly initialized entities should not be accessed at all, -/// and those queued for deletion may have a finalizer or -/// be given a finalizer when running later systems, -/// so those queued for deletion are still included. -/// -/// This function is typically called from the code generated by -/// [`#[system]`](macro@crate::system). -pub fn entity_iterator( - ealloc: ealloc::Snapshot, -) -> impl EntityIterator -where -{ - EntityIteratorImpl { ealloc } -} diff --git a/src/system/iter.rs b/src/system/iter.rs new file mode 100644 index 0000000000..b89d162370 --- /dev/null +++ b/src/system/iter.rs @@ -0,0 +1,416 @@ +//! Iterate over entities of an archetype. +//! +//! While individual accessors also provide functions like +//! [`AccessSingle::iter`](access::Single::iter), +//! functions in [`EntityIterator`] use the entity indices +//! from the entity allocator snapshot directly, +//! enabling better performance with chunk partitioning. + +use std::marker::PhantomData; +use std::{any, iter, mem, ops}; + +use rayon::prelude::ParallelIterator; + +use super::access::single; +use crate::entity::ealloc::snapshot; +use crate::entity::{ealloc, Raw as _}; +use crate::system::access; +use crate::{comp, entity, storage, util, Archetype, Storage}; + +/// Allows iterating all entities of an archetype. +pub struct EntityIterator { + ealloc: ealloc::Snapshot, +} + +impl EntityIterator { + /// Constructs an instance of [`EntityIterator`] that reads from the given allocator. + /// + /// Although this function accepts an allocator shard, + /// it actually reads the global buffer shared between shards, + /// which is independent of the changes in the current shard. + /// Hence, the iterator describe the state after the previous tick completes, + /// which does not include newly initialized entities + /// and includes those queued for deletion. + /// This behavior is reasonable, because newly initialized entities should not be accessed at all, + /// and those queued for deletion may have a finalizer or + /// be given a finalizer when running later systems, + /// so those queued for deletion are still included. + /// + /// This function is typically called from the code generated by + /// [`#[system]`](macro@crate::system). + pub fn new(ealloc: ealloc::Snapshot) -> Self { Self { ealloc } } + + /// Iterates over all entity IDs in this archetype. + pub fn entities(&self) -> impl Iterator> { + self.ealloc + .iter_allocated_chunks() + .flat_map(::range) + .map(entity::TempRef::new) + } + + /// Iterates over all contiguous chunks of entity IDs. + pub fn chunks(&self) -> impl Iterator> + '_ { + self.ealloc + .iter_allocated_chunks() + .map(|range| entity::TempRefChunk::new(range.start, range.end)) + } + + /// Iterates over all entities, yielding the components requested. + pub fn entities_with>( + &self, + zip: IntoZ, + ) -> impl Iterator, >::Item)> { + let mut zip = ZipIter(zip.into_zip(), PhantomData); + self.ealloc + .iter_allocated_chunks() + .flat_map(::range) + .map(move |entity| (entity::TempRef::new(entity), zip.take_serial(entity))) + } + + /// Iterates over all entities, yielding the components requested in contiguous chunks. 
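Putting `entities_with` together with a pair of accessors, a minimal hedged sketch (component names are hypothetical; both are `required`, i.e. `comp::Must`, so the zipped items are plain references rather than `Option`s):

```rust
use dynec::system;
use dynec::test_util::TestArch;

// Hypothetical required components.
#[dynec::comp(of = TestArch, required)]
struct Score(u32);
#[dynec::comp(of = TestArch, required)]
struct Bonus(u32);

#[dynec::system]
fn apply_bonus(
    iter: system::EntityIterator<TestArch>,
    mut scores: system::WriteSimple<TestArch, Score>,
    bonuses: system::ReadSimple<TestArch, Bonus>,
) {
    // The accessor tuple is converted through `IntoZip`; since both components
    // implement `comp::Must`, the items are `&mut Score` and `&Bonus`.
    for (_entity, (score, bonus)) in iter.entities_with((&mut scores, &bonuses)) {
        score.0 += bonus.0;
    }
}
```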
+ pub fn chunks_with>( + &self, + zip: IntoZ, + ) -> impl Iterator, >::Chunk)> + where + IntoZ::IntoZip: ZipChunked, + { + let mut zip = ZipIter(zip.into_zip(), PhantomData); + self.ealloc.iter_allocated_chunks().map(move |chunk| { + ( + entity::TempRefChunk::new(chunk.start, chunk.end), + zip.take_serial_chunk(chunk.start, chunk.end), + ) + }) + } + + /// Same as [`entities_with`](Self::entities_with), + /// but leverages chunked storages for better performance. + pub fn entities_with_chunked>( + &self, + zip: IntoZ, + ) -> impl Iterator, >::Item)> + where + IntoZ::IntoZip: ZipChunked, + { + self.chunks_with(zip).flat_map(|(entities, data)| { + iter::zip( + entity::Raw::range(entities.start..entities.end).map(entity::TempRef::new), + >::chunk_to_entities(data), + ) + }) + } + + fn par_raw_chunks>( + &self, + zip: IntoZ, + ) -> impl ParallelIterator, IntoZ::IntoZip)> + where + IntoZ::IntoZip: Send, + { + rayon::iter::split((self.ealloc.as_slice(), zip.into_zip()), |(slice, zip)| { + let Some(midpt) = slice.midpoint_for_split() else { return ((slice, zip), None) }; + let (slice_left, slice_right) = slice.split_at(midpt); + let mut zip_left = zip; + let zip_right = zip_left.split(midpt); + ((slice_left, zip_left), Some((slice_right, zip_right))) + }) + } + + /// Iterates over all entities in parallel, yielding the components requested. + pub fn par_entities_with>( + &self, + zip: IntoZ, + ) -> impl ParallelIterator, >::Item)> + where + IntoZ::IntoZip: Send, + >::Item: Send, + { + self.par_raw_chunks(zip).flat_map_iter(|(slice, zip)| { + let mut zip_iter = ZipIter(zip, PhantomData); + entity::Raw::range(slice.start..slice.end) + .map(move |entity| (entity::TempRef::new(entity), zip_iter.take_serial(entity))) + }) + } + + /// Iterates over all chunks of entities in parallel, yielding the components requested. + pub fn par_chunks_with>( + &self, + zip: IntoZ, + ) -> impl ParallelIterator, >::Chunk)> + where + IntoZ::IntoZip: ZipChunked + Send, + >::Chunk: Send, + { + self.par_raw_chunks(zip).map(|(slice, zip)| { + let mut zip_iter = ZipIter(zip, PhantomData); + ( + entity::TempRefChunk::new(slice.start, slice.end), + zip_iter.take_serial_chunk(slice.start, slice.end), + ) + }) + } + + /// Same as [`par_entities_with`](Self::par_entities_with), + /// but leverages chunked storages for better performance. + pub fn par_entities_with_chunked>( + &self, + zip: IntoZ, + ) -> impl ParallelIterator, >::Item)> + where + IntoZ::IntoZip: ZipChunked + Send, + >::Item: Send, + { + self.par_raw_chunks(zip).flat_map_iter(|(slice, zip)| { + iter::zip( + entity::Raw::range(slice.start..slice.end).map(entity::TempRef::new), + >::chunk_to_entities( + ZipIter(zip, PhantomData).take_serial_chunk(slice.start, slice.end), + ), + ) + }) + } +} + +struct ZipIter>(Z, PhantomData); + +impl> ZipIter { + fn take_serial(&mut self, entity: A::RawEntity) -> Z::Item { + let right = self.0.split(entity.add(1)); // add 1 so that `entity` remains on the left chunk + let left = mem::replace(&mut self.0, right); + left.get(entity::TempRef::new(entity)) + } +} + +impl> ZipIter { + fn take_serial_chunk(&mut self, start: A::RawEntity, end: A::RawEntity) -> Z::Chunk { + let right = self.0.split(end); // no need to add 1 here since `end` does not belong to the required chunk + let left = mem::replace(&mut self.0, right); + left.get_chunk(entity::TempRefChunk::new(start, end)) + } +} + +/// Multiple single accessors zipped together, +/// to be used with [`EntityIterator::entities_with`](crate::system::EntityIterator::entities_with). 
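The `par_*` variants return rayon `ParallelIterator`s, so ordinary rayon combinators drive them. A hedged sketch reusing the hypothetical `Score`/`Bonus` components from the previous block; it additionally assumes both components live in chunked (contiguous) storages, which the `chunks_with`/`par_chunks_with` family requires:

```rust
use rayon::prelude::*;
// (imports and the `Score`/`Bonus` declarations as in the previous sketch)

#[dynec::system]
fn apply_bonus_parallel(
    iter: system::EntityIterator<TestArch>,
    mut scores: system::WriteSimple<TestArch, Score>,
    bonuses: system::ReadSimple<TestArch, Bonus>,
) {
    // Each rayon task receives a disjoint split of the zipped accessors
    // (see `par_raw_chunks` above), so the mutable borrow never aliases.
    iter.par_chunks_with((&mut scores, &bonuses)).for_each(
        |(_chunk, (score_slice, bonus_slice))| {
            for (score, bonus) in score_slice.iter_mut().zip(bonus_slice) {
                score.0 += bonus.0;
            }
        },
    );
}
```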
+/// +/// All accessors must target the same archetype `A`. +/// +/// See [`IntoZip`] for what values can be passed for `Zip`. +pub trait Zip: Sized { + /// Vertically splits each underlying storage vertically (by entities) at `offset`. + fn split(&mut self, offset: A::RawEntity) -> Self; + + /// The type of values available for a single entity. + type Item; + /// Returns the requested components for the specified entity. + fn get>(self, entity: E) -> Self::Item; +} + +/// [`Zip`] accessors with the additional condition that +/// all underlying storages support chunked access, +/// to be used with [`EntityIterator::chunks_with`](crate::system::EntityIterator::chunks_with). +pub trait ZipChunked: Zip { + /// The type of values available for a single chunk. + type Chunk; + /// Returns the requested components as chunks for the specified entities. + fn get_chunk(self, chunk: entity::TempRefChunk) -> Self::Chunk; + + /// Converts a chunk into an iterator of items. + fn chunk_to_entities(chunk: Self::Chunk) -> impl Iterator; +} + +/// Values that can be used as a [`Zip`] in [`EntityIterator`], +/// similar to [`IntoIterator`] for iterators. +/// +/// This trait is intended to map storages to components of a single entity, +/// so it is implemented by: +/// - [`&ReadSimple`](crate::system::ReadSimple) and [`&mut WriteSimple`](crate::system::WriteSimple) +/// - Shared/mutable references to [split](access::Isotope::split) isotope accessors +/// - Any of the above wrapped with [`Try`] for [optional](comp::Presence::Optional) components. +/// - Non-empty tuples of `Zip` implementors, including other tuples. +/// - Structs of `Zip` fields that use the [`Zip`](crate::zip) derive macro. +/// +/// The default configuration only implements for tuples of up to 4 elements. +/// To use larger tuples at the cost of slower compile time, +/// use the feature `"tuple-impl-{n}-zip"`, +/// where `{n}` is `8`, `16`, `24` or `32`. +pub trait IntoZip { + /// The [`Zip`] type that this is converted into. + type IntoZip: Zip; + /// Converts into a [`Zip`] object. + fn into_zip(self) -> Self::IntoZip; +} + +/// Determines how to resolve the case of a missing Result. +pub trait MissingResln: Send + Sync { + /// The return type of the resolution. + type Result; + /// Resolves an optional value. + fn must_or_try(option: Option) -> Self::Result; +} + +/// Automatically unwraps storage results. +pub struct MustMissingResln>(PhantomData<(A, C)>); +impl> MissingResln for MustMissingResln { + type Result = T; + fn must_or_try(option: Option) -> T { + match option { + Some(value) => value, + None => panic!( + "Component {}/{} implements comp::Must but is not present", + any::type_name::(), + any::type_name::() + ), + } + } +} + +/// Returns `None` if component is missing. +pub enum TryMissingResln {} +impl MissingResln for TryMissingResln { + type Result = Option; + fn must_or_try(option: Option) -> Option { option } +} + +/// Wrap accessor references with `Try` to indicate that the result should be an `Option`. 
+pub struct Try(pub T); + +impl<'t, A, C, AccessorT> IntoZip for Try<&'t AccessorT> +where + A: Archetype, + C: comp::SimpleOrIsotope, + AccessorT: single::Get, +{ + type IntoZip = Read<'t, A, C, AccessorT, TryMissingResln>; + fn into_zip(self) -> Self::IntoZip { Read { accessor: self.0, _ph: PhantomData } } +} + +impl<'t, A, C, AccessorT> IntoZip for &'t AccessorT +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + AccessorT: single::Get, +{ + type IntoZip = Read<'t, A, C, AccessorT, MustMissingResln>; + fn into_zip(self) -> Self::IntoZip { Read { accessor: self, _ph: PhantomData } } +} + +/// [`IntoZip::IntoZip`] for read-only accessors. +pub struct Read<'t, A, C, AccessorT, Resln> { + accessor: &'t AccessorT, + _ph: PhantomData<(A, C, Resln)>, +} + +impl<'t, A, C, AccessorT, Resln> Copy for Read<'t, A, C, AccessorT, Resln> {} +impl<'t, A, C, AccessorT, Resln> Clone for Read<'t, A, C, AccessorT, Resln> { + fn clone(&self) -> Self { *self } +} + +impl<'t, A, C, AccessorT, Resln> Zip for Read<'t, A, C, AccessorT, Resln> +where + A: Archetype, + C: comp::SimpleOrIsotope, + AccessorT: single::Get, + Resln: MissingResln, +{ + fn split(&mut self, _offset: A::RawEntity) -> Self { *self } + + type Item = Resln::Result<&'t C>; + fn get>(self, entity: E) -> Resln::Result<&'t C> { + Resln::must_or_try(self.accessor.try_get(entity)) + } +} + +impl<'t, A, C, AccessorT> ZipChunked for Read<'t, A, C, AccessorT, MustMissingResln> +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + AccessorT: single::Get + single::GetChunked, +{ + type Chunk = &'t [C]; + fn get_chunk(self, chunk: entity::TempRefChunk) -> Self::Chunk { + self.accessor.get_chunk(chunk) + } + + fn chunk_to_entities(chunk: Self::Chunk) -> impl Iterator { chunk.iter() } +} + +impl<'t, A, C, StorageRef> IntoZip for Try<&'t mut access::Single> +where + A: Archetype, + C: comp::SimpleOrIsotope, + StorageRef: ops::DerefMut + Sync, + StorageRef::Target: Storage, +{ + type IntoZip = Write< + 't, + A, + C, + util::OwnedDeref<::Partition<'t>>, + TryMissingResln, + >; + fn into_zip(self) -> Self::IntoZip { + Write { accessor: self.0.as_partition(), _ph: PhantomData } + } +} + +impl<'t, A, C, StorageRef> IntoZip for &'t mut access::Single +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + StorageRef: ops::DerefMut + Sync, + StorageRef::Target: Storage, +{ + type IntoZip = Write< + 't, + A, + C, + util::OwnedDeref<::Partition<'t>>, + MustMissingResln, + >; + fn into_zip(self) -> Self::IntoZip { Write { accessor: self.as_partition(), _ph: PhantomData } } +} + +/// [`IntoZip::IntoZip`] for mutable accessors. 
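These two resolutions are what distinguish plain accessor references from their `Try`-wrapped forms: `&accessor`/`&mut accessor` resolve through `MustMissingResln` and panic if a `comp::Must` component is absent, while `Try(...)` resolves through `TryMissingResln` and yields `Option`s. A hedged sketch, with a hypothetical `Nickname` component whose presence is assumed to default to optional (and `Score` as in the sketches above):

```rust
use dynec::system;
use dynec::test_util::TestArch;

// Hypothetical component declared without `required`, i.e. assumed optional.
#[dynec::comp(of = TestArch)]
struct Nickname(&'static str);

#[dynec::system]
fn greet(
    iter: system::EntityIterator<TestArch>,
    scores: system::ReadSimple<TestArch, Score>,
    nicknames: system::ReadSimple<TestArch, Nickname>,
) {
    // `&scores` resolves through `MustMissingResln` and yields `&Score`;
    // `Try(&nicknames)` resolves through `TryMissingResln` and yields `Option<&Nickname>`.
    for (_entity, (score, nickname)) in
        iter.entities_with((&scores, system::Try(&nicknames)))
    {
        if let Some(name) = nickname {
            log::trace!("{} has score {}", name.0, score.0);
        }
    }
}
```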
+pub struct Write<'t, A, C, PartitionT, Resln> { + accessor: access::Single, + _ph: PhantomData<(&'t mut C, Resln)>, +} + +impl<'t, A, C, PartitionT, Resln> Zip for Write<'t, A, C, util::OwnedDeref, Resln> +where + A: Archetype, + C: comp::SimpleOrIsotope, + PartitionT: storage::Partition<'t, RawEntity = A::RawEntity, Comp = C>, + Resln: MissingResln, +{ + fn split(&mut self, offset: A::RawEntity) -> Self { + let right = self.accessor.split_out(offset); + Self { accessor: right, _ph: PhantomData } + } + + type Item = Resln::Result<&'t mut C>; + fn get>(self, entity: E) -> Resln::Result<&'t mut C> { + Resln::must_or_try(self.accessor.try_into_mut(entity)) + } +} + +impl<'t, A, C, PartitionT> ZipChunked + for Write<'t, A, C, util::OwnedDeref, MustMissingResln> +where + A: Archetype, + C: comp::SimpleOrIsotope + comp::Must, + PartitionT: storage::PartitionChunked<'t, RawEntity = A::RawEntity, Comp = C>, +{ + type Chunk = &'t mut [C]; + fn get_chunk(self, chunk: entity::TempRefChunk) -> Self::Chunk { + self.accessor.into_chunk_mut(chunk) + } + + fn chunk_to_entities(chunk: Self::Chunk) -> impl Iterator { chunk.iter_mut() } +} + +mod tuple_impls; + +#[cfg(test)] +mod tests; diff --git a/src/system/iter/tests.rs b/src/system/iter/tests.rs new file mode 100644 index 0000000000..1708317497 --- /dev/null +++ b/src/system/iter/tests.rs @@ -0,0 +1,244 @@ +//! Tests EntityIterator. + +use rayon::prelude::ParallelIterator; + +use crate::entity::{Raw as _, Ref}; +use crate::test_util::*; +use crate::{system, system_test, system_test_exported, tracer, world}; + +macro_rules! test_partial_single_system { + ($test_name:ident $iter_method:ident) => { + #[test] + fn $test_name() { + #[system(dynec_as(crate))] + fn test_system( + iter: system::EntityIterator, + simple_acc: system::ReadSimple, + #[dynec(isotope(discrim = [TestDiscrim1(7), TestDiscrim1(13)]))] + mut double_iso_acc: system::WriteIsotopePartial, + #[dynec(isotope(discrim = [TestDiscrim1(31)]))] + mut single_iso_acc: system::ReadIsotopePartial, + ) { + let [mut double_iso_acc_0, mut double_iso_acc_1] = + double_iso_acc.split_isotopes([0, 1]); + let [single_iso_acc_0] = single_iso_acc.split([0]); + + let iter_collected: Vec<_> = iter + .$iter_method(( + system::Try(&simple_acc), + system::Try(&mut double_iso_acc_0), + system::Try(&mut double_iso_acc_1), + system::Try(&single_iso_acc_0), + )) + .collect(); + for (entity, (simple, double0, double1, single)) in iter_collected { + match entity.id().to_primitive() { + 1 => { + assert_eq!(simple, Some(&Simple1OptionalNoDepNoInit(5))); + assert_eq!(double0, Some(&mut IsoNoInit(11))); + assert_eq!(double1, None); + assert_eq!(single, Some(&IsoNoInit(41))); + } + 2 => { + assert_eq!(simple, None); + assert_eq!(double0, None); + assert_eq!(double1, Some(&mut IsoNoInit(17))); + assert_eq!(single, Some(&IsoNoInit(43))); + } + 3 => { + assert_eq!(simple, None); + assert_eq!(double0, Some(&mut IsoNoInit(19))); + assert_eq!(double1, None); + assert_eq!(single, None); + } + _ => unreachable!(), + } + } + } + + let mut world = system_test! 
{ + test_system.build(); + _: TestArch = ( + Simple1OptionalNoDepNoInit(5), + @(TestDiscrim1(7), IsoNoInit(11)), + @(TestDiscrim1(31), IsoNoInit(41)), + ); + _: TestArch = ( + @(TestDiscrim1(13), IsoNoInit(17)), + @(TestDiscrim1(31), IsoNoInit(43)), + ); + _: TestArch = ( + @(TestDiscrim1(7), IsoNoInit(19)), + ); + }; + + world.execute(&tracer::Log(log::Level::Trace)); + } + }; +} + +test_partial_single_system!(test_partial_single_serial entities_with); +test_partial_single_system!(test_partial_single_chunked par_entities_with); + +#[test] +fn test_entity_iter_partial_chunked_mut() { + #[system(dynec_as(crate))] + fn test_system( + iter: system::EntityIterator, + simple_acc: system::ReadSimple, + #[dynec(isotope(discrim = [TestDiscrim2(7), TestDiscrim2(13)]))] + mut double_iso_acc: system::WriteIsotopePartial< + TestArch, + IsoWithInit, + [TestDiscrim2; 2], + >, + #[dynec(isotope(discrim = [TestDiscrim2(31)]))] + mut single_iso_acc: system::ReadIsotopePartial< + TestArch, + IsoWithInit, + [TestDiscrim2; 1], + >, + ) { + let [mut double_iso_acc_0, double_iso_acc_1] = double_iso_acc.split_isotopes([0, 1]); + let [single_iso_acc_0] = single_iso_acc.split([0]); + + for (chunk_enumerate, (chunk, (simple, double0, double1, single))) in iter + .chunks_with((&simple_acc, &mut double_iso_acc_0, &double_iso_acc_1, &single_iso_acc_0)) + .enumerate() + { + match chunk_enumerate { + 0 => { + assert_eq!(chunk.start.get(), 1); + assert_eq!(chunk.end.get(), 3); + + assert_eq!(simple[0], Simple5RequiredNoInit(5)); + assert_eq!(double0[0], IsoWithInit(11)); + assert_eq!(double1[0], IsoWithInit(73)); + assert_eq!(single[0], IsoWithInit(41)); + + assert_eq!(simple[1], Simple5RequiredNoInit(47)); + assert_eq!(double0[1], IsoWithInit(73)); + assert_eq!(double1[1], IsoWithInit(17)); + assert_eq!(single[1], IsoWithInit(43)); + } + 1 => { + assert_eq!(chunk.start.get(), 4); + assert_eq!(chunk.end.get(), 5); + + assert_eq!(simple[0], Simple5RequiredNoInit(53)); + assert_eq!(double0[0], IsoWithInit(19)); + assert_eq!(double1[0], IsoWithInit(23)); + assert_eq!(single[0], IsoWithInit(73)); + } + _ => unreachable!(), + } + } + } + + let (mut world, (hole,)) = system_test_exported! 
{ + test_system.build(); + TestArch = ( + Simple5RequiredNoInit(5), + @(TestDiscrim2(7), IsoWithInit(11)), + @(TestDiscrim2(31), IsoWithInit(41)), + ); + TestArch = ( + Simple5RequiredNoInit(47), + @(TestDiscrim2(13), IsoWithInit(17)), + @(TestDiscrim2(31), IsoWithInit(43)), + ); + let hole: TestArch = ( + Simple5RequiredNoInit(404), + ); + TestArch = ( + Simple5RequiredNoInit(53), + @(TestDiscrim2(7), IsoWithInit(19)), + @(TestDiscrim2(13), IsoWithInit(23)), + ); + }; + + let delete_result = world.delete(hole); + assert_eq!(delete_result, world::DeleteResult::Deleted); + + world.execute(&tracer::Log(log::Level::Trace)); +} + +#[test] +fn test_entity_iter_full_mut() { + #[system(dynec_as(crate))] + fn test_system( + iter: system::EntityIterator, + comp1_acc: system::ReadSimple, + mut iso1_acc: system::WriteIsotopeFull, + ) { + let [mut iso1_acc_0, mut iso1_acc_1] = + iso1_acc.split_isotopes([TestDiscrim1(7), TestDiscrim1(13)]); + + for (entity, (comp1, iso10, iso11)) in iter.entities_with(( + system::Try(&comp1_acc), + system::Try(&mut iso1_acc_0), + system::Try(&mut iso1_acc_1), + )) { + match entity.id().to_primitive() { + 1 => { + assert_eq!(comp1, Some(&Simple1OptionalNoDepNoInit(5))); + assert_eq!(iso10, Some(&mut IsoNoInit(11))); + assert_eq!(iso11, None); + } + 2 => { + assert_eq!(comp1, None); + assert_eq!(iso10, None); + assert_eq!(iso11, Some(&mut IsoNoInit(17))); + } + 3 => { + assert_eq!(comp1, None); + assert_eq!(iso10, Some(&mut IsoNoInit(19))); + assert_eq!(iso11, Some(&mut IsoNoInit(23))); + } + _ => unreachable!(), + } + } + } + + let mut world = system_test! { + test_system.build(); + _: TestArch = ( + Simple1OptionalNoDepNoInit(5), + @(TestDiscrim1(7), IsoNoInit(11)), + ); + _: TestArch = ( + @(TestDiscrim1(13), IsoNoInit(17)), + ); + _: TestArch = ( + @(TestDiscrim1(7), IsoNoInit(19)), + @(TestDiscrim1(13), IsoNoInit(23)), + ); + }; + + world.execute(&tracer::Log(log::Level::Trace)); +} + +// Test that there is no access conflict when creating, deleting and iterating the same archetype. +#[test] +fn test_entity_create_and_delete() { + #[system(dynec_as(crate))] + fn test_system( + mut entity_creator: system::EntityCreator, + _entity_deleter: system::EntityDeleter, + entity_iter: system::EntityIterator, + ) { + let entity = entity_creator + .create(crate::comps![ @(crate) TestArch => Simple1OptionalNoDepNoInit(1) ]); + for v in entity_iter.entities() { + assert_ne!(entity.id(), v.id()); + } + } + + #[system(dynec_as(crate))] + fn dummy_reader_system(_: system::ReadSimple) {} + + let mut world = system_test! { + test_system.build(), dummy_reader_system.build(); + }; + world.execute(&tracer::Log(log::Level::Trace)); +} diff --git a/src/system/iter/tuple_impls.rs b/src/system/iter/tuple_impls.rs new file mode 100644 index 0000000000..0f6bc466a7 --- /dev/null +++ b/src/system/iter/tuple_impls.rs @@ -0,0 +1,114 @@ +#![allow(non_snake_case, clippy::unused_unit)] + +use super::{IntoZip, Zip, ZipChunked}; +use crate::{entity, Archetype}; + +/// Similar to [`itertools::izip!`], but produces always tuples even for 1-tuples. +macro_rules! uniform_izip { + ($expr:expr,) => { + $expr.into_iter().map(|v| (v,)) + }; + ($($expr:expr,)*) => { + itertools::izip!($($expr,)*) + } +} + +macro_rules! 
impl_zip_for_tuple { + ($($idents:ident)*) => { + impl IntoZip for ($($idents,)*) + where + $($idents: IntoZip,)* + { + type IntoZip = ($( + <$idents as IntoZip>::IntoZip, + )*); + + fn into_zip(self) -> Self::IntoZip { + let ($($idents,)*) = self; + ($( + IntoZip::::into_zip($idents), + )*) + } + } + + impl Zip for ($($idents,)*) + where + $($idents: Zip,)* + { + fn split(&mut self, offset: A::RawEntity) -> Self { + let ($($idents,)*) = self; + ($( + Zip::::split($idents, offset), + )*) + } + + type Item = ($( + <$idents as Zip>::Item, + )*); + fn get>(self, entity: E) -> Self::Item { + let ($($idents,)*) = self; + let entity = entity::TempRef::::new(entity.id()); + ($( + Zip::::get($idents, entity), + )*) + } + } + + impl ZipChunked for ($($idents,)*) + where + $($idents: ZipChunked,)* + { + type Chunk = ($( + <$idents as ZipChunked>::Chunk, + )*); + fn get_chunk(self, chunk: entity::TempRefChunk) -> Self::Chunk { + let ($($idents,)*) = self; + ($( + ZipChunked::::get_chunk($idents, chunk), + )*) + } + + fn chunk_to_entities(chunk: Self::Chunk) -> impl Iterator>::Item, + )*)> { + let ($($idents,)*) = chunk; + uniform_izip!($((<$idents as ZipChunked>::chunk_to_entities($idents)),)*) + } + } + } +} + +macro_rules! impl_zip_for_tuple_accumulate { + ($feature:literal $first:ident $($rest:tt)*) => { + impl_zip_for_tuple_accumulate!($feature $($rest)*); + #[cfg(feature = $feature)] + impl_zip_for_tuple_accumulate!(@MIXED $first $($rest)*); + }; + ($outer_feature:literal $inner_feature:literal $($rest:tt)*) => { + impl_zip_for_tuple_accumulate!($inner_feature $($rest)*); + }; + ($outer_feature:literal @ALWAYS $($rest:tt)*) => { + impl_zip_for_tuple_accumulate!(@ALWAYS $($rest)*); + }; + (@ALWAYS $first:ident $($rest:tt)*) => { + impl_zip_for_tuple_accumulate!(@ALWAYS $($rest)*); + impl_zip_for_tuple!($first $($rest)*); + }; + (@ALWAYS) => { + #[allow(unused_variables)] + const _: () = { + // impl_zip_for_tuple!(); + }; + }; + (@MIXED $($idents_front:ident)* $($feature:literal $($idents_feature:ident)*)* @ALWAYS $($idents_always:ident)*) => { + impl_zip_for_tuple!($($idents_front)* $($($idents_feature)*)* $($idents_always)*); + }; +} + +impl_zip_for_tuple_accumulate!( + "tuple-impl-32-zip" T1 T2 T3 T4 T5 T6 T7 T8 + "tuple-impl-24-zip" T9 T10 T11 T12 T13 T14 T15 T16 + "tuple-impl-16-zip" T17 T18 T19 T20 T21 T22 T23 T24 + "tuple-impl-8-zip" T25 T26 T27 T28 + @ALWAYS T29 T30 T31 T32 +); diff --git a/src/system/offline_buffer.rs b/src/system/offline_buffer.rs new file mode 100644 index 0000000000..ba3496399f --- /dev/null +++ b/src/system/offline_buffer.rs @@ -0,0 +1,60 @@ +use std::cell::RefCell; +use std::marker::PhantomData; + +use crate::entity::{self, ealloc}; +use crate::world::offline; +use crate::{comp, Archetype}; + +/// Allows creating entities of an archetype. +pub struct EntityCreator<'t, A: Archetype> { + buffer: &'t RefCell<&'t mut offline::BufferShard>, + ealloc: ealloc::BorrowedShard<'t, A>, +} + +impl<'t, A: Archetype> EntityCreator<'t, A> { + /// Constructs an entity creator. + pub fn new( + buffer: &'t RefCell<&'t mut offline::BufferShard>, + ealloc: ealloc::BorrowedShard<'t, A>, + ) -> Self { + Self { buffer, ealloc } + } + + /// Queues to create an entity. + pub fn create(&mut self, comps: comp::Map) -> entity::Entity { + self.with_hint(comps, Default::default()) + } + + /// Queues to create an entity with hint. 
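For context, a hedged sketch of how a system might use this creator. It assumes the `comps!` macro is exported for downstream crates in the same shape as the `crate::comps![@(crate) ...]` invocations in the tests below, and reuses the hypothetical `Score` component from the earlier sketches:

```rust
use dynec::system;
use dynec::test_util::TestArch;

#[dynec::system]
fn spawner(mut creator: system::EntityCreator<TestArch>) {
    // Creation is only queued into the offline buffer; other systems can
    // access the new entity's components after the current cycle joins.
    let _entity: dynec::Entity<TestArch> =
        creator.create(dynec::comps![TestArch => Score(0)]);
}
```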
+ pub fn with_hint( + &mut self, + comps: comp::Map, + hint: ::AllocHint, + ) -> entity::Entity { + let mut buffer = self.buffer.borrow_mut(); + let ealloc = &mut *self.ealloc; + buffer.create_entity_with_hint_and_shard(comps, &mut *ealloc, hint) + } +} + +/// Allows deleting entities of an archetype. +pub struct EntityDeleter<'t, A: Archetype> { + buffer: &'t RefCell<&'t mut offline::BufferShard>, + _ph: PhantomData, +} + +impl<'t, A: Archetype> EntityDeleter<'t, A> { + /// Constructs an entity deleter from a macro. + pub fn new(buffer: &'t RefCell<&'t mut offline::BufferShard>) -> Self { + Self { buffer, _ph: PhantomData } + } + + /// Queues to mark an entity for deletion. + pub fn queue>(&mut self, entity: E) { + let mut buffer = self.buffer.borrow_mut(); + buffer.delete_entity::(entity); + } +} + +#[cfg(test)] +mod tests; diff --git a/src/system/offline_buffer/tests.rs b/src/system/offline_buffer/tests.rs new file mode 100644 index 0000000000..dae4993d30 --- /dev/null +++ b/src/system/offline_buffer/tests.rs @@ -0,0 +1,400 @@ +//! Tests EntityCreator and EntityDeleter. + +use crate::entity::{deletion, generation}; +use crate::test_util::*; +use crate::{global, system, system_test, tracer, world, Entity}; + +#[test] +fn test_entity_create() { + #[global(dynec_as(crate), initial = Step::Create)] + enum Step { + Create, + Access, + } + + #[derive(Debug, PartialEq, Eq, Hash)] + struct LatePartition; + + #[system(dynec_as(crate), before(LatePartition))] + fn entity_creator_system( + mut entity_creator: system::EntityCreator, + #[dynec(global(maybe_uninit(TestArch)))] initials: &mut InitialEntities, + #[dynec(global)] step: &Step, + ) { + match step { + Step::Create => { + initials.strong = Some( + entity_creator + .create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(5)]), + ); + } + Step::Access => {} + } + } + + #[system(dynec_as(crate))] + fn comp_access_system( + comp1: system::ReadSimple, + #[dynec(global)] initials: &InitialEntities, + #[dynec(global)] step: &Step, + ) { + match step { + Step::Create => { + assert!(initials.strong.is_none()); + } + Step::Access => { + let ent = initials.strong.as_ref().expect("initials.strong should have been set"); + comp1.try_get(ent).expect("initials.strong should have been initialized"); + } + } + } + + #[system(dynec_as(crate), after(LatePartition))] + fn late_comp_access_system( + // component storage does not require maybe_uninit unless the component has something like `Option>` + comp1: system::ReadSimple, + #[dynec(global(maybe_uninit(TestArch)))] initials: &InitialEntities, + #[dynec(global)] step: &Step, + ) { + match step { + Step::Create => { + let ent = initials.strong.as_ref().expect("initials.strong should have been set"); + assert!(comp1.try_get(ent).is_none(), "entity should be in pre-initialize state"); + } + Step::Access => { + let ent = initials.strong.as_ref().expect("initials.strong should have been set"); + comp1.try_get(ent).expect("initials.strong should have been initialized"); + } + } + } + + let mut world = system_test!(comp_access_system.build(), late_comp_access_system.build(), entity_creator_system.build();); + + world.execute(&tracer::Log(log::Level::Trace)); + *world.get_global::() = Step::Access; + world.execute(&tracer::Log(log::Level::Trace)); + + let ent = { + let initials = world.get_global::(); + let ent = initials.strong.as_ref().expect("initials.strong missing"); + ent.clone() + }; + let storage = world.components.get_simple_storage::(); + let comp1 = storage.try_get(&ent); + assert_eq!(comp1, 
Some(&Simple1OptionalNoDepNoInit(5))); +} + +#[test] +#[should_panic = "Scheduled systems have a cyclic dependency: "] +fn test_entity_create_conflict() { + #[system(dynec_as(crate))] + fn test_system( + mut entity_creator: system::EntityCreator, + #[dynec(global)] initials: &mut InitialEntities, + _comp1: system::ReadSimple, + ) { + initials.strong = Some( + entity_creator + .create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(5)]), + ); + } + + let mut world = system_test!(test_system.build();); + + world.execute(&tracer::Log(log::Level::Trace)); + + let ent = { + let initials = world.get_global::(); + let ent = initials.strong.as_ref().expect("initials.strong missing"); + ent.clone() + }; + let storage = world.components.get_simple_storage::(); + let comp1 = storage.try_get(&ent); + assert_eq!(comp1, Some(&Simple1OptionalNoDepNoInit(5))); +} + +#[test] +fn test_entity_delete() { + #[system(dynec_as(crate))] + fn test_system( + mut entity_deleter: system::EntityDeleter, + #[dynec(global)] initials: &mut InitialEntities, + _comp1: system::ReadSimple, + ) { + entity_deleter.queue(initials.strong.take().expect("initials.strong missing")); + } + + let mut world = system_test!(test_system.build();); + let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); + let weak = ent.weak(world.get_global::()); + world.get_global::().strong = Some(ent); + + world.execute(&tracer::Log(log::Level::Trace)); + + let storage = world.components.get_simple_storage::(); + let comp1 = storage.try_get(&weak); + assert_eq!(comp1, None); +} + +#[test] +#[cfg_attr( + any( + all(debug_assertions, feature = "debug-entity-rc"), + all(not(debug_assertions), feature = "release-entity-rc"), + ), + should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ + system dynec::system::offline_buffer::tests::test_system. All strong \ + references to an entity must be dropped before queuing for deletion and \ + removing all finalizers." +)] +fn test_entity_delete_send_system_leak() { + #[system(dynec_as(crate))] + fn test_system( + #[dynec(local(initial = None, entity))] entity: &mut Option>, + mut entity_deleter: system::EntityDeleter, + #[dynec(global)] initials: &mut InitialEntities, + _comp1: system::ReadSimple, + ) { + if let Some(ent) = initials.strong.take() { + *entity = Some(ent); + } + + if let Some(ent) = entity { + entity_deleter.queue(&*ent); + } + } + + let mut builder = world::Builder::new(0); + builder.schedule(test_system.build()); + + let mut world = builder.build(); + + let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); + let weak = ent.weak(world.get_global::()); + world.get_global::().strong = Some(ent); + + world.execute(&tracer::Log(log::Level::Trace)); + + let storage = world.components.get_simple_storage::(); + let comp1 = storage.try_get(&weak); + assert_eq!(comp1, None); +} + +#[test] +#[cfg_attr( + any( + all(debug_assertions, feature = "debug-entity-rc"), + all(not(debug_assertions), feature = "release-entity-rc"), + ), + should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ + system dynec::system::offline_buffer::tests::test_system. All strong \ + references to an entity must be dropped before queuing for deletion and \ + removing all finalizers." 
+)] +fn test_entity_delete_unsend_system_leak() { + #[system(dynec_as(crate), thread_local)] + fn test_system( + #[dynec(local(initial = None, entity))] entity: &mut Option>, + mut entity_deleter: system::EntityDeleter, + #[dynec(global)] initials: &mut InitialEntities, + _comp1: system::ReadSimple, + ) { + if let Some(ent) = initials.strong.take() { + *entity = Some(ent); + } + + if let Some(ent) = entity { + entity_deleter.queue(&*ent); + } + } + + let mut builder = world::Builder::new(0); + builder.schedule_thread_unsafe(test_system.build()); + + let mut world = builder.build(); + + let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); + let weak = ent.weak(world.get_global::()); + world.get_global::().strong = Some(ent); + + world.execute(&tracer::Log(log::Level::Trace)); + + let storage = world.components.get_simple_storage::(); + let comp1 = storage.try_get(&weak); + assert_eq!(comp1, None); +} + +#[test] +#[cfg_attr( + any( + all(debug_assertions, feature = "debug-entity-rc"), + all(not(debug_assertions), feature = "release-entity-rc"), + ), + should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ + global state dynec::test_util::globals::InitialEntities. All strong \ + references to an entity must be dropped before queuing for deletion and \ + removing all finalizers." +)] +fn test_entity_delete_sync_global_leak() { + #[system(dynec_as(crate))] + fn test_system( + mut entity_deleter: system::EntityDeleter, + #[dynec(global)] initials: &mut InitialEntities, + _comp1: system::ReadSimple, + ) { + entity_deleter.queue(initials.strong.as_ref().expect("initials.strong missing")); + } + + let mut world = system_test!(test_system.build();); + let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); + world.get_global::().strong = Some(ent); + + world.execute(&tracer::Log(log::Level::Trace)); +} + +#[test] +#[cfg_attr( + any( + all(debug_assertions, feature = "debug-entity-rc"), + all(not(debug_assertions), feature = "release-entity-rc"), + ), + should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ + global state dynec::test_util::globals::InitialEntities. All strong \ + references to an entity must be dropped before queuing for deletion and \ + removing all finalizers." +)] +fn test_entity_delete_unsync_global_leak() { + #[system(dynec_as(crate), thread_local)] + fn test_system( + mut entity_deleter: system::EntityDeleter, + #[dynec(global(thread_local))] initials: &mut InitialEntities, + _comp1: system::ReadSimple, + ) { + entity_deleter.queue(initials.strong.as_ref().expect("initials.strong missing")); + } + + let mut builder = world::Builder::new(0); + builder.schedule_thread_unsafe(test_system.build()); + + let mut world = builder.build(); + + let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); + world.get_global_unsync::().strong = Some(ent); + + world.execute(&tracer::Log(log::Level::Trace)); +} + +#[test] +#[cfg_attr( + any( + all(debug_assertions, feature = "debug-entity-rc"), + all(not(debug_assertions), feature = "release-entity-rc"), + ), + should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ + dynec::test_util::TestArch / dynec::test_util::simple_comps::StrongRefSimple. \ + All strong references to an entity must be dropped before queuing for \ + deletion and removing all finalizers." 
+)] +fn test_entity_delete_simple_leak() { + #[system(dynec_as(crate))] + fn test_system( + mut entity_deleter: system::EntityDeleter, + #[dynec(global)] initials: &mut InitialEntities, + _srs: system::ReadSimple, + ) { + let entity = initials.weak.as_ref().expect("initials.strong missing"); + entity_deleter.queue(entity); + } + + let mut builder = world::Builder::new(0); + builder.schedule(test_system.build()); + + let mut world = builder.build(); + + let ent = world.create(crate::comps![@(crate) TestArch =>]); + let weak = ent.weak(world.get_global::()); + world.get_global::().weak = Some(weak); + + world.create(crate::comps![@(crate) TestArch => StrongRefSimple(ent)]); + + world.execute(&tracer::Log(log::Level::Trace)); +} + +#[test] +#[cfg_attr( + any( + all(debug_assertions, feature = "debug-entity-rc"), + all(not(debug_assertions), feature = "release-entity-rc"), + ), + should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ + dynec::test_util::TestArch / \ + dynec::test_util::isotope_comps::StrongRefIsotope # TestDiscrim1(29). All \ + strong references to an entity must be dropped before queuing for deletion \ + and removing all finalizers." +)] +fn test_entity_delete_isotope_leak() { + #[system(dynec_as(crate))] + fn test_system( + mut entity_deleter: system::EntityDeleter, + #[dynec(global)] initials: &mut InitialEntities, + _sri: system::ReadIsotopeFull, + ) { + let entity = initials.weak.as_ref().expect("initials.strong missing"); + entity_deleter.queue(entity); + } + + let mut builder = world::Builder::new(0); + builder.schedule(test_system.build()); + + let mut world = builder.build(); + + let ent = world.create(crate::comps![@(crate) TestArch =>]); + let weak = ent.weak(world.get_global::()); + world.get_global::().weak = Some(weak); + + world.create(crate::comps![@(crate) TestArch => @(TestDiscrim1(29), StrongRefIsotope(ent))]); + + world.execute(&tracer::Log(log::Level::Trace)); +} + +#[test] +fn test_entity_finalizer_delete() { + #[system(dynec_as(crate))] + fn test_system( + mut entity_deleter: system::EntityDeleter, + #[dynec(global)] initials: &mut InitialEntities, + deletion_flags: system::ReadSimple, + mut comp_final: system::WriteSimple, + _comp1: system::ReadSimple, + ) { + let ent = initials.strong.as_ref().expect("initials.strong missing"); + if deletion_flags.try_get(ent).is_some() { + comp_final.set(ent, None); + initials.strong = None; + } else { + entity_deleter.queue(ent); + } + } + + let mut world = system_test!(test_system.build();); + + for _ in 0..3 { + let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(13), Simple7WithFinalizerNoinit]); + let weak = ent.weak(world.get_global::()); + world.get_global::().strong = Some(ent); + + // first iteration + world.execute(&tracer::Log(log::Level::Trace)); + + let storage = world.components.get_simple_storage::(); + let comp1 = storage.try_get(&weak); + assert_eq!(comp1, Some(&Simple1OptionalNoDepNoInit(13))); + + // second iteration + world.execute(&tracer::Log(log::Level::Trace)); + + let storage = world.components.get_simple_storage::(); + let comp1 = storage.try_get(&weak); + assert_eq!(comp1, None); + } +} diff --git a/src/system/rw.rs b/src/system/rw.rs deleted file mode 100644 index 04a7a536ee..0000000000 --- a/src/system/rw.rs +++ /dev/null @@ -1,461 +0,0 @@ -use std::marker::PhantomData; -use std::{any, fmt}; - -use rayon::prelude::ParallelIterator; - -use super::accessor; -use crate::entity::ealloc; -use crate::{comp, entity, 
Archetype}; - -/// Generalizes [`ReadSimple`] and [`ReadIsotope`] for a specific discriminant -/// (through [`ReadIsotope::split`]). -pub trait Read { - /// Returns an immutable reference to the component for the specified entity, - /// or `None` if the component is not present in the entity. - fn try_get>(&self, entity: E) -> Option<&C>; - - /// Returns an immutable reference to the component for the specified entity. - /// - /// # Panics - /// This method panics if the entity is not fully initialized yet. - /// This happens when an entity is newly created and the cycle hasn't joined yet. - fn get>(&self, entity: E) -> &C - where - C: comp::Must, - { - match self.try_get(entity) { - Some(comp) => comp, - None => panic!( - "Component {}/{} implements comp::Must but is not present", - any::type_name::(), - any::type_name::() - ), - } - } - - /// Return value of [`iter`](Self::iter). - type Iter<'t>: Iterator, &'t C)> - where - Self: 't; - /// Iterates over all initialized components in this storage. - fn iter(&self) -> Self::Iter<'_>; - - /// Returns an [`Accessor`](accessor::Accessor) implementor that yields `&C` for each entity. - fn access(&self) -> accessor::MustRead - where - C: comp::Must, - { - accessor::MustRead(self, PhantomData) - } - - /// Returns an [`Accessor`](accessor::Accessor) implementor that yields `Option<&C>` for each entity. - fn try_access(&self) -> accessor::TryRead { accessor::TryRead(self, PhantomData) } - - /// Return value of [`duplicate_immut`](Self::duplicate_immut). - type DuplicateImmut<'t>: Read + 't - where - Self: 't; - /// Duplicates the current reader, - /// producing two new values that can only access the storage immutably. - fn duplicate_immut(&self) -> (Self::DuplicateImmut<'_>, Self::DuplicateImmut<'_>); - - /// Return value of [`par_iter`](Self::par_iter). - type ParIter<'t>: rayon::iter::ParallelIterator, &'t C)> - where - Self: 't, - C: comp::Must; - /// Iterates over chunks of entities in parallel. - /// - /// This returns a [rayon `ParallelIterator`](rayon::iter::ParallelIterator) - /// that processes different chunks of entities - fn par_iter<'t>(&'t self, snapshot: &'t ealloc::Snapshot) -> Self::ParIter<'t> - where - C: comp::Must; -} - -/// Extends [`Read`] with chunk reading ability -/// for storages that support chunked access. -pub trait ReadChunk { - /// Returns the chunk of components as a slice. - /// - /// # Panics - /// This method panics if any component in the chunk is missing. - /// In general, users should not get an [`entity::TempRefChunk`] - /// that includes an uninitialized entity, - /// so panic is basically impossible if [`comp::Must`] was implemented correctly. - fn get_chunk(&self, chunk: entity::TempRefChunk<'_, A>) -> &'_ [C]; - - /// Return value of [`par_iter_chunks`](Self::par_iter_chunks). - type ParIterChunks<'t>: rayon::iter::ParallelIterator< - Item = (entity::TempRefChunk<'t, A>, &'t [C]), - > - where - Self: 't; - /// Iterates over chunks of entities in parallel. - /// - /// This returns a [rayon `ParallelIterator`](rayon::iter::ParallelIterator) - /// that processes different chunks of entities - fn par_iter_chunks<'t>( - &'t self, - snapshot: &'t ealloc::Snapshot, - ) -> Self::ParIterChunks<'t>; -} - -/// Access components mutably by the entity. -/// Generalizes [`WriteSimple`], fixed-discriminant [`WriteIsotope`], -/// and their partitioned accessors. 
-/// -/// Only supports mutable access to an existing component, -/// but does not support adding or removing components -/// since only the storage values but not the storage structure can be borrowed mutably. -pub trait Mut { - /// Returns a mutable reference to the component for the specified entity, - /// or `None` if the component is not present in the entity. - /// - /// Note that this method returns `Option<&mut C>`, not `&mut Option`. - /// This means setting the Option itself to `Some`/`None` will not modify any stored value. - /// Use [`Write::set`] to add/remove a component. - fn try_get_mut>(&mut self, entity: E) -> Option<&mut C>; - - /// Return value of [`iter_mut`](Self::iter_mut). - type IterMut<'t>: Iterator, &'t mut C)> - where - Self: 't; - /// Iterates over mutable references to all initialized components in this storage. - fn iter_mut(&mut self) -> Self::IterMut<'_>; -} - -/// A [`Mut`] accessor that supports all entities, in contrast to [`MutPartition`]. -pub trait MutFull: Mut { - /// The partitioned type for this accessor. - type Partition<'t>: MutPartition<'t, A, C> - where - Self: 't; - /// Converts the accessor to a [`MutPartition`] that covers all entities. - /// - /// The actual splitting partitions can be obtained - /// by calling [`split_at`](MutPartition::split_at) on the returned value. - fn as_partition(&mut self) -> Self::Partition<'_>; - - /// Return value of [`par_iter_mut`](Self::par_iter_mut). - type ParIterMut<'t>: ParallelIterator, &'t mut C)> - where - Self: 't, - C: comp::Must; - /// Iterates over all entities in parallel. - /// - /// This returns a rayon [`ParallelIterator`] that processes different entities. - fn par_iter_mut<'t>( - &'t mut self, - snapshot: &'t ealloc::Snapshot, - ) -> Self::ParIterMut<'t> - where - C: comp::Must; -} - -/// A [`Mut`] accessor that can be split into two halves. -pub trait MutPartition<'t, A: Archetype, C: 'static>: Mut + Send + Sized { - /// Splits the accessor into two partitions. - /// - /// The first partition accesses all entities less than `entity`; - /// the second partition accesses all entities greater than or equal to `entity`. - fn split_at>(self, entity: E) -> (Self, Self); - - /// Return value of [`into_iter_mut`](Self::into_iter_mut). - type IntoIterMut: Iterator, &'t mut C)>; - /// Iterates over mutable references to all initialized components in this storage. - fn into_iter_mut(self) -> Self::IntoIterMut; -} - -/// Extends [`Mut`] with chunk writing ability -/// for storages that support chunked access. -pub trait MutChunk { - /// Returns the chunk of components as a mutable slice. - /// Typically called from an accessor. - /// - /// # Panics - /// This method panics if any component in the chunk is missing. - /// In general, if [`comp::Must`] is implemented correctly, - /// users should not obtain an [`entity::TempRefChunk`] that includes an uninitialized entity, - /// so panic is practically impossible. - fn get_chunk_mut(&mut self, chunk: entity::TempRefChunk<'_, A>) -> &'_ mut [C] - where - C: comp::Must; -} - -/// A [`MutChunk`] accessor that supports all entities, in contrast to [`MutPartitionChunk`]. -pub trait MutFullChunk: MutChunk { - /// The partitioned type for this accessor. - type Partition<'t>: MutPartitionChunk<'t, A, C> - where - Self: 't; - /// Converts the accessor to a [`MutPartitionChunk`] that covers all entities. - /// - /// The actual splitting partitions can be obtained - /// by calling [`split_at`](MutPartition::split_at) on the returned value. 
- fn as_partition_chunk(&mut self) -> Self::Partition<'_>; - - /// Return value of [`par_iter_chunks_mut`](Self::par_iter_chunks_mut). - type ParIterChunksMut<'t>: ParallelIterator, &'t mut [C])> - where - Self: 't, - C: comp::Must; - /// Iterates over all entity chunks in parallel. - /// - /// This returns a rayon [`ParallelIterator`] that processes different chunks of entities. - fn par_iter_chunks_mut<'t>( - &'t mut self, - snapshot: &'t ealloc::Snapshot, - ) -> Self::ParIterChunksMut<'t> - where - C: comp::Must; -} - -/// A [`Mut`] accessor that can be split into two halves. -pub trait MutPartitionChunk<'t, A: Archetype, C: 'static>: - MutChunk + MutPartition<'t, A, C> -{ - /// Return value of [`into_iter_chunks_mut`](Self::into_iter_chunks_mut). - type IntoIterChunksMut: Iterator, &'t mut [C])>; - /// Iterates over mutable references to all initialized components in this storage. - fn into_iter_chunks_mut(self) -> Self::IntoIterChunksMut; -} - -/// Generalizes [`WriteSimple`] and [`WriteIsotope`] for a specific discriminant -/// (through [`WriteIsotope::split_isotopes`]). -pub trait Write: Read + Mut { - /// Returns a mutable reference to the component for the specified entity. - /// - /// This method is infallible, assuming [`comp::Must`] is only implemented - /// for components with [`Required`](comp::Presence::Required) presence. - fn get_mut>(&mut self, entity: E) -> &mut C - where - C: comp::Must, - { - match self.try_get_mut(entity) { - Some(comp) => comp, - None => panic!( - "Component {}/{} implements comp::Must but is not present", - any::type_name::(), - any::type_name::(), - ), - } - } - - /// Overwrites the component for the specified entity. - /// - /// Passing `None` to this method removes the component from the entity. - /// This leads to a panic for components with [`comp::Presence::Required`] presence. - fn set>(&mut self, entity: E, value: Option) -> Option; - - /// Returns an [`Accessor`](accessor::Accessor) implementor that yields `&C` for each entity. - fn access_mut(&mut self) -> accessor::MustWrite - where - C: comp::Must, - { - accessor::MustWrite(self, PhantomData) - } - - /// Returns an [`Accessor`](accessor::Accessor) implementor that yields `Option<&C>` for each entity. - fn try_access_mut(&mut self) -> accessor::TryWrite { - accessor::TryWrite(self, PhantomData) - } -} - -/// Provides access to a simple component in a specific archetype. -pub trait ReadSimple>: Read { - /// Returns a [`Chunked`](accessor::Chunked) accessor that can be used in - /// [`EntityIterator`](super::EntityIterator) - /// to provide chunked iteration to an entity. - fn access_chunk(&self) -> accessor::MustReadChunkSimple<'_, A, C>; -} - -/// Provides access to a simple component in a specific archetype. -pub trait WriteSimple>: ReadSimple + Write { - /// Returns a [`Chunked`](accessor::Chunked) accessor that can be used in - /// [`EntityIterator`](super::EntityIterator) - /// to provide chunked iteration to an entity. - fn access_chunk_mut(&mut self) -> accessor::MustWriteChunkSimple<'_, A, C>; -} - -/// Provides access to an isotope component in a specific archetype. -/// -/// `K` is the type used to index the discriminant. -/// For partial isotope access, `K` is usually `usize`. -/// For full isotope access, `K` is the discriminant type. -/// -/// Partial isotope access also implements [`ReadIsotopeRef`], -/// which allows using the accessor immutably -/// so that it can be dispatched to multiple workers. 
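// ---- editor's illustrative sketch (not part of this patch) ----
// The `Write::set` semantics described above, assuming `Label` is an optional
// simple component of `TestArch`.
fn relabel(
    writer: &mut impl system::Write<TestArch, Label>,
    keep: entity::TempRef<'_, TestArch>,
    clear: entity::TempRef<'_, TestArch>,
) {
    // `Some` inserts or overwrites, returning the previous value if there was one.
    let _old = writer.set(keep, Some(Label(1)));
    // `None` removes the component; for `Required`-presence components this panics.
    let _removed = writer.set(clear, None);
}
// ---- end sketch ----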
-/// -/// For full accessors, getters require a mutable receiver -/// to allow lazy initialization of new discriminants. -/// Consider [splitting](Self::split) accessors, -/// which return [`Read`] with a shared receiver. -/// If it can be asserted that no uninitialized discriminants will be encountered, -/// use with [`known_discrims`](Self::known_discrims). -/// -/// Since mutable receiver is only required for initializing new isotopes, -/// functions that only work on known existing isotops such as [`known_discrims`](Self::known_discrims) -/// only require a shared receiver. -pub trait ReadIsotope, K = >::Discrim> -where - K: fmt::Debug + Copy + 'static, -{ - /// Retrieves the component for the given entity and discriminant. - /// - /// This method is infallible for correctly implemented `comp::Must`, - /// which returns the auto-initialized value for missing components. - fn get>(&mut self, entity: E, discrim: K) -> &C - where - C: comp::Must, - { - match self.try_get(entity, discrim) { - Some(value) => value, - None => panic!( - "{}: comp::Must<{}> but has no default initializer", - any::type_name::(), - any::type_name::() - ), - } - } - - /// Returns an immutable reference to the component for the specified entity and discriminant, - /// or the default value for isotopes with a default initializer or `None` - /// if the component is not present in the entity. - fn try_get>(&mut self, entity: E, discrim: K) -> Option<&C>; - - /// Return value of [`known_discrims`](Self::known_discrims). - type KnownDiscrims<'t>: Iterator>::Discrim> + 't - where - Self: 't; - /// Iterates over all known discriminants of the component type. - /// - /// The yielded discriminants are not in any guaranteed order. - fn known_discrims(&self) -> Self::KnownDiscrims<'_>; - - /// Return value of [`get_all`](Self::get_all). - type GetAll<'t>: Iterator + 't - where - Self: 't; - /// Iterates over all known isotopes for a specific entity. - fn get_all>(&self, entity: E) -> Self::GetAll<'_>; - - /// Return value of [`iter`](Self::iter). - type Iter<'t>: Iterator, &'t C)> - where - Self: 't; - /// Iterates over all components of a specific discriminant. - /// - /// Note that the initializer is not called for lazy-initialized isotope components. - /// To avoid confusing behavior, do not use this function if [`C: comp::Must`](comp::Must). - fn iter(&mut self, discrim: K) -> Self::Iter<'_>; - - /// Return value of [`split`](Self::split). - type Split<'t>: Read + 't - where - Self: 't; - /// Splits the accessor into multiple [`Read`] implementors - /// so that they can be used independently. - fn split(&mut self, keys: [K; N]) -> [Self::Split<'_>; N]; -} - -/// Provides access to an isotope component in a specific archetype -/// without requiring a mutable receiver. -/// Only available for partial storages. -pub trait ReadIsotopeRef, K>: ReadIsotope -where - K: fmt::Debug + Copy + 'static, -{ - /// Retrieves the component for the given entity and discriminant. - /// - /// Identical to [`ReadIsotope::get`] but does not require a mutable receiver. 
- fn get_ref>(&self, entity: E, key: K) -> &C - where - C: comp::Must, - { - match self.try_get_ref(entity, key) { - Some(value) => value, - None => panic!( - "{}: comp::Must<{}> but has no default initializer", - any::type_name::(), - any::type_name::() - ), - } - } - - /// Returns an immutable reference to the component for the specified entity and discriminant, - /// or the default value for isotopes with a default initializer or `None` - /// if the component is not present in the entity. - /// - /// Identical to [`ReadIsotope::try_get`] but does not require a mutable receiver. - fn try_get_ref>(&self, entity: E, key: K) -> Option<&C>; - - /// Return value of [`iter_ref`](Self::iter_ref). - type IterRef<'t>: Iterator, &'t C)> - where - Self: 't; - /// Iterates over all components of a specific discriminant. - /// - /// Identical to [`ReadIsotope::iter`] but does not require a mutable receiver. - fn iter_ref(&self, key: K) -> Self::IterRef<'_>; -} - -/// Provides access to an isotope component in a specific archetype. -pub trait WriteIsotope, K = >::Discrim>: - ReadIsotope -where - K: fmt::Debug + Copy + 'static, -{ - /// Retrieves the component for the given entity and discriminant. - /// - /// This method is infallible for correctly implemented `comp::Must`, - /// which returns the auto-initialized value for missing components. - fn get_mut>(&mut self, entity: E, discrim: K) -> &mut C - where - C: comp::Must, - { - match self.try_get_mut(entity, discrim) { - Some(value) => value, - None => panic!( - "{}: comp::Must<{}> but has no default initializer", - any::type_name::(), - any::type_name::() - ), - } - } - - /// Returns a mutable reference to the component for the specified entity and discriminant, - /// automatically initialized with the default initializer if present, - /// or `None` if the component is unset and has no default initializer. - /// - /// Note that this method returns `Option<&mut C>`, not `&mut Option`. - /// This means setting the Option itself to `Some`/`None` will not modify any stored value. - /// Use [`WriteIsotope::set`] to add/remove a component. - fn try_get_mut>(&mut self, entity: E, key: K) -> Option<&mut C>; - - /// Overwrites the component for the specified entity and discriminant. - /// - /// Passing `None` to this method removes the component from the entity. - fn set>( - &mut self, - entity: E, - key: K, - value: Option, - ) -> Option; - - /// Return value of [`iter_mut`](Self::iter_mut). - type IterMut<'t>: Iterator, &'t mut C)> - where - Self: 't; - /// Iterates over mutable references to all components of a specific discriminant. - fn iter_mut(&mut self, key: K) -> Self::IterMut<'_>; - - /// Return value of [`split_isotopes`](Self::split_isotopes). - type SplitDiscrim<'t>: Write + 't - where - Self: 't; - /// Splits the accessor into multiple [`Write`] implementors - /// so that they can be used in entity iteration independently. 
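// ---- editor's illustrative sketch (not part of this patch) ----
// Splitting an isotope writer into independent per-discriminant accessors as described
// above, assuming `Weights` is an isotope component of `TestArch` discriminated by
// `TestDiscrim1`.
fn split_for_workers(iso: &mut impl system::WriteIsotope<TestArch, Weights>) {
    // Each element implements `Write<TestArch, Weights>` for exactly one discriminant,
    // so the two halves can drive independent entity-iteration jobs.
    let [_first, _second] = iso.split_isotopes([TestDiscrim1(0), TestDiscrim1(1)]);
}
// ---- end sketch ----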
- fn split_isotopes(&mut self, keys: [K; N]) -> [Self::SplitDiscrim<'_>; N]; -} diff --git a/src/test_util.rs b/src/test_util.rs index 8a5d9dae46..11c3332d93 100644 --- a/src/test_util.rs +++ b/src/test_util.rs @@ -1,214 +1,22 @@ #![allow(missing_docs)] #![allow(clippy::too_many_arguments)] -use std::collections::{BTreeMap, BTreeSet, HashMap}; -use std::fmt; -use std::hash::Hash; +use std::collections::BTreeSet; use std::num::NonZeroU32; -use std::sync::Arc; -use std::time::{Duration, Instant}; -use indexmap::IndexSet; -use parking_lot::{Condvar, Mutex, Once}; +use parking_lot::Once; -use crate::entity::{self, ealloc}; -use crate::{comp, global, storage, system, Archetype, Entity}; +use crate::entity::ealloc; +use crate::{system, Archetype}; -/// Records event and ensures that they are in the correct order. -pub struct EventTracer { - dependencies: HashMap>, - seen: Mutex>, -} - -impl EventTracer { - /// Creates a new event tracer that ensures `a` executes after `b` for each `(a, b)` input. - pub fn new(orders: impl IntoIterator) -> Self { - let mut dependencies: HashMap> = HashMap::new(); - for (before, after) in orders { - dependencies.entry(after).or_default().push(before); - } - let seen = Mutex::new(IndexSet::new()); - - Self { dependencies, seen } - } - - /// Records that `event` has happened. - /// - /// # Panics - /// Panics if the same `event` was sent twice or a dependency is not satisfied. - pub fn trace(&self, event: T) { - let mut seen = self.seen.lock(); - - if let Some(deps) = self.dependencies.get(&event) { - for dep in deps { - assert!(seen.contains(dep), "{:?} should happen after {:?}", event, dep); - } - } - - let (index, new) = seen.insert_full(event); - assert!( - !new, - "{:?} is inserted twice", - seen.get_index(index).expect("insert_full should return valid index") - ); - } - - /// Returns the events observed in this tracer. - pub fn get_events(self) -> Vec { - let seen = self.seen.into_inner(); - seen.into_iter().collect() - } -} - -/// An emulated clock that supports ticking. -pub struct Clock { - inner: Mutex>, - check_completeness: Condvar, -} - -struct Inner { - iter: T::Iterator, - now: T, - map: BTreeMap>, -} - -impl Default for Clock { - fn default() -> Self { - let mut iter = T::iter(); - let now = iter.next().expect("Tick enum must not be empty"); - Self { - inner: Mutex::new(Inner { iter, now, map: BTreeMap::new() }), - check_completeness: Condvar::new(), - } - } -} - -impl Clock -where - T::Iterator: Send + Sync, -{ - /// Blocks the thread until the clock ticks `until`. - /// - /// Asserts the current tick is `now`. - pub fn wait(&self, now: T, until: T) { - let mut inner = self.inner.lock(); +mod event_tracer; +pub use event_tracer::EventTracer; - assert!(now < until); - assert!(now == inner.now); +mod clock; +pub use clock::{Clock, Tick}; - let cv = Arc::clone(inner.map.entry(until).or_default()); - cv.wait(&mut inner); - - self.check_completeness.notify_one(); - } - - /// Sets the clock to the next tick. - /// - /// Asserts the current tick is `expect`. - pub(crate) fn tick(&self, expect: T) { - let mut inner = self.inner.lock(); - - let next = inner.iter.next().expect("Tick enum has been exhausted"); - assert!(next == expect); - - inner.now = next; - - if let Some(cv) = inner.map.get(&next) { - cv.notify_all(); - } - } - - /// Orchestrates a test with this clock. 
- pub(crate) fn orchestrate(&self, mut can_tick_complete: impl FnMut(T) -> bool + Send) { - rayon::scope(|scope| { - scope.spawn(|_| { - let mut inner = self.inner.lock(); - - for (i, tick) in T::iter().enumerate() { - if i > 0 { - self.tick(tick); - } - - let timeout = Instant::now() + Duration::from_secs(5); - - loop { - if can_tick_complete(tick) { - break; - } else { - if timeout < Instant::now() { - panic!( - "Timeout exceeded without fulfilling completeness \ - requirements of {:?}", - tick - ); - } - - self.check_completeness.wait_until(&mut inner, timeout); - } - } - } - }); - }); - } -} - -pub trait Tick: - fmt::Debug + Copy + Eq + Ord + strum::IntoEnumIterator + Send + Sync + Sized -{ -} -impl Tick for T {} - -/// A synchronization util that blocks until sufficiently many threads are waiting concurrently. -/// -/// This is used for testing that multiple threads can run concurrently -/// (in contrast to one blocking the other). -#[derive(Debug)] -pub struct AntiSemaphore { - saturation: usize, - lock: Mutex, - condvar: Condvar, -} - -#[derive(Debug)] -struct AntiSemaphoreInner { - current: usize, -} - -impl AntiSemaphore { - /// Creates a new semaphore. - /// `saturation` is the number of threads that can wait on the lock. - pub fn new(saturation: usize) -> Self { - Self { - saturation, - lock: Mutex::new(AntiSemaphoreInner { current: 0 }), - condvar: Condvar::new(), - } - } - - /// Blocks until the semaphore is saturated. - pub fn wait(&self) { - let mut lock = self.lock.lock(); - log::trace!( - "AntiSemaphore(current: {}, saturation: {}).wait()", - lock.current, - self.saturation - ); - lock.current += 1; - if lock.current > self.saturation { - panic!("AntiSemaphore exceeded saturation"); - } - - if lock.current == self.saturation { - lock.current = 0; - self.condvar.notify_all(); - } else { - let result = self.condvar.wait_for(&mut lock, Duration::from_secs(5)); - if result.timed_out() { - panic!("Deadlock: AntiSemaphore not saturated for more than 5 seconds"); - } - } - } -} +mod anti_semaphore; +pub use anti_semaphore::AntiSemaphore; pub(crate) fn init() { static SET_LOGGER_ONCE: Once = Once::new(); @@ -224,137 +32,27 @@ impl Archetype for TestArch { ealloc::Recycling, ealloc::ThreadRngShardAssigner>; } -/// A test discriminant. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, dynec_codegen::Discrim)] -#[dynec(dynec_as(crate))] -pub struct TestDiscrim1(pub(crate) usize); - -/// An alternative test discriminant. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, dynec_codegen::Discrim)] -#[dynec(dynec_as(crate))] -pub struct TestDiscrim2(pub(crate) usize); - -// Test component summary: -// Simple1: optional, depends [] -// Simple2: optional, depends on Simple2 -// Simple3: optional, depends on Simple1 and Simple2 -// Simple4: optional, depends on Simple1 and Simple2 -// Simple5: required, no init -// Simple6: required, depends [] - -/// optional, non-init, depless -#[comp(dynec_as(crate), of = TestArch)] -#[derive(Debug, PartialEq)] -pub struct Simple1OptionalNoDepNoInit(pub i32); - -/// optional, depends on Simple1 -#[comp(dynec_as(crate), of = TestArch, init = init_comp2/1)] -#[derive(Debug)] -pub struct Simple2OptionalDepends1(pub i32); -fn init_comp2(c1: &Simple1OptionalNoDepNoInit) -> Simple2OptionalDepends1 { - Simple2OptionalDepends1(c1.0 + 2) -} - -/// optional, depends on Simple1 + Simple2 -#[comp( - dynec_as(crate), - of = TestArch, - init = |c1: &Simple1OptionalNoDepNoInit, c2: &Simple2OptionalDepends1| Simple3OptionalDepends12(c1.0 * 3, c2.0 * 5), -)] -#[derive(Debug)] -pub struct Simple3OptionalDepends12(pub i32, pub i32); - -/// optional, depends on Simple1 + Simple2 -#[comp( - dynec_as(crate), - of = TestArch, - init = |c1: &Simple1OptionalNoDepNoInit, c2: &Simple2OptionalDepends1| Simple4Depends12(c1.0 * 7, c2.0 * 8), -)] -#[derive(Debug, PartialEq)] -pub struct Simple4Depends12(pub i32, pub i32); - -/// required, non-init -#[comp(dynec_as(crate), of = TestArch, required)] -#[derive(Debug, PartialEq)] -pub struct Simple5RequiredNoInit(pub i32); - -/// required, auto-init, depless -#[comp(dynec_as(crate), of = TestArch, required, init = || Simple6RequiredWithInitNoDeps(9))] -#[derive(Debug)] -pub struct Simple6RequiredWithInitNoDeps(pub i32); +mod simple_comps; +pub use simple_comps::*; -/// non-init, has finalizers -#[comp(dynec_as(crate), of = TestArch, finalizer)] -pub struct Simple7WithFinalizerNoinit; +mod isotope_comps; +pub use isotope_comps::*; -/// a generic component -pub struct SimpleN(pub i32); - -impl entity::Referrer for SimpleN { - fn visit_type(arg: &mut entity::referrer::VisitTypeArg) { arg.mark::(); } - fn visit_mut(&mut self, _: &mut V) {} -} - -impl comp::SimpleOrIsotope for SimpleN { - const PRESENCE: comp::Presence = comp::Presence::Optional; - const INIT_STRATEGY: comp::InitStrategy = comp::InitStrategy::None; - - type Storage = storage::Vec; -} -impl comp::Simple for SimpleN { - const IS_FINALIZER: bool = false; -} - -/// Does not have auto init -#[comp(dynec_as(crate), of = TestArch, isotope = TestDiscrim1)] -#[derive(Debug, Clone, PartialEq)] -pub struct IsoNoInit(pub i32); - -/// Has auto init -#[comp(dynec_as(crate), of = TestArch, isotope = TestDiscrim2, init = || IsoWithInit(73))] -#[derive(Debug, Clone, PartialEq)] -pub struct IsoWithInit(pub i32); - -/// A simple component with a strong reference to [`TestArch`]. -#[comp(dynec_as(crate), of = TestArch)] -pub struct StrongRefSimple(#[entity] pub Entity); - -/// An isotope component with a strong reference to [`TestArch`]. -#[comp(dynec_as(crate), of = TestArch, isotope = TestDiscrim1)] -pub struct StrongRefIsotope(#[entity] pub Entity); - -/// A generic global state with an initializer. -#[global(dynec_as(crate), initial)] -#[derive(Default)] -pub struct Aggregator { - pub comp30_sum: i32, - pub comp41_product: i32, -} - -/// An entity-referencing global state. -#[global(dynec_as(crate), initial)] -#[derive(Default)] -pub struct InitialEntities { - /// A strong reference. 
- #[entity] - pub strong: Option>, - /// A weak reference. - #[entity] - pub weak: Option>, -} +mod globals; +pub use globals::*; /// A dummy system used for registering all non-entity-referencing test components. #[system(dynec_as(crate))] pub fn use_all_bare( - _comp1: impl system::ReadSimple, - _comp2: impl system::ReadSimple, - _comp3: impl system::ReadSimple, - _comp4: impl system::ReadSimple, - _comp5: impl system::ReadSimple, - _comp6: impl system::ReadSimple, - _comp_final: impl system::ReadSimple, - _iso1: impl system::ReadIsotope, - _iso2: impl system::ReadIsotope, + _comp1: system::ReadSimple, + _comp2: system::ReadSimple, + _comp3: system::ReadSimple, + _comp4: system::ReadSimple, + _comp5: system::ReadSimple, + _comp6: system::ReadSimple, + _comp_final: system::ReadSimple, + _iso1: system::ReadIsotopeFull, + _iso2: system::ReadIsotopeFull, #[dynec(global)] _agg: &Aggregator, ) { } @@ -362,21 +60,21 @@ pub fn use_all_bare( /// A dummy system with minimally simple dependencies. #[system(dynec_as(crate))] pub fn use_comp_n( - _comp0: impl system::ReadSimple>, - _comp1: impl system::ReadSimple>, - _comp2: impl system::ReadSimple>, - _comp3: impl system::ReadSimple>, - _comp4: impl system::ReadSimple>, - _comp5: impl system::ReadSimple>, - _comp6: impl system::ReadSimple>, - _comp7: impl system::ReadSimple>, - _comp8: impl system::ReadSimple>, - _comp9: impl system::ReadSimple>, - _comp10: impl system::ReadSimple>, - _comp11: impl system::ReadSimple>, - _comp12: impl system::ReadSimple>, - _comp13: impl system::ReadSimple>, - _comp14: impl system::ReadSimple>, - _comp15: impl system::ReadSimple>, + _comp0: system::ReadSimple>, + _comp1: system::ReadSimple>, + _comp2: system::ReadSimple>, + _comp3: system::ReadSimple>, + _comp4: system::ReadSimple>, + _comp5: system::ReadSimple>, + _comp6: system::ReadSimple>, + _comp7: system::ReadSimple>, + _comp8: system::ReadSimple>, + _comp9: system::ReadSimple>, + _comp10: system::ReadSimple>, + _comp11: system::ReadSimple>, + _comp12: system::ReadSimple>, + _comp13: system::ReadSimple>, + _comp14: system::ReadSimple>, + _comp15: system::ReadSimple>, ) { } diff --git a/src/test_util/anti_semaphore.rs b/src/test_util/anti_semaphore.rs new file mode 100644 index 0000000000..bb9fde9d43 --- /dev/null +++ b/src/test_util/anti_semaphore.rs @@ -0,0 +1,55 @@ +use std::time::Duration; + +use parking_lot::{Condvar, Mutex}; + +/// A synchronization util that blocks until sufficiently many threads are waiting concurrently. +/// +/// This is used for testing that multiple threads can run concurrently +/// (in contrast to one blocking the other). +#[derive(Debug)] +pub struct AntiSemaphore { + saturation: usize, + lock: Mutex, + condvar: Condvar, +} + +#[derive(Debug)] +struct AntiSemaphoreInner { + current: usize, +} + +impl AntiSemaphore { + /// Creates a new semaphore. + /// `saturation` is the number of threads that can wait on the lock. + pub fn new(saturation: usize) -> Self { + Self { + saturation, + lock: Mutex::new(AntiSemaphoreInner { current: 0 }), + condvar: Condvar::new(), + } + } + + /// Blocks until the semaphore is saturated. 
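// ---- editor's illustrative sketch (not part of this patch) ----
// Intended use of `AntiSemaphore` in tests: each worker parks in `wait()` until
// `saturation` workers are waiting at the same time, which proves they really ran
// concurrently (the 5-second timeout inside `wait()` panics otherwise).
#[cfg(test)]
fn assert_two_workers_overlap() {
    use std::sync::Arc;

    let sem = Arc::new(AntiSemaphore::new(2));
    let (a, b) = (Arc::clone(&sem), Arc::clone(&sem));
    let t1 = std::thread::spawn(move || a.wait());
    let t2 = std::thread::spawn(move || b.wait());
    t1.join().unwrap();
    t2.join().unwrap();
}
// ---- end sketch ----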
+ pub fn wait(&self) { + let mut lock = self.lock.lock(); + log::trace!( + "AntiSemaphore(current: {}, saturation: {}).wait()", + lock.current, + self.saturation + ); + lock.current += 1; + if lock.current > self.saturation { + panic!("AntiSemaphore exceeded saturation"); + } + + if lock.current == self.saturation { + lock.current = 0; + self.condvar.notify_all(); + } else { + let result = self.condvar.wait_for(&mut lock, Duration::from_secs(5)); + if result.timed_out() { + panic!("Deadlock: AntiSemaphore not saturated for more than 5 seconds"); + } + } + } +} diff --git a/src/test_util/clock.rs b/src/test_util/clock.rs new file mode 100644 index 0000000000..f830554503 --- /dev/null +++ b/src/test_util/clock.rs @@ -0,0 +1,104 @@ +use std::collections::BTreeMap; +use std::fmt; +use std::sync::Arc; +use std::time::{Duration, Instant}; + +use parking_lot::{Condvar, Mutex}; + +/// An emulated clock that supports ticking. +pub struct Clock { + inner: Mutex>, + check_completeness: Condvar, +} + +struct Inner { + iter: T::Iterator, + now: T, + map: BTreeMap>, +} + +impl Default for Clock { + fn default() -> Self { + let mut iter = T::iter(); + let now = iter.next().expect("Tick enum must not be empty"); + Self { + inner: Mutex::new(Inner { iter, now, map: BTreeMap::new() }), + check_completeness: Condvar::new(), + } + } +} + +impl Clock +where + T::Iterator: Send + Sync, +{ + /// Blocks the thread until the clock ticks `until`. + /// + /// Asserts the current tick is `now`. + pub fn wait(&self, now: T, until: T) { + let mut inner = self.inner.lock(); + + assert!(now < until); + assert!(now == inner.now); + + let cv = Arc::clone(inner.map.entry(until).or_default()); + cv.wait(&mut inner); + + self.check_completeness.notify_one(); + } + + /// Sets the clock to the next tick. + /// + /// Asserts the current tick is `expect`. + pub(crate) fn tick(&self, expect: T) { + let mut inner = self.inner.lock(); + + let next = inner.iter.next().expect("Tick enum has been exhausted"); + assert!(next == expect); + + inner.now = next; + + if let Some(cv) = inner.map.get(&next) { + cv.notify_all(); + } + } + + /// Orchestrates a test with this clock. + pub(crate) fn orchestrate(&self, mut can_tick_complete: impl FnMut(T) -> bool + Send) { + rayon::scope(|scope| { + scope.spawn(|_| { + let mut inner = self.inner.lock(); + + for (i, tick) in T::iter().enumerate() { + if i > 0 { + self.tick(tick); + } + + let timeout = Instant::now() + Duration::from_secs(5); + + loop { + if can_tick_complete(tick) { + break; + } else { + if timeout < Instant::now() { + panic!( + "Timeout exceeded without fulfilling completeness \ + requirements of {:?}", + tick + ); + } + + self.check_completeness.wait_until(&mut inner, timeout); + } + } + } + }); + }); + } +} + +pub trait Tick: + fmt::Debug + Copy + Eq + Ord + strum::IntoEnumIterator + Send + Sync + Sized +{ +} +impl Tick for T {} diff --git a/src/test_util/event_tracer.rs b/src/test_util/event_tracer.rs new file mode 100644 index 0000000000..e6c80d548b --- /dev/null +++ b/src/test_util/event_tracer.rs @@ -0,0 +1,52 @@ +use std::collections::HashMap; +use std::fmt; +use std::hash::Hash; + +use indexmap::IndexSet; +use parking_lot::Mutex; + +/// Records event and ensures that they are in the correct order. +pub struct EventTracer { + dependencies: HashMap>, + seen: Mutex>, +} + +impl EventTracer { + /// Creates a new event tracer that ensures `a` executes after `b` for each `(a, b)` input. 
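// ---- editor's illustrative sketch (not part of this patch) ----
// How a test would drive `EventTracer`: the pairs passed to `new` declare ordering
// constraints between events, `trace` records an occurrence (and panics on a violated
// constraint or a duplicate, per the docs above), and `get_events` returns the order seen.
#[cfg(test)]
fn trace_in_order() {
    let tracer = EventTracer::new([(1, 2)]); // constrain the relative order of events 1 and 2
    tracer.trace(1);
    tracer.trace(2);
    assert_eq!(tracer.get_events(), vec![1, 2]);
}
// ---- end sketch ----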
+ pub fn new(orders: impl IntoIterator) -> Self { + let mut dependencies: HashMap> = HashMap::new(); + for (before, after) in orders { + dependencies.entry(after).or_default().push(before); + } + let seen = Mutex::new(IndexSet::new()); + + Self { dependencies, seen } + } + + /// Records that `event` has happened. + /// + /// # Panics + /// Panics if the same `event` was sent twice or a dependency is not satisfied. + pub fn trace(&self, event: T) { + let mut seen = self.seen.lock(); + + if let Some(deps) = self.dependencies.get(&event) { + for dep in deps { + assert!(seen.contains(dep), "{:?} should happen after {:?}", event, dep); + } + } + + let (index, new) = seen.insert_full(event); + assert!( + !new, + "{:?} is inserted twice", + seen.get_index(index).expect("insert_full should return valid index") + ); + } + + /// Returns the events observed in this tracer. + pub fn get_events(self) -> Vec { + let seen = self.seen.into_inner(); + seen.into_iter().collect() + } +} diff --git a/src/test_util/globals.rs b/src/test_util/globals.rs new file mode 100644 index 0000000000..5a02b2d8a2 --- /dev/null +++ b/src/test_util/globals.rs @@ -0,0 +1,22 @@ +use super::TestArch; +use crate::{entity, global, Entity}; + +/// A generic global state with an initializer. +#[global(dynec_as(crate), initial)] +#[derive(Default)] +pub struct Aggregator { + pub comp30_sum: i32, + pub comp41_product: i32, +} + +/// An entity-referencing global state. +#[global(dynec_as(crate), initial)] +#[derive(Default)] +pub struct InitialEntities { + /// A strong reference. + #[entity] + pub strong: Option>, + /// A weak reference. + #[entity] + pub weak: Option>, +} diff --git a/src/test_util/isotope_comps.rs b/src/test_util/isotope_comps.rs new file mode 100644 index 0000000000..777d70d976 --- /dev/null +++ b/src/test_util/isotope_comps.rs @@ -0,0 +1,28 @@ +use std::hash::Hash; + +use super::TestArch; +use crate::{comp, Entity}; + +/// A test discriminant. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, dynec_codegen::Discrim)] +#[dynec(dynec_as(crate))] +pub struct TestDiscrim1(pub(crate) usize); + +/// An alternative test discriminant. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, dynec_codegen::Discrim)] +#[dynec(dynec_as(crate))] +pub struct TestDiscrim2(pub(crate) usize); + +/// Does not have auto init +#[comp(dynec_as(crate), of = TestArch, isotope = TestDiscrim1)] +#[derive(Debug, Clone, PartialEq)] +pub struct IsoNoInit(pub i32); + +/// Has auto init +#[comp(dynec_as(crate), of = TestArch, isotope = TestDiscrim2, init = || IsoWithInit(73), required)] +#[derive(Debug, Clone, PartialEq)] +pub struct IsoWithInit(pub i32); + +/// An isotope component with a strong reference to [`TestArch`]. 
+#[comp(dynec_as(crate), of = TestArch, isotope = TestDiscrim1)] +pub struct StrongRefIsotope(#[entity] pub Entity); diff --git a/src/test_util/simple_comps.rs b/src/test_util/simple_comps.rs new file mode 100644 index 0000000000..d95b211c81 --- /dev/null +++ b/src/test_util/simple_comps.rs @@ -0,0 +1,80 @@ +use std::num::NonZeroU32; + +use super::TestArch; +use crate::entity::{self}; +use crate::{comp, storage, Entity}; + +// Test component summary: +// Simple1: optional, depends [] +// Simple2: optional, depends on Simple2 +// Simple3: optional, depends on Simple1 and Simple2 +// Simple4: optional, depends on Simple1 and Simple2 +// Simple5: required, no init +// Simple6: required, depends [] + +/// optional, non-init, depless +#[comp(dynec_as(crate), of = TestArch)] +#[derive(Debug, PartialEq)] +pub struct Simple1OptionalNoDepNoInit(pub i32); + +/// optional, depends on Simple1 +#[comp(dynec_as(crate), of = TestArch, init = init_comp2/1)] +#[derive(Debug)] +pub struct Simple2OptionalDepends1(pub i32); +fn init_comp2(c1: &Simple1OptionalNoDepNoInit) -> Simple2OptionalDepends1 { + Simple2OptionalDepends1(c1.0 + 2) +} + +/// optional, depends on Simple1 + Simple2 +#[comp( + dynec_as(crate), + of = TestArch, + init = |c1: &Simple1OptionalNoDepNoInit, c2: &Simple2OptionalDepends1| Simple3OptionalDepends12(c1.0 * 3, c2.0 * 5), +)] +#[derive(Debug)] +pub struct Simple3OptionalDepends12(pub i32, pub i32); + +/// optional, depends on Simple1 + Simple2 +#[comp( + dynec_as(crate), + of = TestArch, + init = |c1: &Simple1OptionalNoDepNoInit, c2: &Simple2OptionalDepends1| Simple4Depends12(c1.0 * 7, c2.0 * 8), +)] +#[derive(Debug, PartialEq)] +pub struct Simple4Depends12(pub i32, pub i32); + +/// required, non-init +#[comp(dynec_as(crate), of = TestArch, required)] +#[derive(Debug, PartialEq)] +pub struct Simple5RequiredNoInit(pub i32); + +/// required, auto-init, depless +#[comp(dynec_as(crate), of = TestArch, required, init = || Simple6RequiredWithInitNoDeps(9))] +#[derive(Debug)] +pub struct Simple6RequiredWithInitNoDeps(pub i32); + +/// non-init, has finalizers +#[comp(dynec_as(crate), of = TestArch, finalizer)] +pub struct Simple7WithFinalizerNoinit; + +/// a generic component +pub struct SimpleN(pub i32); + +impl entity::Referrer for SimpleN { + fn visit_type(arg: &mut entity::referrer::VisitTypeArg) { arg.mark::(); } + fn visit_mut(&mut self, _: &mut V) {} +} + +impl comp::SimpleOrIsotope for SimpleN { + const PRESENCE: comp::Presence = comp::Presence::Optional; + const INIT_STRATEGY: comp::InitStrategy = comp::InitStrategy::None; + + type Storage = storage::Vec; +} +impl comp::Simple for SimpleN { + const IS_FINALIZER: bool = false; +} + +/// A simple component with a strong reference to [`TestArch`]. +#[comp(dynec_as(crate), of = TestArch)] +pub struct StrongRefSimple(#[entity] pub Entity); diff --git a/src/util.rs b/src/util.rs index 7cf63a694c..8927be1ee2 100644 --- a/src/util.rs +++ b/src/util.rs @@ -4,8 +4,7 @@ use std::any; use std::any::TypeId; use std::borrow::Borrow; -use std::num::NonZeroU32; -use std::{cmp, fmt, hash, mem, ops}; +use std::{cmp, fmt, hash, mem, num, ops}; /// A generic mutable/immutable reference type. pub trait Ref { @@ -48,6 +47,18 @@ where fn deref(&self) -> &Self::Target { &self.0 } } +/// A container that implements [`ops::Deref`]/[`ops::DerefMut`] +/// without any special logic. 
+pub struct OwnedDeref(pub T); + +impl ops::Deref for OwnedDeref { + type Target = T; + fn deref(&self) -> &T { &self.0 } +} +impl ops::DerefMut for OwnedDeref { + fn deref_mut(&mut self) -> &mut T { &mut self.0 } +} + /// A TypeId that may include type name for debugging. #[derive(Debug, Clone, Copy)] pub struct DbgTypeId { @@ -116,9 +127,17 @@ impl Borrow for DbgTypeId { /// Undefined behavior may occur if the invariants of `Eq` and `Ord` are not fully satisfied. pub unsafe trait UnsafeEqOrd: Eq + Ord {} +// Safety: NonZeroU16 is semantically identical to `u16`, +// which is a regular primitive satisfying all equivalence and ordering invariants. +unsafe impl UnsafeEqOrd for num::NonZeroU16 {} + // Safety: NonZeroU32 is semantically identical to `u32`, // which is a regular primitive satisfying all equivalence and ordering invariants. -unsafe impl UnsafeEqOrd for NonZeroU32 {} +unsafe impl UnsafeEqOrd for num::NonZeroU32 {} + +// Safety: NonZeroU64 is semantically identical to `u64`, +// which is a regular primitive satisfying all equivalence and ordering invariants. +unsafe impl UnsafeEqOrd for num::NonZeroU64 {} // Safety: `usize` is a regular primitive satisfying all equivalence and ordering invariants. unsafe impl UnsafeEqOrd for usize {} diff --git a/src/world.rs b/src/world.rs index 042db7ac30..08d6fee25d 100644 --- a/src/world.rs +++ b/src/world.rs @@ -94,6 +94,7 @@ pub struct World { impl World { /// Executes all systems in the world. pub fn execute(&mut self, tracer: &impl Tracer) { + self.ealloc_map.flush_if_marked(); self.scheduler.execute( tracer, &mut self.components, @@ -163,18 +164,24 @@ impl World { /// /// The return value indicates whether the entity can be deleted *immediately*. pub fn delete(&mut self, entity: E) -> DeleteResult { + self.ealloc_map.mark_need_flush::(); + let id = entity.id(); drop(entity); // drop `entity` so that its refcount is removed let (world, mut systems) = self.as_mut(); let result = flag_delete_entity::(id, world, &mut systems[..]); - if let DeleteResult::Terminating = result { - self.scheduler.offline_buffer().rerun_queue.push(Box::new(offline::DeleteEntity::< - E::Archetype, - > { - entity: id, - }) - as Box); + + match result { + DeleteResult::Deleted => {} + DeleteResult::Terminating => { + self.scheduler.offline_buffer().rerun_queue.push(Box::new(offline::DeleteEntity::< + E::Archetype, + > { + entity: id, + }) + as Box); + } } result @@ -256,7 +263,7 @@ fn init_entity( } /// Result of deleting an entity. -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum DeleteResult { /// The entity has been immediately deleted. Deleted, diff --git a/src/world/builder.rs b/src/world/builder.rs index 5396671fe4..d7c0840f48 100644 --- a/src/world/builder.rs +++ b/src/world/builder.rs @@ -178,7 +178,12 @@ impl Builder { } /// Schedules a thread-safe system. - pub fn schedule(&mut self, system: Box) { + pub fn schedule(&mut self, system: impl system::Sendable) { + self.schedule_boxed(Box::new(system)) + } + + /// Schedules a thread-safe system. + pub fn schedule_boxed(&mut self, system: Box) { let mut type_visitor = referrer::VisitTypeArg::new(); system.visit_type(&mut type_visitor); let state_maybe_uninit = system.state_maybe_uninit(); @@ -188,7 +193,12 @@ impl Builder { } /// Schedules a system that must be run on the main thread. 
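// ---- editor's illustrative sketches (not part of this patch) ----
// 1. `OwnedDeref` forwards `Deref`/`DerefMut` to the wrapped value, letting owned data
//    satisfy APIs that expect a guard-like `impl Deref<Target = T>`.
fn owned_deref_demo() {
    let mut wrapped = OwnedDeref(vec![1, 2, 3]);
    wrapped.push(4); // DerefMut to Vec<i32>
    assert_eq!(wrapped.len(), 4); // Deref to Vec<i32>
}

// 2. With the new `schedule` overload a system value is passed directly; the boxed
//    form survives as `schedule_boxed` for callers that already hold a trait object.
fn schedule_demo(builder: &mut Builder, sys: impl system::Sendable) {
    builder.schedule(sys);
}

// 3. The added `PartialEq`/`Eq` derive on `DeleteResult` allows direct assertions in
//    tests; `world` and `entity` here are assumed placeholders.
fn delete_demo(world: &mut World, entity: Entity<TestArch>) {
    assert_eq!(world.delete(entity), DeleteResult::Deleted);
}
// ---- end sketches ----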
- pub fn schedule_thread_unsafe(&mut self, system: Box) { + pub fn schedule_thread_unsafe(&mut self, system: impl system::Unsendable) { + self.schedule_thread_unsafe_boxed(Box::new(system)) + } + + /// Schedules a system that must be run on the main thread. + pub fn schedule_thread_unsafe_boxed(&mut self, system: Box) { let mut type_visitor = referrer::VisitTypeArg::new(); system.visit_type(&mut type_visitor); let state_maybe_uninit = system.state_maybe_uninit(); diff --git a/src/world/rw.rs b/src/world/rw.rs index e511ec3154..aedd6b41cb 100644 --- a/src/world/rw.rs +++ b/src/world/rw.rs @@ -8,9 +8,7 @@ use crate::util::DbgTypeId; use crate::Archetype; pub(crate) mod isotope; -mod partition; pub(crate) mod simple; -use partition::{mut_owned_par_iter_chunks_mut, mut_owned_par_iter_mut, PartitionAccessor}; /// Stores the component states in a world. pub struct Components { diff --git a/src/world/rw/isotope.rs b/src/world/rw/isotope.rs index 5c7c128caf..cc61a512ef 100644 --- a/src/world/rw/isotope.rs +++ b/src/world/rw/isotope.rs @@ -4,12 +4,8 @@ use std::fmt; use crate::{comp, storage, world, Archetype}; mod offline; -mod read; -mod write; - -struct Base { - getter: T, -} +pub(crate) mod read; +pub(crate) mod write; fn storage_map>( comps: &world::Components, diff --git a/src/world/rw/isotope/offline.rs b/src/world/rw/isotope/offline.rs index 5c7f867552..537984bbc6 100644 --- a/src/world/rw/isotope/offline.rs +++ b/src/world/rw/isotope/offline.rs @@ -1,7 +1,8 @@ use std::any::{type_name, TypeId}; use std::sync::Arc; -use crate::{comp, entity, world, Archetype, Storage as _}; +use crate::storage::Access as _; +use crate::{comp, entity, world, Archetype}; impl world::Components { fn offline_isotope_storage(&mut self, discrim: C::Discrim) -> Option<&mut C::Storage> diff --git a/src/world/rw/isotope/read.rs b/src/world/rw/isotope/read.rs index 3285e81539..b4ebba9d1a 100644 --- a/src/world/rw/isotope/read.rs +++ b/src/world/rw/isotope/read.rs @@ -1,19 +1,13 @@ use std::any::type_name; -use std::marker::PhantomData; use std::sync::Arc; -use std::{fmt, ops}; use parking_lot::lock_api::ArcRwLockReadGuard; use parking_lot::RwLock; -use rayon::prelude::ParallelIterator; -use crate::entity::ealloc; -use crate::storage::Chunked as _; -use crate::world::rw::isotope; -use crate::{comp, entity, storage, system, Archetype, Storage as _}; +use crate::{comp, Archetype}; -pub(super) mod full; -pub(super) mod partial; +pub(crate) mod full; +pub(crate) mod partial; type LockedStorage = ArcRwLockReadGuard>::Storage>; @@ -34,222 +28,3 @@ fn own_storage>( ), } } - -/// Abstracts the storage access pattern for an accessor type. -pub(super) trait StorageGet -where - A: Archetype, - C: comp::Isotope, -{ - /// The key from the user, equivalent to [`comp::discrim::Set::Key`] - type Key: fmt::Debug + Copy + 'static; - - /// Retrieves a storage by key. - /// Panics if the key is not supported. - /// - /// For partial accessors, this should return the storage - /// for the discriminant indexed by the key, - /// or panic if the key is out of bounds. - /// - /// For full accessors, this should return the storage for the given discriminant, - /// or initialize the storage lazily. - fn get_storage(&mut self, key: Self::Key) -> &C::Storage; - - /// Equivalent to calling [`Self::get_storage`] for each key. - /// - /// Duplicate keys are allowed because the return type is immutable. - /// The mutability is only used for lazy initialization. 
- fn get_storage_many(&mut self, keys: [Self::Key; N]) -> [&C::Storage; N]; - - /// Return value of [`iter_keys`](Self::iter_keys). - type IterKeys<'t>: Iterator + 't - where - Self: 't; - /// Iterates over all keys currently accessible from this accessor. - /// - /// For partial accessors, this is the set of keys to the discriminants provided by the user. - /// - /// For full accessors, this is the set of discriminants that have been initialized. - fn iter_keys(&self) -> Self::IterKeys<'_>; - - /// Storage type yielded by [`iter_values`](Self::iter_values). - type IterValue: ops::Deref; - /// Return value of [`iter_values`](Self::iter_values). - type IterValues<'t>: Iterator + 't - where - Self: 't; - /// Iterates over all storages currently accessible from this accessor. - /// - /// For partial accessors, this is the set of keys to the discriminants provided by the user. - /// - /// For full accessors, this is the set of discriminants that have been initialized. - fn iter_values(&self) -> Self::IterValues<'_>; -} - -pub(super) trait StorageGetRef -where - A: Archetype, - C: comp::Isotope, - Self: isotope::read::StorageGet, -{ - fn get_storage_ref(&self, key: Self::Key) -> &C::Storage; -} - -impl system::ReadIsotope for isotope::Base -where - A: Archetype, - C: comp::Isotope, - GetterT: StorageGet, -{ - fn try_get>( - &mut self, - entity: E, - key: GetterT::Key, - ) -> Option<&C> { - let storage = self.getter.get_storage(key); - storage.get(entity.id()) - } - - type KnownDiscrims<'t> = impl Iterator>::Discrim> + 't where - Self: 't; - fn known_discrims(&self) -> Self::KnownDiscrims<'_> { - self.getter.iter_keys().map(|(_key, discrim)| discrim) - } - - type GetAll<'t> = impl Iterator + 't where Self: 't; - fn get_all>(&self, entity: E) -> Self::GetAll<'_> { - // workaround for https://github.com/rust-lang/rust/issues/65442 - fn without_e( - getter: &impl StorageGet, - id: A::RawEntity, - ) -> impl Iterator + '_ - where - A: Archetype, - C: comp::Isotope, - { - getter - .iter_values() - .filter_map(move |(_key, discrim, storage)| Some((discrim, storage.get(id)?))) - } - - without_e(&self.getter, entity.id()) - } - - type Iter<'t> = impl Iterator, &'t C)> - where - Self: 't; - - fn iter(&mut self, key: GetterT::Key) -> Self::Iter<'_> { - let storage = self.getter.get_storage(key); - storage.iter().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } - - type Split<'t> = impl system::Read + 't - where - Self: 't; - - fn split(&mut self, keys: [GetterT::Key; N]) -> [Self::Split<'_>; N] { - let storages = self.getter.get_storage_many(keys); - storages.map(|storage| SplitReader { storage, _ph: PhantomData }) - } -} - -impl system::ReadIsotopeRef for isotope::Base -where - A: Archetype, - C: comp::Isotope, - GetterT: StorageGetRef, -{ - fn try_get_ref>( - &self, - entity: E, - key: GetterT::Key, - ) -> Option<&C> { - let storage = self.getter.get_storage_ref(key); - storage.get(entity.id()) - } - - type IterRef<'t> = impl Iterator, &'t C)> where Self: 't; - fn iter_ref(&self, key: GetterT::Key) -> Self::IterRef<'_> { - let storage = self.getter.get_storage_ref(key); - storage.iter().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } -} - -pub(super) struct SplitReader<'t, A, C> -where - A: Archetype, - C: comp::Isotope, -{ - pub(super) storage: &'t C::Storage, - pub(super) _ph: PhantomData<(A, C)>, -} - -impl<'u, A, C> system::Read for SplitReader<'u, A, C> -where - A: Archetype, - C: comp::Isotope, -{ - fn try_get>(&self, entity: E) -> Option<&C> { - 
self.storage.get(entity.id()) - } - - type Iter<'t> = impl Iterator, &'t C)> where Self: 't; - - fn iter(&self) -> Self::Iter<'_> { - self.storage.iter().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } - - type DuplicateImmut<'t> = impl system::Read + 't where Self: 't; - - fn duplicate_immut(&self) -> (Self::DuplicateImmut<'_>, Self::DuplicateImmut<'_>) { - ( - Self { storage: self.storage, _ph: PhantomData }, - Self { storage: self.storage, _ph: PhantomData }, - ) - } - - type ParIter<'t> = impl rayon::iter::ParallelIterator, &'t C)> where Self: 't, C: comp::Must; - fn par_iter<'t>( - &'t self, - snapshot: &'t ealloc::Snapshot<::RawEntity>, - ) -> Self::ParIter<'t> - where - C: comp::Must, - { - rayon::iter::split(snapshot.as_slice(), |slice| slice.split()).flat_map_iter(|slice| { - slice.iter_chunks().flat_map(::range).map(|id| { - let entity = entity::TempRef::new(id); - let data = self.get(entity); - (entity, data) - }) - }) - } -} - -impl<'u, A, C> system::ReadChunk for SplitReader<'u, A, C> -where - A: Archetype, - C: comp::Isotope + comp::Must, - C::Storage: storage::Chunked, -{ - fn get_chunk(&self, chunk: entity::TempRefChunk<'_, A>) -> &[C] { - self.storage.get_chunk(chunk.start, chunk.end).expect("chunk is not completely filled") - } - - type ParIterChunks<'t> = impl rayon::iter::ParallelIterator, &'t [C])> where Self: 't; - fn par_iter_chunks<'t>( - &'t self, - snapshot: &'t ealloc::Snapshot, - ) -> Self::ParIterChunks<'t> { - rayon::iter::split(snapshot.as_slice(), |slice| slice.split()).flat_map_iter(|slice| { - // we don't need to split over the holes in parallel, - // because splitting the total space is more important than splitting the holes - slice.iter_chunks().map(|chunk| { - let chunk = entity::TempRefChunk::new(chunk.start, chunk.end); - let data = self.get_chunk(chunk); - (chunk, data) - }) - }) - } -} diff --git a/src/world/rw/isotope/read/full.rs b/src/world/rw/isotope/read/full.rs index 0f59c06a14..8f2c1d6a05 100644 --- a/src/world/rw/isotope/read/full.rs +++ b/src/world/rw/isotope/read/full.rs @@ -3,9 +3,19 @@ use std::marker::PhantomData; use crate::comp::discrim::{FullMap, Mapped as _}; use crate::comp::Discrim; use crate::entity::ealloc; +use crate::system::access::StorageMap; use crate::world::rw::isotope; use crate::{comp, storage, system, world, Archetype}; +/// Provides access to an isotope component in a specific archetype. +/// +/// Getters require a mutable receiver to allow lazy initialization of new discriminants. +/// Consider [splitting](system::AccessIsotope::split) accessors, +/// which returns a [`system::AccessSingle`] with a shared receiver. +/// If it can be asserted that no uninitialized discriminants will be encountered, +/// use with [`known_discrims`](system::AccessIsotope::known_discrims). +pub type ReadIsotopeFull<'t, A, C> = system::AccessIsotope>; + impl world::Components { /// Immutably access all discriminants of an isotope storage, /// lazily initializing new isotopes during usage. @@ -14,7 +24,7 @@ impl world::Components { /// in order to lazily initialize new isotopes, /// but multiple immutable accessors can still run concurrently /// with lock contention only occurring when new discriminants are encountered. - /// See the documentation of [`ReadIsotope`](system::ReadIsotope) for details. + /// See the documentation of [`ReadIsotopeFull`](system::ReadIsotopeFull) for details. /// /// # Panics /// - if the archetyped component is not used in any systems. 
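// ---- editor's illustrative sketch (not part of this patch) ----
// Access pattern recommended by the `ReadIsotopeFull` documentation above, assuming
// `Weights` is an isotope component of `TestArch` discriminated by `TestDiscrim1`.
fn inspect_weights(mut iso: ReadIsotopeFull<'_, TestArch, Weights>) {
    // `known_discrims` never initializes new storages, so it only needs `&self`.
    let discrims: Vec<_> = iso.known_discrims().collect();
    log::debug!("known discriminants: {:?}", discrims);
    // Splitting yields a per-discriminant accessor that works with a shared receiver.
    let [_first] = iso.split([TestDiscrim1(0)]);
}
// ---- end sketch ----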
@@ -22,7 +32,7 @@ impl world::Components { pub fn read_full_isotope_storage( &self, snapshot: ealloc::Snapshot, - ) -> impl system::ReadIsotope + '_ + ) -> ReadIsotopeFull where A: Archetype, C: comp::Isotope, @@ -42,13 +52,16 @@ impl world::Components { .collect() }; - isotope::Base { - getter: Getter { full_map: storage_map, accessor_storages, snapshot, _ph: PhantomData }, - } + system::AccessIsotope::new(Storages { + full_map: storage_map, + accessor_storages, + snapshot, + _ph: PhantomData, + }) } } -struct Getter<'u, A, C> +pub struct Storages<'u, A, C> where A: Archetype, C: comp::Isotope, @@ -59,7 +72,7 @@ where _ph: PhantomData<(A, C)>, } -impl<'u, A, C> isotope::read::StorageGet for Getter<'u, A, C> +impl<'u, A, C> StorageMap for Storages<'u, A, C> where A: Archetype, C: comp::Isotope, diff --git a/src/world/rw/isotope/read/partial.rs b/src/world/rw/isotope/read/partial.rs index 1fcd5847da..72f07d0051 100644 --- a/src/world/rw/isotope/read/partial.rs +++ b/src/world/rw/isotope/read/partial.rs @@ -2,14 +2,29 @@ use std::marker::PhantomData; use crate::comp::discrim::{self, Mapped as _}; use crate::entity::ealloc; +use crate::system::access::{PartialStorageMap, StorageMap}; use crate::world::rw::isotope; use crate::{comp, system, world, Archetype}; +/// Provides access to an isotope component in a specific archetype. +/// +/// `DiscrimSetKey` is the type used to index the discriminant, +/// depending on the type of discriminant set given. +/// For vec and array discriminant sets, this should be `usize`. +/// For partial isotope access, `K` is usually `usize`. +/// For full isotope access, `K` is the discriminant type. +/// +/// To share the same API as [`ReadIsotopeFull`](system::ReadIsotopeFull), +/// immutable getters still require `&mut self`, +/// but there are `*_ref` variants for these functions that just require `&self`. +pub type ReadIsotopePartial>::Discrim>> = + system::AccessIsotope>; + impl world::Components { /// Immutably access the requested discriminants of an isotope storage, /// lazily initializing new isotopes in `discrims` immediately. /// - /// The return value implements [`ReadIsotopeRef`](system::ReadIsotopeRef), + /// The return value provides `*_ref` getters, /// allowing shared use of this accessor on multiple workers. 
/// /// # Panics @@ -19,7 +34,7 @@ impl world::Components { &'t self, discrims: &'t DiscrimSet, snapshot: ealloc::Snapshot, - ) -> impl system::ReadIsotopeRef + 't + ) -> ReadIsotopePartial where A: Archetype, C: comp::Isotope, @@ -36,11 +51,11 @@ impl world::Components { }) }; - isotope::Base { getter: Getter:: { storages, _ph: PhantomData } } + system::AccessIsotope::new(Storages:: { storages, _ph: PhantomData }) } } -struct Getter +pub struct Storages where A: Archetype, C: comp::Isotope, @@ -50,7 +65,7 @@ where _ph: PhantomData<(A, C)>, } -impl isotope::read::StorageGet for Getter +impl StorageMap for Storages where A: Archetype, C: comp::Isotope, @@ -87,7 +102,7 @@ where } } -impl isotope::read::StorageGetRef for Getter +impl PartialStorageMap for Storages where A: Archetype, C: comp::Isotope, diff --git a/src/world/rw/isotope/write.rs b/src/world/rw/isotope/write.rs index 7dafb5039a..18c71f9888 100644 --- a/src/world/rw/isotope/write.rs +++ b/src/world/rw/isotope/write.rs @@ -1,18 +1,13 @@ use std::any::type_name; -use std::marker::PhantomData; use std::sync::Arc; use parking_lot::lock_api::ArcRwLockWriteGuard; use parking_lot::RwLock; -use rayon::prelude::ParallelIterator; -use crate::entity::ealloc; -use crate::storage::Chunked; -use crate::world::rw::{self, isotope}; -use crate::{comp, entity, storage, system, Archetype, Storage}; +use crate::{comp, Archetype}; -pub(super) mod full; -pub(super) mod partial; +pub(crate) mod full; +pub(crate) mod partial; type LockedStorage = ArcRwLockWriteGuard>::Storage>; @@ -32,246 +27,3 @@ fn own_storage>( ), } } - -pub(super) trait StorageGetMut -where - A: Archetype, - C: comp::Isotope, - Self: isotope::read::StorageGet, -{ - /// Retrieves a storage by key. - /// Panics if the key is not supported. - /// - /// For partial accessors, this should return the storage - /// for the discriminant indexed by the key, - /// or panic if the key is out of bounds. - /// - /// For full accessors, this should return the storage for the given discriminant, - /// or initialize the storage lazily. - fn get_storage_mut(&mut self, key: Self::Key) -> &mut C::Storage; - - /// Retrieves storages by disjoint keys. - /// Panics if any key is not supported or is equal to another key. 
- fn get_storage_mut_many( - &mut self, - keys: [Self::Key; N], - ) -> [&mut C::Storage; N]; -} - -impl system::WriteIsotope for isotope::Base -where - A: Archetype, - C: comp::Isotope, - GetterT: StorageGetMut, -{ - fn try_get_mut>( - &mut self, - entity: E, - key: GetterT::Key, - ) -> Option<&mut C> { - let storage = self.getter.get_storage_mut(key); - storage.get_mut(entity.id()) - } - - fn set>( - &mut self, - entity: E, - key: GetterT::Key, - value: Option, - ) -> Option { - let storage = self.getter.get_storage_mut(key); - storage.set(entity.id(), value) - } - - type IterMut<'t> = impl Iterator, &'t mut C)> - where - Self: 't; - fn iter_mut(&mut self, key: GetterT::Key) -> Self::IterMut<'_> { - let storage = self.getter.get_storage_mut(key); - storage.iter_mut().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } - - type SplitDiscrim<'t> = impl system::Write + 't - where - Self: 't; - fn split_isotopes( - &mut self, - keys: [GetterT::Key; N], - ) -> [Self::SplitDiscrim<'_>; N] { - let storages = self.getter.get_storage_mut_many(keys); - storages.map(|storage| SplitWriter { storage, _ph: PhantomData }) - } -} - -struct SplitWriter<'t, A, C> -where - A: Archetype, - C: comp::Isotope, -{ - storage: &'t mut C::Storage, - _ph: PhantomData<(A, C)>, -} - -impl<'u, A, C> system::Read for SplitWriter<'u, A, C> -where - A: Archetype, - C: comp::Isotope, -{ - fn try_get>(&self, entity: E) -> Option<&C> { - self.storage.get(entity.id()) - } - - type Iter<'t> = impl Iterator, &'t C)> where Self: 't; - - fn iter(&self) -> Self::Iter<'_> { - self.storage.iter().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } - - type DuplicateImmut<'t> = impl system::Read + 't where Self: 't; - - fn duplicate_immut(&self) -> (Self::DuplicateImmut<'_>, Self::DuplicateImmut<'_>) { - ( - isotope::read::SplitReader { storage: self.storage, _ph: PhantomData }, - isotope::read::SplitReader { storage: self.storage, _ph: PhantomData }, - ) - } - - type ParIter<'t> = impl rayon::iter::ParallelIterator, &'t C)> where Self: 't, C: comp::Must; - fn par_iter<'t>( - &'t self, - snapshot: &'t ealloc::Snapshot<::RawEntity>, - ) -> Self::ParIter<'t> - where - C: comp::Must, - { - rayon::iter::split(snapshot.as_slice(), |slice| slice.split()).flat_map_iter(|slice| { - slice.iter_chunks().flat_map(::range).map(|id| { - let entity = entity::TempRef::new(id); - let data = self.get(entity); - (entity, data) - }) - }) - } -} - -impl<'u, A, C> system::ReadChunk for SplitWriter<'u, A, C> -where - A: Archetype, - C: comp::Isotope + comp::Must, - C::Storage: storage::Chunked, -{ - fn get_chunk(&self, chunk: entity::TempRefChunk<'_, A>) -> &[C] { - self.storage.get_chunk(chunk.start, chunk.end).expect("chunk is not completely filled") - } - - type ParIterChunks<'t> = impl rayon::iter::ParallelIterator, &'t [C])> where Self: 't; - fn par_iter_chunks<'t>( - &'t self, - snapshot: &'t ealloc::Snapshot, - ) -> Self::ParIterChunks<'t> { - rayon::iter::split(snapshot.as_slice(), |slice| slice.split()).flat_map_iter(|slice| { - // we don't need to split over the holes in parallel, - // because splitting the total space is more important than splitting the holes - slice.iter_chunks().map(|chunk| { - let chunk = entity::TempRefChunk::new(chunk.start, chunk.end); - let data = self.get_chunk(chunk); - (chunk, data) - }) - }) - } -} - -impl<'u, A, C> system::Mut for SplitWriter<'u, A, C> -where - A: Archetype, - C: comp::Isotope, -{ - fn try_get_mut>(&mut self, entity: E) -> Option<&mut C> { - self.storage.get_mut(entity.id()) - 
} - - type IterMut<'t> = impl Iterator, &'t mut C)> - where - Self: 't; - fn iter_mut(&mut self) -> Self::IterMut<'_> { - self.storage.iter_mut().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } -} - -impl<'u, A, C> system::MutFull for SplitWriter<'u, A, C> -where - A: Archetype, - C: comp::Isotope, -{ - type Partition<'t> = - rw::PartitionAccessor<'t, A, C, ::Partition<'t>> where Self: 't; - fn as_partition(&mut self) -> Self::Partition<'_> { - rw::PartitionAccessor { partition: self.storage.as_partition(), _ph: PhantomData } - } - - type ParIterMut<'t> = impl ParallelIterator, &'t mut C)> where Self: 't, C: comp::Must; - fn par_iter_mut<'t>( - &'t mut self, - snapshot: &'t ealloc::Snapshot, - ) -> Self::ParIterMut<'t> - where - C: comp::Must, - { - rw::mut_owned_par_iter_mut(self.as_partition(), snapshot) - } -} - -impl<'u, A, C> system::MutChunk for SplitWriter<'u, A, C> -where - A: Archetype, - C: comp::Isotope + comp::Must, - C::Storage: storage::Chunked, -{ - fn get_chunk_mut(&mut self, chunk: entity::TempRefChunk<'_, A>) -> &mut [C] - where - C: comp::Must, - { - self.storage.get_chunk_mut(chunk.start, chunk.end).expect("chunk is not completely filled") - } -} - -impl<'u, A, C> system::MutFullChunk for SplitWriter<'u, A, C> -where - A: Archetype, - C: comp::Isotope + comp::Must, - C::Storage: storage::Chunked, -{ - type Partition<'t> = impl system::MutPartitionChunk<'t, A, C> - where - Self: 't; - fn as_partition_chunk(&mut self) -> Self::Partition<'_> { - rw::PartitionAccessor { - partition: self.storage.as_partition_chunk(), - _ph: PhantomData, - } - } - - type ParIterChunksMut<'t> = impl ParallelIterator, &'t mut [C])> - where - Self: 't, - C: comp::Must; - fn par_iter_chunks_mut<'t>( - &'t mut self, - snapshot: &'t ealloc::Snapshot<::RawEntity>, - ) -> Self::ParIterChunksMut<'t> - where - C: comp::Must, - { - rw::mut_owned_par_iter_chunks_mut(self.as_partition_chunk(), snapshot) - } -} - -impl<'u, A, C> system::Write for SplitWriter<'u, A, C> -where - A: Archetype, - C: comp::Isotope, -{ - fn set>(&mut self, entity: E, value: Option) -> Option { - self.storage.set(entity.id(), value) - } -} diff --git a/src/world/rw/isotope/write/full.rs b/src/world/rw/isotope/write/full.rs index 8af0633a81..bcdacf2ebe 100644 --- a/src/world/rw/isotope/write/full.rs +++ b/src/world/rw/isotope/write/full.rs @@ -6,9 +6,19 @@ use parking_lot::MutexGuard; use crate::comp::discrim::{FullMap as _, Mapped as _}; use crate::comp::{self, Discrim}; use crate::entity::ealloc; +use crate::system::access::{StorageMap, StorageMapMut}; use crate::world::rw::isotope; use crate::{storage, system, world, Archetype}; +/// Provides access to an isotope component in a specific archetype. +/// +/// Getters require a mutable receiver to allow lazy initialization of new discriminants. +/// Consider [splitting](system::AccessIsotope::split) accessors, +/// which returns a [`system::AccessSingle`] with a shared receiver. +/// If it can be asserted that no uninitialized discriminants will be encountered, +/// use with [`known_discrims`](system::AccessIsotope::known_discrims). +pub type WriteIsotopeFull<'t, A, C> = system::AccessIsotope>; + impl world::Components { /// Mutably access all discriminants of an isotope storage, /// lazily initializing new isotopes during usage. 
@@ -19,7 +29,7 @@ impl world::Components { pub fn write_full_isotope_storage( &self, snapshot: ealloc::Snapshot, - ) -> impl system::WriteIsotope + '_ + ) -> WriteIsotopeFull where A: Archetype, C: comp::Isotope, @@ -43,11 +53,16 @@ impl world::Components { }) .collect(); - isotope::Base { getter: Getter { full_map, accessor_storages, snapshot, _ph: PhantomData } } + system::AccessIsotope::new(Storages { + full_map, + accessor_storages, + snapshot, + _ph: PhantomData, + }) } } -struct Getter<'u, A, C> +pub struct Storages<'u, A, C> where A: Archetype, C: comp::Isotope, @@ -58,7 +73,7 @@ where _ph: PhantomData<(A, C)>, } -impl<'u, A, C> isotope::read::StorageGet for Getter<'u, A, C> +impl<'u, A, C> StorageMap for Storages<'u, A, C> where A: Archetype, C: comp::Isotope, @@ -106,7 +121,7 @@ where } } -impl<'u, A, C> isotope::write::StorageGetMut for Getter<'u, A, C> +impl<'u, A, C> StorageMapMut for Storages<'u, A, C> where A: Archetype, C: comp::Isotope, diff --git a/src/world/rw/isotope/write/partial.rs b/src/world/rw/isotope/write/partial.rs index 21eea5fb04..fd31505720 100644 --- a/src/world/rw/isotope/write/partial.rs +++ b/src/world/rw/isotope/write/partial.rs @@ -2,14 +2,29 @@ use std::marker::PhantomData; use crate::comp::discrim::{self, Mapped as _}; use crate::entity::ealloc; +use crate::system::access::{PartialStorageMap, StorageMap, StorageMapMut}; use crate::world::rw::isotope; use crate::{comp, system, world, Archetype}; +/// Provides access to an isotope component in a specific archetype. +/// +/// `DiscrimSetKey` is the type used to index the discriminant, +/// depending on the type of discriminant set given. +/// For vec and array discriminant sets, this should be `usize`. +/// For partial isotope access, `K` is usually `usize`. +/// For full isotope access, `K` is the discriminant type. +/// +/// To share the same API as [`ReadIsotopeFull`](system::ReadIsotopeFull), +/// immutable getters still require `&mut self`, +/// but there are `*_ref` variants for these functions that just require `&self`. +pub type WriteIsotopePartial>::Discrim>> = + system::AccessIsotope>; + impl world::Components { /// Mutably access the requested discriminants of an isotope storage, /// lazily initializing new isotopes in `discrims` immediately. /// - /// The return value implements [`ReadIsotopeRef`](system::ReadIsotopeRef), + /// The return value provides `*_ref` getters, /// allowing shared read-only use of this accessor on multiple workers. 
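// ---- editor's illustrative sketch (not part of this patch) ----
// The `*_ref` getters mentioned above take `&self`, so a partial accessor can be read
// from several workers at once. Types are assumed placeholders; with a vec or array
// discriminant set the key is the `usize` index into that set.
fn peek_first_discrim<A: Archetype, C: comp::Isotope<A>>(
    accessor: &WriteIsotopePartial<'_, A, C, [C::Discrim; 2]>,
    entity: entity::TempRef<'_, A>,
) {
    let _value = accessor.try_get_ref(entity, 0); // discriminant at index 0 of the set
}
// ---- end sketch ----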
/// /// # Panics @@ -19,9 +34,7 @@ impl world::Components { &'t self, discrims: &'t DiscrimSet, snapshot: ealloc::Snapshot, - ) -> impl system::ReadIsotopeRef - + system::WriteIsotope - + 't + ) -> WriteIsotopePartial where A: Archetype, C: comp::Isotope, @@ -38,11 +51,11 @@ impl world::Components { }) }; - isotope::Base { getter: Getter:: { storages, _ph: PhantomData } } + system::AccessIsotope::new(Storages:: { storages, _ph: PhantomData }) } } -struct Getter +pub struct Storages where A: Archetype, C: comp::Isotope, @@ -52,7 +65,7 @@ where _ph: PhantomData<(A, C)>, } -impl isotope::read::StorageGet for Getter +impl StorageMap for Storages where A: Archetype, C: comp::Isotope, @@ -89,7 +102,7 @@ where } } -impl isotope::read::StorageGetRef for Getter +impl PartialStorageMap for Storages where A: Archetype, C: comp::Isotope, @@ -103,7 +116,7 @@ where } } -impl isotope::write::StorageGetMut for Getter +impl StorageMapMut for Storages where A: Archetype, C: comp::Isotope, diff --git a/src/world/rw/partition.rs b/src/world/rw/partition.rs deleted file mode 100644 index fab6f9629f..0000000000 --- a/src/world/rw/partition.rs +++ /dev/null @@ -1,120 +0,0 @@ -use std::any; -use std::marker::PhantomData; - -use rayon::prelude::ParallelIterator; - -use crate::entity::{self, ealloc, Raw as _}; -use crate::storage::{self, Partition as _}; -use crate::{comp, system, Archetype}; - -pub(super) struct PartitionAccessor<'t, A: Archetype, C, S> { - pub(super) partition: S, - pub(super) _ph: PhantomData<(A, &'t C)>, -} - -impl<'t, A, C, StorageParT> system::Mut for PartitionAccessor<'t, A, C, StorageParT> -where - A: Archetype, - C: Send + Sync + 'static, - StorageParT: storage::Partition<'t, A::RawEntity, C>, -{ - fn try_get_mut>(&mut self, entity: E) -> Option<&mut C> { - self.partition.get_mut(entity.id()) - } - - type IterMut<'u> = impl Iterator, &'u mut C)> + 'u where Self: 'u; - fn iter_mut(&mut self) -> Self::IterMut<'_> { - self.partition - .by_ref() - .iter_mut() - .map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } -} - -impl<'t, A, C, StorageParT> system::MutPartition<'t, A, C> - for PartitionAccessor<'t, A, C, StorageParT> -where - A: Archetype, - C: Send + Sync + 'static, - StorageParT: storage::Partition<'t, A::RawEntity, C>, -{ - fn split_at>(self, entity: E) -> (Self, Self) { - let (left, right) = self.partition.partition_at(entity.id()); - - (Self { partition: left, _ph: PhantomData }, Self { partition: right, _ph: PhantomData }) - } - - type IntoIterMut = impl Iterator, &'t mut C)>; - fn into_iter_mut(self) -> Self::IntoIterMut { - self.partition.iter_mut().map(|(entity, data)| (entity::TempRef::new(entity), data)) - } -} - -impl<'t, A, C, StorageParT> system::MutChunk for PartitionAccessor<'t, A, C, StorageParT> -where - A: Archetype, - C: Send + Sync + 'static, - StorageParT: storage::PartitionChunked<'t, A::RawEntity, C>, -{ - fn get_chunk_mut(&mut self, chunk: entity::TempRefChunk<'_, A>) -> &'_ mut [C] - where - C: comp::Must, - { - match self.partition.get_chunk_mut(chunk.start, chunk.end) { - Some(comp) => comp, - None => panic!( - "Component {}/{} implements comp::Must but is not present", - any::type_name::(), - any::type_name::(), - ), - } - } -} - -impl<'t, A, C, StorageParT> system::MutPartitionChunk<'t, A, C> - for PartitionAccessor<'t, A, C, StorageParT> -where - A: Archetype, - C: Send + Sync + 'static, - StorageParT: storage::PartitionChunked<'t, A::RawEntity, C>, -{ - type IntoIterChunksMut = impl Iterator, &'t mut [C])>; - - fn into_iter_chunks_mut(self) -> 
Self::IntoIterChunksMut { - self.partition.into_iter_chunks_mut().map(|(initial, data)| { - (entity::TempRefChunk::new(initial, initial.add(data.len())), data) - }) - } -} - -pub(super) fn mut_owned_par_iter_mut<'t, A: Archetype, C: 'static>( - partition: impl system::MutPartition<'t, A, C>, - snapshot: &'t ealloc::Snapshot, -) -> impl ParallelIterator, &'t mut C)> -where - C: comp::Must, -{ - rayon::iter::split((partition, snapshot.as_slice()), |(partition, slice)| { - let Some(midpt) = slice.midpoint_for_split() else { return ((partition, slice), None) }; - let (slice_left, slice_right) = slice.split_at(midpt); - let (partition_left, partition_right) = partition.split_at(entity::TempRef::new(midpt)); - ((partition_left, slice_left), Some((partition_right, slice_right))) - }) - .flat_map_iter(|(partition, _slice)| partition.into_iter_mut()) -} - -pub(super) fn mut_owned_par_iter_chunks_mut<'t, A: Archetype, C: 'static>( - partition: impl system::MutPartitionChunk<'t, A, C>, - snapshot: &'t ealloc::Snapshot, -) -> impl ParallelIterator, &'t mut [C])> -where - C: comp::Must, -{ - rayon::iter::split((partition, snapshot.as_slice()), |(partition, slice)| { - let Some(midpt) = slice.midpoint_for_split() else { return ((partition, slice), None) }; - let (slice_left, slice_right) = slice.split_at(midpt); - let (partition_left, partition_right) = partition.split_at(entity::TempRef::new(midpt)); - ((partition_left, slice_left), Some((partition_right, slice_right))) - }) - .flat_map_iter(|(partition, _slice)| partition.into_iter_chunks_mut()) -} diff --git a/src/world/rw/simple.rs b/src/world/rw/simple.rs index e6ccc66e0e..ba30ba03ad 100644 --- a/src/world/rw/simple.rs +++ b/src/world/rw/simple.rs @@ -1,14 +1,24 @@ use std::any::{type_name, TypeId}; -use std::marker::PhantomData; use std::ops; use parking_lot::{RwLockReadGuard, RwLockWriteGuard}; -use rayon::prelude::ParallelIterator; -use crate::entity::ealloc; -use crate::storage::{self, Chunked}; -use crate::world::{self, rw}; -use crate::{comp, entity, system, util, Archetype, Storage}; +use crate::world::{self}; +use crate::{comp, system, Archetype}; + +/// Provides access to a simple component in a specific archetype. +pub type ReadSimple<'t, A: Archetype, C: comp::Simple> = system::AccessSingle< + A, + C, + impl ops::Deref>::Storage> + 't, +>; + +/// Provides access to a simple component in a specific archetype. +pub type WriteSimple<'t, A: Archetype, C: comp::Simple> = system::AccessSingle< + A, + C, + impl ops::DerefMut>::Storage> + 't, +>; impl world::Components { /// Creates a read-only, shared accessor to the given archetyped simple component. @@ -16,9 +26,7 @@ impl world::Components { /// # Panics /// - if the archetyped component is not used in any systems /// - if another thread is exclusively accessing the same archetyped component. - pub fn read_simple_storage>( - &self, - ) -> impl system::ReadSimple + '_ { + pub fn read_simple_storage>(&self) -> ReadSimple { let storage = match self.archetype::().simple_storages.get(&TypeId::of::()) { Some(storage) => storage, None => panic!( @@ -38,7 +46,7 @@ impl world::Components { }; let guard = RwLockReadGuard::map(guard, |storage| storage.downcast_ref::()); - SimpleRw { storage: guard } + system::AccessSingle::new(guard) } /// Creates a writable, exclusive accessor to the given archetyped simple component. @@ -46,9 +54,7 @@ impl world::Components { /// # Panics /// - if the archetyped component is not used in any systems. /// - if another thread is accessing the same archetyped component. 
- pub fn write_simple_storage>( - &self, - ) -> impl system::WriteSimple + '_ { + pub fn write_simple_storage>(&self) -> WriteSimple { let storage = match self.archetype::().simple_storages.get(&TypeId::of::()) { Some(storage) => storage, None => panic!( @@ -67,34 +73,15 @@ impl world::Components { }; let guard = RwLockWriteGuard::map(guard, |storage| storage.downcast_mut::()); - SimpleRw { storage: guard } - } - - /// Iterates over all simple entity components in offline mode. - /// - /// Requires a mutable reference to the world to ensure that the world is offline. - pub fn iter_simple>( - &mut self, - ) -> impl Iterator, &mut C)> { - let typed = self.archetype_mut::(); - let storage = match typed.simple_storages.get_mut(&TypeId::of::()) { - Some(storage) => storage, - None => panic!( - "The component {} cannot be retrieved because it is not used in any systems", - type_name::() - ), - }; - let storage = storage.get_storage::(); - storage.iter_mut().map(|(entity, value)| (entity::TempRef::new(entity), value)) + system::AccessSingle::new(guard) } - /// Gets a reference to a simple entity component in offline mode. + /// Exclusively accesses a simple component type in offline mode. /// /// Requires a mutable reference to the world to ensure that the world is offline. - pub fn get_simple, E: entity::Ref>( + pub fn get_simple_storage>( &mut self, - entity: E, - ) -> Option<&mut C> { + ) -> system::AccessSingle { let typed = self.archetype_mut::(); let storage = match typed.simple_storages.get_mut(&TypeId::of::()) { Some(storage) => storage, @@ -104,201 +91,6 @@ impl world::Components { ), }; let storage = storage.get_storage::(); - storage.get_mut(entity.id()) - } -} - -#[derive(Clone, Copy)] -struct SimpleRw { - // S is a MappedRwLock(Read|Write)Guard - storage: S, -} - -impl system::Read for SimpleRw -where - A: Archetype, - C: comp::Simple, - StorageRef: ops::Deref + Sync, -{ - fn try_get>(&self, entity: E) -> Option<&C> { - self.storage.get(entity.id()) - } - - type Iter<'t> = impl Iterator, &'t C)> where Self: 't; - fn iter(&self) -> Self::Iter<'_> { - self.storage.iter().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } - - type DuplicateImmut<'t> = SimpleRw> where Self: 't; - fn duplicate_immut( - &self, - ) -> (SimpleRw>, SimpleRw>) - { - let dup = SimpleRw { storage: util::DoubleDeref(&self.storage) }; - (dup, dup) - } - - type ParIter<'t> = impl rayon::iter::ParallelIterator, &'t C)> where Self: 't, C: comp::Must; - fn par_iter<'t>( - &'t self, - snapshot: &'t ealloc::Snapshot<::RawEntity>, - ) -> Self::ParIter<'t> - where - C: comp::Must, - { - rayon::iter::split(snapshot.as_slice(), |slice| slice.split()).flat_map_iter(|slice| { - slice.iter_chunks().flat_map(::range).map(|id| { - let entity = entity::TempRef::new(id); - let data = self.get(entity); - (entity, data) - }) - }) - } -} - -impl system::ReadChunk for SimpleRw -where - A: Archetype, - C: comp::Simple + comp::Must, - StorageRef: ops::Deref + Sync, - C::Storage: storage::Chunked, -{ - fn get_chunk(&self, chunk: entity::TempRefChunk<'_, A>) -> &[C] { - self.storage.get_chunk(chunk.start, chunk.end).expect("chunk is not completely filled") - } - - type ParIterChunks<'t> = impl rayon::iter::ParallelIterator, &'t [C])> where Self: 't; - fn par_iter_chunks<'t>( - &'t self, - snapshot: &'t ealloc::Snapshot, - ) -> Self::ParIterChunks<'t> { - rayon::iter::split(snapshot.as_slice(), |slice| slice.split()).flat_map_iter(|slice| { - // we don't need to split over the holes in parallel, - // because splitting the total 
space is more important than splitting the holes - slice.iter_chunks().map(|chunk| { - let chunk = entity::TempRefChunk::new(chunk.start, chunk.end); - let data = self.get_chunk(chunk); - (chunk, data) - }) - }) - } -} - -impl system::ReadSimple for SimpleRw -where - A: Archetype, - C: comp::Simple, - StorageRef: ops::Deref + Sync, -{ - fn access_chunk(&self) -> system::accessor::MustReadChunkSimple { - system::accessor::MustReadChunkSimple { storage: &self.storage } - } -} - -impl system::Mut for SimpleRw -where - A: Archetype, - C: comp::Simple, - StorageRef: ops::DerefMut, -{ - fn try_get_mut>(&mut self, entity: E) -> Option<&mut C> { - self.storage.get_mut(entity.id()) - } - - type IterMut<'t> = impl Iterator, &'t mut C)> where Self: 't; - fn iter_mut(&mut self) -> Self::IterMut<'_> { - self.storage.iter_mut().map(|(entity, comp)| (entity::TempRef::new(entity), comp)) - } -} - -impl system::MutFull for SimpleRw -where - A: Archetype, - C: comp::Simple, - StorageRef: ops::DerefMut, -{ - type Partition<'t> = rw::PartitionAccessor<'t, A, C, ::Partition<'t>> where Self: 't; - fn as_partition(&mut self) -> Self::Partition<'_> { - rw::PartitionAccessor { partition: self.storage.as_partition(), _ph: PhantomData } - } - - type ParIterMut<'t> = impl ParallelIterator, &'t mut C)> where Self: 't, C: comp::Must; - fn par_iter_mut<'t>( - &'t mut self, - snapshot: &'t ealloc::Snapshot, - ) -> Self::ParIterMut<'t> - where - C: comp::Must, - { - rw::mut_owned_par_iter_mut(self.as_partition(), snapshot) - } -} - -impl system::MutChunk for SimpleRw -where - A: Archetype, - C: comp::Simple, - StorageRef: ops::DerefMut + Sync, - C::Storage: storage::Chunked, -{ - fn get_chunk_mut(&mut self, chunk: entity::TempRefChunk<'_, A>) -> &mut [C] - where - C: comp::Must, - { - self.storage.get_chunk_mut(chunk.start, chunk.end).expect("chunk is not completely filled") - } -} - -impl system::MutFullChunk for SimpleRw -where - A: Archetype, - C: comp::Simple, - StorageRef: ops::DerefMut + Sync, - C::Storage: storage::Chunked, -{ - type Partition<'t> = impl system::MutPartitionChunk<'t, A, C> - where - Self: 't; - fn as_partition_chunk(&mut self) -> Self::Partition<'_> { - rw::PartitionAccessor { - partition: self.storage.as_partition_chunk(), - _ph: PhantomData, - } - } - - type ParIterChunksMut<'t> = impl ParallelIterator, &'t mut [C])> - where - Self: 't, - C: comp::Must; - fn par_iter_chunks_mut<'t>( - &'t mut self, - snapshot: &'t ealloc::Snapshot<::RawEntity>, - ) -> Self::ParIterChunksMut<'t> - where - C: comp::Must, - { - rw::mut_owned_par_iter_chunks_mut(self.as_partition_chunk(), snapshot) - } -} - -impl system::Write for SimpleRw -where - A: Archetype, - C: comp::Simple, - StorageRef: ops::DerefMut + Sync, -{ - fn set>(&mut self, entity: E, value: Option) -> Option { - self.storage.set(entity.id(), value) - } -} - -impl system::WriteSimple for SimpleRw -where - A: Archetype, - C: comp::Simple, - StorageRef: ops::DerefMut + Sync, -{ - fn access_chunk_mut(&mut self) -> system::accessor::MustWriteChunkSimple<'_, A, C> { - system::accessor::MustWriteChunkSimple { storage: &mut self.storage } + system::AccessSingle::new(storage) } } diff --git a/src/world/tests.rs b/src/world/tests.rs index e6d0d9f4d0..b03d623f59 100644 --- a/src/world/tests.rs +++ b/src/world/tests.rs @@ -1,850 +1,4 @@ #![allow(clippy::ptr_arg)] -use crate::entity::{deletion, generation, Raw, Ref}; -use crate::system::{Read as _, Write as _}; -use crate::test_util::*; -use crate::{global, system, system_test, tracer, world, Entity}; - 
-#[system(dynec_as(crate))] -fn common_test_system( - _comp3: impl system::ReadSimple, - _comp4: impl system::WriteSimple, - _comp5: impl system::ReadSimple, - _comp6: impl system::ReadSimple, - #[dynec(isotope(discrim = [TestDiscrim1(11), TestDiscrim1(17)]))] _iso1: impl system::ReadIsotope< - TestArch, - IsoNoInit, - usize, - >, - #[dynec(global)] _aggregator: &mut Aggregator, - #[dynec(global)] _initials: &InitialEntities, -) { -} - -#[test] -fn test_dependencies_successful() { - let mut world = system_test!(common_test_system.build();); - let entity = world.create::(crate::comps![ @(crate) TestArch => - Simple1OptionalNoDepNoInit(1), Simple5RequiredNoInit(1), - @(TestDiscrim1(11), IsoNoInit(1)), - @(TestDiscrim1(13), IsoNoInit(2)), - @(TestDiscrim1(17), IsoNoInit(3)), - ]); - - match world.components.get_simple::(&entity) { - Some(&mut Simple4Depends12(c40, c41)) => { - assert_eq!(c40, 7); - assert_eq!(c41, (1 + 2) * 8); - } - None => panic!("Simple4Depends12 is used in system_with_comp3_comp4_comp5"), - } - - world.components.get_simple::(&entity); - // panic here -} - -#[test] -#[should_panic = "Cannot create an entity of type `dynec::test_util::TestArch` without explicitly \ - passing a component of type `dynec::test_util::Simple5RequiredNoInit`"] -fn test_dependencies_missing_required_simple() { - let mut world = system_test!(common_test_system.build();); - world.create::(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(1)]); -} - -#[test] -#[should_panic = "Cannot create an entity of type `dynec::test_util::TestArch` without explicitly \ - passing a component of type `dynec::test_util::Simple2OptionalDepends1`, or \ - `dynec::test_util::Simple1OptionalNoDepNoInit` to invoke its auto-initializer"] -fn test_dependencies_missing_required_dep() { - let mut world = system_test!(common_test_system.build();); - world.create::(crate::comps![@(crate) TestArch => Simple5RequiredNoInit(1)]); -} - -#[test] -fn test_global_update() { - #[system(dynec_as(crate))] - fn test_system(#[dynec(global)] aggregator: &mut Aggregator) { aggregator.comp30_sum = 1; } - - let mut world = system_test!(test_system.build();); - - world.execute(&tracer::Log(log::Level::Trace)); - - let aggregator = world.get_global::(); - assert_eq!(aggregator.comp30_sum, 1); -} - -#[test] -#[should_panic = "Global type dynec::world::tests::test_global_uninit::Uninit does not have an \ - initial impl and was not provided manually"] -fn test_global_uninit() { - #[global(dynec_as(crate))] - struct Uninit; - - #[system(dynec_as(crate))] - fn test_system(#[dynec(global)] _: &Uninit) {} - - let _world = system_test!(test_system.build();); -} - -#[test] -fn test_simple_fetch() { - #[system(dynec_as(crate))] - fn test_system( - mut comp5: impl system::WriteSimple, - #[dynec(global)] initials: &InitialEntities, - ) { - let ent = initials.strong.as_ref().expect("initials.strong is None"); - - let comp = comp5.get_mut(ent); - assert_eq!(comp.0, 7); - comp.0 += 13; - } - - let mut world = system_test!(test_system.build();); - - let ent = world.create(crate::comps![@(crate) TestArch => Simple5RequiredNoInit(7)]); - world.get_global::().strong = Some(ent.clone()); - - world.execute(&tracer::Log(log::Level::Trace)); - - let comp = world.components.get_simple::(ent); - assert_eq!(comp, Some(&mut Simple5RequiredNoInit(20))); -} - -fn isotope_discrim_read_test_system( - mut iso1: impl system::ReadIsotope, - mut iso2: impl system::ReadIsotope, - initials: &InitialEntities, -) { - let ent = 
initials.strong.as_ref().expect("initials.strong is None"); - - { - let iso = iso1.try_get(ent, TestDiscrim1(11)); - assert_eq!(iso, Some(&IsoNoInit(3))); - } - - // should not panic on nonexistent storages - { - let iso = iso1.try_get(ent, TestDiscrim1(17)); - assert_eq!(iso, None); - } - - // should return default value for autoinit isotopes - { - let iso = iso2.try_get(ent, TestDiscrim2(71)); - assert_eq!(iso, Some(&IsoWithInit(73))); - } - - let map = iso1.get_all(ent); - let mut map_vec: Vec<(TestDiscrim1, &IsoNoInit)> = map.collect(); - map_vec.sort_by_key(|(TestDiscrim1(discrim), _)| *discrim); - assert_eq!(map_vec, vec![(TestDiscrim1(11), &IsoNoInit(3)), (TestDiscrim1(13), &IsoNoInit(5))]); -} - -fn isotope_discrim_test_world(system: impl system::Sendable + 'static) -> world::World { - let mut world = system_test!(system;); - - let ent = world.create(crate::comps![@(crate) TestArch => - @(TestDiscrim1(11), IsoNoInit(3)), - @(TestDiscrim1(13), IsoNoInit(5)), - ]); - world.get_global::().strong = Some(ent); - - world -} - -#[test] -fn test_full_isotope_discrim_write() { - #[system(dynec_as(crate))] - fn test_sys( - iso1: impl system::WriteIsotope, - iso2: impl system::WriteIsotope, - #[dynec(global)] initials: &InitialEntities, - ) { - isotope_discrim_read_test_system(iso1, iso2, initials); - } - - let mut world = isotope_discrim_test_world(test_sys.build()); - - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -fn test_full_isotope_discrim_read() { - #[system(dynec_as(crate))] - fn test_system( - iso1: impl system::ReadIsotope, - iso2: impl system::ReadIsotope, - #[dynec(global)] initials: &InitialEntities, - ) { - isotope_discrim_read_test_system(iso1, iso2, initials) - } - - let mut world = isotope_discrim_test_world(test_system.build()); - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -fn test_partial_isotope_discrim_write() { - partial_isotope_discrim_write( - vec![TestDiscrim1(7), TestDiscrim1(11), TestDiscrim1(17), TestDiscrim1(19)], - vec![ - (0, Some(IsoNoInit(2)), Some(None)), - (1, Some(IsoNoInit(3)), Some(Some(IsoNoInit(23)))), - (2, None, None), - (3, None, Some(Some(IsoNoInit(29)))), - ], - vec![(TestDiscrim1(11), IsoNoInit(23)), (TestDiscrim1(19), IsoNoInit(29))], - ); -} - -#[test] -fn test_partial_isotope_discrim_read() { - partial_isotope_discrim_read( - vec![TestDiscrim1(11), TestDiscrim1(17)], - vec![(0, Some(IsoNoInit(3))), (1, None)], - vec![(TestDiscrim1(11), IsoNoInit(3))], - ); -} - -#[test] -#[should_panic = "The index 42 is not available in the isotope request for \ - dynec::test_util::TestArch/dynec::test_util::IsoNoInit"] -fn test_partial_isotope_discrim_read_panic() { - partial_isotope_discrim_read(vec![TestDiscrim1(11)], vec![(42, None)], vec![]); -} - -fn partial_isotope_discrim_read( - req_discrims: Vec, - single_expects: Vec<(usize, Option)>, - expect_all: Vec<(TestDiscrim1, IsoNoInit)>, -) { - #[system(dynec_as(crate))] - fn test_system( - #[dynec(param)] _req_discrims: &Vec, - #[dynec(param)] single_expects: &Vec<(usize, Option)>, - #[dynec(param)] expect_all: &Vec<(TestDiscrim1, IsoNoInit)>, - #[dynec(isotope(discrim = _req_discrims))] mut iso1: impl system::ReadIsotope< - TestArch, - IsoNoInit, - usize, - >, - #[dynec(global)] initials: &InitialEntities, - ) { - let ent = initials.strong.as_ref().expect("initials.strong is None"); - - for (discrim, expect) in single_expects { - let iso = iso1.try_get(ent, *discrim); - assert_eq!(iso, expect.as_ref()); - } - - // should only include requested discriminants - let map = 
iso1.get_all(ent); - let mut map_vec: Vec<(TestDiscrim1, &IsoNoInit)> = map.collect(); - map_vec.sort_by_key(|(TestDiscrim1(discrim), _)| *discrim); - let expect_all = - expect_all.iter().map(|(discrim, iso)| (*discrim, iso)).collect::>(); - assert_eq!(map_vec, expect_all); - } - - let mut world = system_test!( - test_system.build(req_discrims, single_expects, expect_all); - ); - - let ent = world.create(crate::comps![@(crate) TestArch => - @(TestDiscrim1(11), IsoNoInit(3)), - @(TestDiscrim1(13), IsoNoInit(5)), - ]); - world.get_global::().strong = Some(ent); - - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -#[should_panic = "The index 42 is not available in the isotope request for \ - dynec::test_util::TestArch/dynec::test_util::IsoNoInit"] -fn test_partial_isotope_discrim_write_panic() { - partial_isotope_discrim_write(vec![TestDiscrim1(11)], vec![(42, None, None)], vec![]); -} - -type SingleExpectUpdate = (usize, Option, Option>); - -fn partial_isotope_discrim_write( - req_discrims: Vec, - single_expect_updates: Vec, - expect_all: Vec<(TestDiscrim1, IsoNoInit)>, -) { - #[system(dynec_as(crate))] - fn test_system( - #[dynec(param)] _req_discrims: &Vec, - #[dynec(param)] single_expect_updates: &mut Vec, - #[dynec(param)] expect_all: &Vec<(TestDiscrim1, IsoNoInit)>, - #[dynec(isotope(discrim = _req_discrims))] mut iso1: impl system::WriteIsotope< - TestArch, - IsoNoInit, - usize, - >, - #[dynec(global)] initials: &InitialEntities, - ) { - let ent = initials.strong.as_ref().expect("initials.strong is None"); - - for (discrim, mut expect, update) in single_expect_updates.drain(..) { - let iso = iso1.try_get_mut(ent, discrim); - assert_eq!(iso, expect.as_mut()); - if let Some(update) = update { - iso1.set(ent, discrim, update); - } - } - - // should only include requested discriminants - let map = iso1.get_all(ent); - let map_vec: Vec<(TestDiscrim1, &IsoNoInit)> = map.collect(); - let expect_all = - expect_all.iter().map(|(discrim, iso)| (*discrim, iso)).collect::>(); - assert_eq!(map_vec, expect_all); - } - - let mut world = - system_test!(test_system.build(req_discrims, single_expect_updates, expect_all);); - - let ent = world.create(crate::comps![@(crate) TestArch => - @(TestDiscrim1(7), IsoNoInit(2)), - @(TestDiscrim1(11), IsoNoInit(3)), - @(TestDiscrim1(13), IsoNoInit(5)), - ]); - world.get_global::().strong = Some(ent); - - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -fn test_offline_create() { - #[global(dynec_as(crate), initial = Step::Create)] - enum Step { - Create, - Access, - } - - #[derive(Debug, PartialEq, Eq, Hash)] - struct LatePartition; - - #[system(dynec_as(crate), before(LatePartition))] - fn entity_creator_system( - mut entity_creator: impl system::EntityCreator, - #[dynec(global(maybe_uninit(TestArch)))] initials: &mut InitialEntities, - #[dynec(global)] step: &Step, - ) { - match step { - Step::Create => { - initials.strong = Some( - entity_creator - .create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(5)]), - ); - } - Step::Access => {} - } - } - - #[system(dynec_as(crate))] - fn comp_access_system( - comp1: impl system::ReadSimple, - #[dynec(global)] initials: &InitialEntities, - #[dynec(global)] step: &Step, - ) { - match step { - Step::Create => { - assert!(initials.strong.is_none()); - } - Step::Access => { - let ent = initials.strong.as_ref().expect("initials.strong should have been set"); - comp1.try_get(ent).expect("initials.strong should have been initialized"); - } - } - } - - #[system(dynec_as(crate), 
after(LatePartition))] - fn late_comp_access_system( - // component storage does not require maybe_uninit unless the component has something like `Option>` - comp1: impl system::ReadSimple, - #[dynec(global(maybe_uninit(TestArch)))] initials: &InitialEntities, - #[dynec(global)] step: &Step, - ) { - match step { - Step::Create => { - let ent = initials.strong.as_ref().expect("initials.strong should have been set"); - assert!(comp1.try_get(ent).is_none(), "entity should be in pre-initialize state"); - } - Step::Access => { - let ent = initials.strong.as_ref().expect("initials.strong should have been set"); - comp1.try_get(ent).expect("initials.strong should have been initialized"); - } - } - } - - let mut world = system_test!(comp_access_system.build(), late_comp_access_system.build(), entity_creator_system.build();); - - world.execute(&tracer::Log(log::Level::Trace)); - *world.get_global::() = Step::Access; - world.execute(&tracer::Log(log::Level::Trace)); - - let ent = { - let initials = world.get_global::(); - let ent = initials.strong.as_ref().expect("initials.strong missing"); - ent.clone() - }; - let comp1 = world.components.get_simple::(&ent); - assert_eq!(comp1, Some(&mut Simple1OptionalNoDepNoInit(5))); -} - -#[test] -#[should_panic = "Scheduled systems have a cyclic dependency: "] -fn test_offline_create_conflict() { - #[system(dynec_as(crate))] - fn test_system( - mut entity_creator: impl system::EntityCreator, - #[dynec(global)] initials: &mut InitialEntities, - _comp1: impl system::ReadSimple, - ) { - initials.strong = Some( - entity_creator - .create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(5)]), - ); - } - - let mut world = system_test!(test_system.build();); - - world.execute(&tracer::Log(log::Level::Trace)); - - let ent = { - let initials = world.get_global::(); - let ent = initials.strong.as_ref().expect("initials.strong missing"); - ent.clone() - }; - let comp1 = world.components.get_simple::(&ent); - assert_eq!(comp1, Some(&mut Simple1OptionalNoDepNoInit(5))); -} - -#[test] -fn test_offline_delete() { - #[system(dynec_as(crate))] - fn test_system( - mut entity_deleter: impl system::EntityDeleter, - #[dynec(global)] initials: &mut InitialEntities, - _comp1: impl system::ReadSimple, - ) { - entity_deleter.queue(initials.strong.take().expect("initials.strong missing")); - } - - let mut world = system_test!(test_system.build();); - let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); - let weak = ent.weak(world.get_global::()); - world.get_global::().strong = Some(ent); - - world.execute(&tracer::Log(log::Level::Trace)); - - let comp1 = world.components.get_simple::(&weak); - assert_eq!(comp1, None); -} - -#[test] -#[cfg_attr( - any( - all(debug_assertions, feature = "debug-entity-rc"), - all(not(debug_assertions), feature = "release-entity-rc"), - ), - should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ - system dynec::world::tests::test_system. All strong references to an entity \ - must be dropped before queuing for deletion and removing all finalizers." 
-)] -fn test_offline_delete_send_system_leak() { - #[system(dynec_as(crate))] - fn test_system( - #[dynec(local(initial = None, entity))] entity: &mut Option>, - mut entity_deleter: impl system::EntityDeleter, - #[dynec(global)] initials: &mut InitialEntities, - _comp1: impl system::ReadSimple, - ) { - if let Some(ent) = initials.strong.take() { - *entity = Some(ent); - } - - if let Some(ent) = entity { - entity_deleter.queue(&*ent); - } - } - - let mut builder = world::Builder::new(0); - builder.schedule(Box::new(test_system.build())); - - let mut world = builder.build(); - - let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); - let weak = ent.weak(world.get_global::()); - world.get_global::().strong = Some(ent); - - world.execute(&tracer::Log(log::Level::Trace)); - - let comp1 = world.components.get_simple::(&weak); - assert_eq!(comp1, None); -} - -#[test] -#[cfg_attr( - any( - all(debug_assertions, feature = "debug-entity-rc"), - all(not(debug_assertions), feature = "release-entity-rc"), - ), - should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ - system dynec::world::tests::test_system. All strong references to an entity \ - must be dropped before queuing for deletion and removing all finalizers." -)] -fn test_offline_delete_unsend_system_leak() { - #[system(dynec_as(crate), thread_local)] - fn test_system( - #[dynec(local(initial = None, entity))] entity: &mut Option>, - mut entity_deleter: impl system::EntityDeleter, - #[dynec(global)] initials: &mut InitialEntities, - _comp1: impl system::ReadSimple, - ) { - if let Some(ent) = initials.strong.take() { - *entity = Some(ent); - } - - if let Some(ent) = entity { - entity_deleter.queue(&*ent); - } - } - - let mut builder = world::Builder::new(0); - builder.schedule_thread_unsafe(Box::new(test_system.build())); - - let mut world = builder.build(); - - let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); - let weak = ent.weak(world.get_global::()); - world.get_global::().strong = Some(ent); - - world.execute(&tracer::Log(log::Level::Trace)); - - let comp1 = world.components.get_simple::(&weak); - assert_eq!(comp1, None); -} - -#[test] -#[cfg_attr( - any( - all(debug_assertions, feature = "debug-entity-rc"), - all(not(debug_assertions), feature = "release-entity-rc"), - ), - should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ - global state dynec::test_util::InitialEntities. All strong references to an \ - entity must be dropped before queuing for deletion and removing all \ - finalizers." -)] -fn test_offline_delete_sync_global_leak() { - #[system(dynec_as(crate))] - fn test_system( - mut entity_deleter: impl system::EntityDeleter, - #[dynec(global)] initials: &mut InitialEntities, - _comp1: impl system::ReadSimple, - ) { - entity_deleter.queue(initials.strong.as_ref().expect("initials.strong missing")); - } - - let mut world = system_test!(test_system.build();); - let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); - world.get_global::().strong = Some(ent); - - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -#[cfg_attr( - any( - all(debug_assertions, feature = "debug-entity-rc"), - all(not(debug_assertions), feature = "release-entity-rc"), - ), - should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ - global state dynec::test_util::InitialEntities. 
All strong references to an \ - entity must be dropped before queuing for deletion and removing all \ - finalizers." -)] -fn test_offline_delete_unsync_global_leak() { - #[system(dynec_as(crate), thread_local)] - fn test_system( - mut entity_deleter: impl system::EntityDeleter, - #[dynec(global(thread_local))] initials: &mut InitialEntities, - _comp1: impl system::ReadSimple, - ) { - entity_deleter.queue(initials.strong.as_ref().expect("initials.strong missing")); - } - - let mut builder = world::Builder::new(0); - builder.schedule_thread_unsafe(Box::new(test_system.build())); - - let mut world = builder.build(); - - let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(7)]); - world.get_global_unsync::().strong = Some(ent); - - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -#[cfg_attr( - any( - all(debug_assertions, feature = "debug-entity-rc"), - all(not(debug_assertions), feature = "release-entity-rc"), - ), - should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ - dynec::test_util::TestArch / dynec::test_util::StrongRefSimple. All strong \ - references to an entity must be dropped before queuing for deletion and \ - removing all finalizers." -)] -fn test_offline_delete_simple_leak() { - #[system(dynec_as(crate))] - fn test_system( - mut entity_deleter: impl system::EntityDeleter, - #[dynec(global)] initials: &mut InitialEntities, - _srs: impl system::ReadSimple, - ) { - let entity = initials.weak.as_ref().expect("initials.strong missing"); - entity_deleter.queue(entity); - } - - let mut builder = world::Builder::new(0); - builder.schedule(Box::new(test_system.build())); - - let mut world = builder.build(); - - let ent = world.create(crate::comps![@(crate) TestArch =>]); - let weak = ent.weak(world.get_global::()); - world.get_global::().weak = Some(weak); - - world.create(crate::comps![@(crate) TestArch => StrongRefSimple(ent)]); - - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -#[cfg_attr( - any( - all(debug_assertions, feature = "debug-entity-rc"), - all(not(debug_assertions), feature = "release-entity-rc"), - ), - should_panic = "Detected dangling strong reference to entity dynec::test_util::TestArch#1 in \ - dynec::test_util::TestArch / dynec::test_util::StrongRefIsotope # \ - TestDiscrim1(29). All strong references to an entity must be dropped before \ - queuing for deletion and removing all finalizers." 
-)] -fn test_offline_delete_isotope_leak() { - #[system(dynec_as(crate))] - fn test_system( - mut entity_deleter: impl system::EntityDeleter, - #[dynec(global)] initials: &mut InitialEntities, - _sri: impl system::ReadIsotope, - ) { - let entity = initials.weak.as_ref().expect("initials.strong missing"); - entity_deleter.queue(entity); - } - - let mut builder = world::Builder::new(0); - builder.schedule(Box::new(test_system.build())); - - let mut world = builder.build(); - - let ent = world.create(crate::comps![@(crate) TestArch =>]); - let weak = ent.weak(world.get_global::()); - world.get_global::().weak = Some(weak); - - world.create(crate::comps![@(crate) TestArch => @(TestDiscrim1(29), StrongRefIsotope(ent))]); - - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -fn test_offline_finalizer_delete() { - #[system(dynec_as(crate))] - fn test_system( - mut entity_deleter: impl system::EntityDeleter, - #[dynec(global)] initials: &mut InitialEntities, - deletion_flags: impl system::ReadSimple, - mut comp_final: impl system::WriteSimple, - _comp1: impl system::ReadSimple, - ) { - let ent = initials.strong.as_ref().expect("initials.strong missing"); - if deletion_flags.try_get(ent).is_some() { - comp_final.set(ent, None); - initials.strong = None; - } else { - entity_deleter.queue(ent); - } - } - - let mut world = system_test!(test_system.build();); - - for _ in 0..3 { - let ent = world.create(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(13), Simple7WithFinalizerNoinit]); - let weak = ent.weak(world.get_global::()); - world.get_global::().strong = Some(ent); - - // first iteration - world.execute(&tracer::Log(log::Level::Trace)); - - let comp1 = world.components.get_simple::(&weak); - assert_eq!(comp1, Some(&mut Simple1OptionalNoDepNoInit(13))); - - // second iteration - world.execute(&tracer::Log(log::Level::Trace)); - - let comp1 = world.components.get_simple::(&weak); - assert_eq!(comp1, None); - } -} - -#[test] -fn test_entity_iter_partial_mut() { - #[system(dynec_as(crate))] - fn test_system( - iter: impl system::EntityIterator, - comp1_acc: impl system::ReadSimple, - #[dynec(isotope(discrim = [TestDiscrim1(7), TestDiscrim1(13)]))] - mut iso1_acc: impl system::WriteIsotope, - #[dynec(isotope(discrim = [TestDiscrim1(31)]))] mut iso1_acc_31: impl system::ReadIsotope< - TestArch, - IsoNoInit, - usize, - >, - ) { - let [mut iso1_acc_0, mut iso1_acc_1] = iso1_acc.split_isotopes([0, 1]); - let [iso1_acc_31] = iso1_acc_31.split([0]); - - for (entity, (comp1, iso10, iso11, iso131)) in iter.entities_with(( - comp1_acc.try_access(), - iso1_acc_0.try_access_mut(), - iso1_acc_1.try_access_mut(), - iso1_acc_31.try_access(), - )) { - match entity.id().to_primitive() { - 1 => { - assert_eq!(comp1, Some(&Simple1OptionalNoDepNoInit(5))); - assert_eq!(iso10, Some(&mut IsoNoInit(11))); - assert_eq!(iso11, None); - assert_eq!(iso131, Some(&IsoNoInit(41))); - } - 2 => { - assert_eq!(comp1, None); - assert_eq!(iso10, None); - assert_eq!(iso11, Some(&mut IsoNoInit(17))); - assert_eq!(iso131, Some(&IsoNoInit(43))); - } - 3 => { - assert_eq!(comp1, None); - assert_eq!(iso10, Some(&mut IsoNoInit(19))); - assert_eq!(iso11, Some(&mut IsoNoInit(23))); - assert_eq!(iso131, None); - } - _ => unreachable!(), - } - } - } - - let mut world = system_test! 
{ - test_system.build(); - _: TestArch = ( - Simple1OptionalNoDepNoInit(5), - @(TestDiscrim1(7), IsoNoInit(11)), - @(TestDiscrim1(31), IsoNoInit(41)), - ); - _: TestArch = ( - @(TestDiscrim1(13), IsoNoInit(17)), - @(TestDiscrim1(31), IsoNoInit(43)), - ); - _: TestArch = ( - @(TestDiscrim1(7), IsoNoInit(19)), - @(TestDiscrim1(13), IsoNoInit(23)), - ); - }; - - world.execute(&tracer::Log(log::Level::Trace)); -} - -#[test] -fn test_entity_iter_full_mut() { - #[system(dynec_as(crate))] - fn test_system( - iter: impl system::EntityIterator, - comp1_acc: impl system::ReadSimple, - mut iso1_acc: impl system::WriteIsotope, - ) { - let [mut iso1_acc_0, mut iso1_acc_1] = - iso1_acc.split_isotopes([TestDiscrim1(7), TestDiscrim1(13)]); - - for (entity, (comp1, iso10, iso11)) in iter.entities_with(( - comp1_acc.try_access(), - iso1_acc_0.try_access_mut(), - iso1_acc_1.try_access_mut(), - )) { - match entity.id().to_primitive() { - 1 => { - assert_eq!(comp1, Some(&Simple1OptionalNoDepNoInit(5))); - assert_eq!(iso10, Some(&mut IsoNoInit(11))); - assert_eq!(iso11, None); - } - 2 => { - assert_eq!(comp1, None); - assert_eq!(iso10, None); - assert_eq!(iso11, Some(&mut IsoNoInit(17))); - } - 3 => { - assert_eq!(comp1, None); - assert_eq!(iso10, Some(&mut IsoNoInit(19))); - assert_eq!(iso11, Some(&mut IsoNoInit(23))); - } - _ => unreachable!(), - } - } - } - - let mut world = system_test! { - test_system.build(); - _: TestArch = ( - Simple1OptionalNoDepNoInit(5), - @(TestDiscrim1(7), IsoNoInit(11)), - ); - _: TestArch = ( - @(TestDiscrim1(13), IsoNoInit(17)), - ); - _: TestArch = ( - @(TestDiscrim1(7), IsoNoInit(19)), - @(TestDiscrim1(13), IsoNoInit(23)), - ); - }; - - world.execute(&tracer::Log(log::Level::Trace)); -} - -// Test that there is no access conflict when creating, deleting and iterating the same archetype. -#[test] -fn test_entity_create_and_delete() { - #[system(dynec_as(crate))] - fn test_system( - mut entity_creator: impl system::EntityCreator, - _entity_deleter: impl system::EntityDeleter, - entity_iter: impl system::EntityIterator, - ) { - let entity = entity_creator - .create(crate::comps![ @(crate) TestArch => Simple1OptionalNoDepNoInit(1) ]); - for v in entity_iter.entities() { - assert_ne!(entity.id(), v.id()); - } - } - - #[system(dynec_as(crate))] - fn dummy_reader_system(_: impl system::ReadSimple) {} - - let mut world = system_test! { - test_system.build(), dummy_reader_system.build(); - }; - world.execute(&tracer::Log(log::Level::Trace)); -} +mod dependencies; +mod globals; diff --git a/src/world/tests/dependencies.rs b/src/world/tests/dependencies.rs new file mode 100644 index 0000000000..854b416fa7 --- /dev/null +++ b/src/world/tests/dependencies.rs @@ -0,0 +1,58 @@ +//! Tests autoinit dependencies. 
+ +use crate::test_util::*; +use crate::{system, system_test}; + +#[system(dynec_as(crate))] +fn common_test_system( + _comp3: system::ReadSimple, + _comp4: system::WriteSimple, + _comp5: system::ReadSimple, + _comp6: system::ReadSimple, + #[dynec(isotope(discrim = [TestDiscrim1(11), TestDiscrim1(17)]))] + _iso1: system::ReadIsotopePartial, + #[dynec(global)] _aggregator: &mut Aggregator, + #[dynec(global)] _initials: &InitialEntities, +) { +} + +#[test] +fn test_dependencies_successful() { + let mut world = system_test!(common_test_system.build();); + let entity = world.create::(crate::comps![ @(crate) TestArch => + Simple1OptionalNoDepNoInit(1), Simple5RequiredNoInit(1), + @(TestDiscrim1(11), IsoNoInit(1)), + @(TestDiscrim1(13), IsoNoInit(2)), + @(TestDiscrim1(17), IsoNoInit(3)), + ]); + + match world.components.get_simple_storage::().try_get(&entity) { + Some(&Simple4Depends12(c40, c41)) => { + assert_eq!(c40, 7); + assert_eq!(c41, (1 + 2) * 8); + } + None => panic!("Simple4Depends12 is used in system_with_comp3_comp4_comp5"), + } + + world.components.get_simple_storage::(); // should not panic +} + +#[test] +#[should_panic = "Cannot create an entity of type `dynec::test_util::TestArch` without explicitly \ + passing a component of type \ + `dynec::test_util::simple_comps::Simple5RequiredNoInit`"] +fn test_dependencies_missing_required_simple() { + let mut world = system_test!(common_test_system.build();); + world.create::(crate::comps![@(crate) TestArch => Simple1OptionalNoDepNoInit(1)]); +} + +#[test] +#[should_panic = "Cannot create an entity of type `dynec::test_util::TestArch` without explicitly \ + passing a component of type \ + `dynec::test_util::simple_comps::Simple2OptionalDepends1`, or \ + `dynec::test_util::simple_comps::Simple1OptionalNoDepNoInit` to invoke its \ + auto-initializer"] +fn test_dependencies_missing_required_dep() { + let mut world = system_test!(common_test_system.build();); + world.create::(crate::comps![@(crate) TestArch => Simple5RequiredNoInit(1)]); +} diff --git a/src/world/tests/globals.rs b/src/world/tests/globals.rs new file mode 100644 index 0000000000..54a7b25575 --- /dev/null +++ b/src/world/tests/globals.rs @@ -0,0 +1,30 @@ +//! Tests global state access. + +use crate::test_util::*; +use crate::{global, system, system_test, tracer}; + +#[test] +fn test_global_update() { + #[system(dynec_as(crate))] + fn test_system(#[dynec(global)] aggregator: &mut Aggregator) { aggregator.comp30_sum = 1; } + + let mut world = system_test!(test_system.build();); + + world.execute(&tracer::Log(log::Level::Trace)); + + let aggregator = world.get_global::(); + assert_eq!(aggregator.comp30_sum, 1); +} + +#[test] +#[should_panic = "Global type dynec::world::tests::globals::test_global_uninit::Uninit does not \ + have an initial impl and was not provided manually"] +fn test_global_uninit() { + #[global(dynec_as(crate))] + struct Uninit; + + #[system(dynec_as(crate))] + fn test_system(#[dynec(global)] _: &Uninit) {} + + let _world = system_test!(test_system.build();); +}
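// A companion sketch to the two global-state tests above: a global that declares an `initial`
// value (like `Step` in the offline-creation test removed earlier in this patch) is
// auto-initialized and never hits the missing-initial panic demonstrated by `Uninit`.
// The `Counter` type, its field and the system name are placeholders; the attribute syntax
// mirrors the `#[global]`/`#[system]` usage seen throughout this patch.
#[global(dynec_as(crate), initial = Counter(0))]
struct Counter(u32);

#[system(dynec_as(crate))]
fn increment_system(#[dynec(global)] counter: &mut Counter) {
    // No manual provisioning needed: the world starts with `Counter(0)`.
    counter.0 += 1;
}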
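// Similarly, a rough sketch of the offline accessor returned by the new
// `Components::get_simple_storage` API, mirroring the call used in dependencies.rs above.
// The turbofish parameters, the entity handle and the asserted value are assumptions made
// for illustration only.
fn inspect_offline(world: &mut crate::world::World, ent: &Entity<TestArch>) {
    // `get_simple_storage` takes `&mut Components`, which guarantees the world is offline
    // (no systems are concurrently borrowing the storage).
    let storage = world.components.get_simple_storage::<TestArch, Simple5RequiredNoInit>();
    if let Some(comp) = storage.try_get(ent) {
        assert_eq!(comp, &Simple5RequiredNoInit(1));
    }
}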