spirt/context.rs

//! [`Context`] and related types/traits.

use crate::spv::spec::ExtInstSetDesc;
use rustc_hash::FxHashMap;
use std::hash::Hash;
use std::mem;
use std::num::NonZeroU32;
use std::ops::{Deref, DerefMut};

/// Context object with global resources for SPIR-T.
///
/// Those resources currently are:
/// * interners, for anything without an identity, and which can be deduplicated
/// * "entity" allocators, for everything else - i.e. anything with an identity
///   that needs to remain unique across an entire [`Context`]
///   * the *definition* of an entity isn't kept in the [`Context`], but rather in
///     some [`EntityDefs`] collection somewhere in a [`Module`](crate::Module) (or further nested),
///     with only the entity *indices* being allocated by the [`Context`]
/// * custom SPIR-V "extended instruction set" descriptions, which can be
///   dynamically registered, to account for any such "extended instruction set"
///   not covered by [`spv::spec::Spec`](crate::spv::spec::Spec)'s built-in list
///   (e.g. non-standard tool-specific sets), and only used for pretty-printing
#[derive(Default)]
pub struct Context {
    interners: Interners,
    entity_allocs: EntityAllocs,

    custom_spv_ext_inst_set_descs: elsa::FrozenBTreeMap<String, Box<ExtInstSetDesc>>,
}

impl Context {
    /// Register a custom [`ExtInstSetDesc`] with name `ext_inst_set_name`,
    /// to be used by pretty-printing when using this [`Context`].
    pub fn register_custom_ext_inst_set(
        &self,
        ext_inst_set_name: &str,
        ext_inst_set_desc: ExtInstSetDesc,
    ) {
        let lowercase_ext_inst_set_name = ext_inst_set_name.to_ascii_lowercase();
        assert!(
            self.get_custom_ext_inst_set_by_lowercase_name(&lowercase_ext_inst_set_name).is_none(),
            "already registered {lowercase_ext_inst_set_name:?} \
             (name before lowercasing: {ext_inst_set_name})"
        );
        self.custom_spv_ext_inst_set_descs
            .insert(lowercase_ext_inst_set_name, Box::new(ext_inst_set_desc));
    }

    /// Return a custom [`ExtInstSetDesc`], if one was registered on this [`Context`],
    /// for this `OpExtInstImport` name (required to be lowercase, due to Khronos'
    /// choice of case insensitivity, but **not checked by this function**).
    pub fn get_custom_ext_inst_set_by_lowercase_name(
        &self,
        lowercase_ext_inst_set_name: &str,
    ) -> Option<&ExtInstSetDesc> {
        self.custom_spv_ext_inst_set_descs.get(lowercase_ext_inst_set_name)
    }
}
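
// Illustrative sketch of the registration API above (assumption: this file
// doesn't show `ExtInstSetDesc`'s fields, so `make_desc()` is a hypothetical
// stand-in for however such a description is actually constructed):
//
//     let cx = Context::new();
//     let desc: ExtInstSetDesc = make_desc();
//     cx.register_custom_ext_inst_set("MyTool.CustomSet", desc);
//     // Lookups use the lowercased name, matching SPIR-V's case-insensitive
//     // treatment of `OpExtInstImport` names:
//     assert!(cx.get_custom_ext_inst_set_by_lowercase_name("mytool.customset").is_some());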

/// Private module containing traits (and related types) used in public APIs,
/// but which should not be usable outside of the `context` module.
mod sealed {
    use std::cell::Cell;
    use std::num::NonZeroU32;

    pub trait Interned: Sized + 'static {
        type Def: ?Sized + Eq + std::hash::Hash;

        fn preintern(_interner: &Interner<Self>) {}
        fn from_u32(i: u32) -> Self;
        fn to_u32(self) -> u32;
        fn cx_interner(cx: &super::Context) -> &Interner<Self>;
    }

    // FIXME(eddyb) one `Box` per element is inefficient, figure out if e.g.
    // the `rental` crate could allow keeping an `arena: TypedArena<I::Def>`
    // alongside the `FrozenIndexSet` (which would then use `&'arena I::Def`).
    pub struct Interner<I: Interned>(
        elsa::FrozenIndexSet<Box<I::Def>, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>,
    );

    impl<I: Interned> Default for Interner<I> {
        fn default() -> Self {
            let interner = Self(Default::default());
            I::preintern(&interner);
            interner
        }
    }

    impl<I: Interned> Interner<I> {
        #[track_caller]
        pub(super) fn intern(&self, value: impl AsRef<I::Def> + Into<Box<I::Def>>) -> I {
            if let Some((i, _)) = self.0.get_full(value.as_ref()) {
                return I::from_u32(i as u32);
            }
            let (i, _) = self.0.insert_full(value.into());
            I::from_u32(i.try_into().expect("interner overflowed u32"))
        }
    }

    impl<I: Interned> std::ops::Index<I> for Interner<I> {
        type Output = I::Def;

        fn index(&self, interned: I) -> &Self::Output {
            &self.0[interned.to_u32() as usize]
        }
    }

    // FIXME(eddyb) reflect "is an `Entity`" in a non-sealed way, by having an
    // e.g. `pub trait IsEntity: Entity {}` w/ a blanket impl, that users could
    // not implement themselves because of the `Entity` requirement, but could
    // still bound on, and it would imply `Entity` as needed - this could even
    // be named `Entity`, with `sealed::Entity` only used (by `context`) for:
    // - `impl sealed::Entity for $name` to define an entity
    // - `use sealed::Entity as _;` to use associated items from the trait
    pub trait Entity: Sized + Copy + Eq + std::hash::Hash + 'static {
        type Def;

        const CHUNK_SIZE: u32;
        const CHUNK_MASK: u32 = {
            assert!(Self::CHUNK_SIZE.is_power_of_two());
            assert!(Self::CHUNK_SIZE as usize as u32 == Self::CHUNK_SIZE);
            Self::CHUNK_SIZE - 1
        };

        fn from_non_zero_u32(i: NonZeroU32) -> Self;
        fn to_non_zero_u32(self) -> NonZeroU32;
        fn cx_entity_alloc(cx: &super::Context) -> &EntityAlloc<Self>;

        #[inline(always)]
        fn to_chunk_start_and_intra_chunk_idx(self) -> (Self, usize) {
            let self_u32 = self.to_non_zero_u32().get();
            (
                Self::from_non_zero_u32(NonZeroU32::new(self_u32 & !Self::CHUNK_MASK).unwrap()),
                (self_u32 & Self::CHUNK_MASK) as usize,
            )
        }
    }
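
    // Worked example of the decomposition above (purely illustrative): with
    // `CHUNK_SIZE = 0x1000` (so `CHUNK_MASK = 0xfff`), an entity index of
    // `0x5_007` splits into:
    //   chunk start:     0x5_007 & !0xfff == 0x5_000
    //   intra-chunk idx: 0x5_007 &  0xfff == 7
    // i.e. offset `7` within the chunk whose first index is `0x5_000`.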

    pub struct EntityAlloc<E: Entity>(Cell<E>);

    impl<E: Entity> Default for EntityAlloc<E> {
        fn default() -> Self {
            // NOTE(eddyb) always skip chunk `0`, as a sort of "null page",
            // to allow using `NonZeroU32` instead of merely `u32`.
            Self(Cell::new(E::from_non_zero_u32(NonZeroU32::new(E::CHUNK_SIZE).unwrap())))
        }
    }

    impl<E: Entity> EntityAlloc<E> {
        #[track_caller]
        pub(super) fn alloc_chunk(&self) -> E {
            let chunk_start = self.0.get();
            let next_chunk_start = E::from_non_zero_u32(
                chunk_start
                    .to_non_zero_u32()
                    .checked_add(E::CHUNK_SIZE)
                    .expect("entity index overflowed u32"),
            );
            self.0.set(next_chunk_start);
            chunk_start
        }
    }
}
use sealed::Entity as _;

/// Dispatch helper, to allow implementing interning logic on
/// the type passed to `cx.intern(...)`.
pub trait InternInCx<I> {
    #[track_caller]
    fn intern_in_cx(self, cx: &Context) -> I;
}

impl Context {
    pub fn new() -> Self {
        Self::default()
    }

    #[track_caller]
    pub fn intern<T: InternInCx<I>, I>(&self, x: T) -> I {
        x.intern_in_cx(self)
    }
}

impl<I: sealed::Interned> std::ops::Index<I> for Context {
    type Output = I::Def;

    fn index(&self, interned: I) -> &Self::Output {
        &I::cx_interner(self)[interned]
    }
}
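
// Illustrative sketch of the interning round-trip (using only the public
// `Context::intern` / `Index` APIs and the `InternedStr` interner defined
// near the bottom of this file):
//
//     let cx = Context::new();
//     let a: InternedStr = cx.intern("hello");
//     let b: InternedStr = cx.intern("hello".to_string());
//     assert!(a == b); // deduplicated: both name the same interned `str`
//     assert_eq!(&cx[a], "hello"); // `Index` recovers the interned value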

// FIXME(eddyb) consider including `Rc<Context>` in `EntityDefs` to avoid having
// to pass it manually to the `EntityDefs::define` methods (which feels dangerous!).
//
/// Collection holding the actual definitions for [`Context`]-allocated entities.
///
/// By design there is no way to iterate the contents of an [`EntityDefs`], or
/// generate entity indices without defining the entity in an [`EntityDefs`].
#[derive(Clone)]
pub struct EntityDefs<E: sealed::Entity> {
    /// Entities are grouped into chunks, with per-entity-type chunk sizes
    /// (powers of 2) specified via `entities!` below.
    /// This allows different [`EntityDefs`]s to independently define more
    /// entities, without losing compactness (until a whole chunk is filled).
    //
    // FIXME(eddyb) consider using `u32` instead of `usize` for the "flattened base".
    complete_chunk_start_to_flattened_base: FxHashMap<E, usize>,

    /// Similar to a single entry in `complete_chunk_start_to_flattened_base`,
    /// but kept outside of the map for efficiency. Also, this is the only
    /// chunk that doesn't have its full size already (and therefore allows
    /// defining more entities into it, without allocating new chunks).
    incomplete_chunk_start_and_flattened_base: Option<(E, usize)>,

    /// All chunks' definitions are flattened into one contiguous [`Vec`], where
    /// the start of each chunk's definitions in `flattened` is indicated by
    /// either `complete_chunk_start_to_flattened_base` (for completed chunks)
    /// or `incomplete_chunk_start_and_flattened_base`.
    flattened: Vec<E::Def>,
}

impl<E: sealed::Entity> Default for EntityDefs<E> {
    fn default() -> Self {
        Self {
            complete_chunk_start_to_flattened_base: FxHashMap::default(),
            incomplete_chunk_start_and_flattened_base: None,
            flattened: vec![],
        }
    }
}

impl<E: sealed::Entity> EntityDefs<E> {
    pub fn new() -> Self {
        Self::default()
    }

    #[track_caller]
    pub fn define(&mut self, cx: &Context, def: E::Def) -> E {
        let entity = match self.incomplete_chunk_start_and_flattened_base {
            Some((chunk_start, flattened_base)) => {
                let chunk_len = self.flattened.len() - flattened_base;

                // This is the last entity in this chunk, completing it.
                // NB: no new chunk is allocated here, but instead the next
                // `define` call will find no incomplete chunk, which will
                // prompt it to allocate a new chunk itself.
                if chunk_len == (E::CHUNK_SIZE - 1) as usize {
                    self.complete_chunk_start_to_flattened_base
                        .extend(self.incomplete_chunk_start_and_flattened_base.take());
                }

                E::from_non_zero_u32(
                    NonZeroU32::new(chunk_start.to_non_zero_u32().get() + chunk_len as u32)
                        .unwrap(),
                )
            }
            None => {
                let chunk_start = E::cx_entity_alloc(cx).alloc_chunk();

                self.incomplete_chunk_start_and_flattened_base =
                    Some((chunk_start, self.flattened.len()));

                chunk_start
            }
        };
        self.flattened.push(def);
        entity
    }

    fn entity_to_flattened(&self, entity: E) -> Option<usize> {
        let (chunk_start, intra_chunk_idx) = entity.to_chunk_start_and_intra_chunk_idx();
        let flattened_base = match self.incomplete_chunk_start_and_flattened_base {
            Some((incomplete_chunk_start, incomplete_flattened_base))
                if chunk_start == incomplete_chunk_start =>
            {
                incomplete_flattened_base
            }
            _ => *self.complete_chunk_start_to_flattened_base.get(&chunk_start)?,
        };
        Some(flattened_base + intra_chunk_idx)
    }
}
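
// Illustrative sketch of defining and looking up an entity (assumption:
// `GlobalVarDecl`'s fields aren't shown in this file, so `make_decl()` is a
// hypothetical constructor for one):
//
//     let cx = Context::new();
//     let mut defs = EntityDefs::<GlobalVar>::new();
//     let gv: GlobalVar = defs.define(&cx, make_decl());
//     // The index was allocated by `cx`, but the definition lives in `defs`:
//     let decl: &crate::GlobalVarDecl = &defs[gv];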

impl<E: sealed::Entity> std::ops::Index<E> for EntityDefs<E> {
    type Output = E::Def;

    fn index(&self, entity: E) -> &Self::Output {
        self.entity_to_flattened(entity).and_then(|i| self.flattened.get(i)).unwrap()
    }
}

impl<E: sealed::Entity> std::ops::IndexMut<E> for EntityDefs<E> {
    fn index_mut(&mut self, entity: E) -> &mut Self::Output {
        self.entity_to_flattened(entity).and_then(|i| self.flattened.get_mut(i)).unwrap()
    }
}

/// `EntityOriented*Map<Self, V>` support trait, implemented for entity types,
/// but which can also be implemented by users for their own newtypes and other
/// types wrapping entity types (such as finite `enum`s).
pub trait EntityOrientedMapKey<V>: Copy {
    /// The entity type that appears exactly once in every value of `Self`.
    type Entity: sealed::Entity;
    fn to_entity(key: Self) -> Self::Entity;

    /// A type holding enough different `Option<V>` slots, for all possible
    /// values of `Self`, for a given `Self::Entity` value contained inside.
    //
    // FIXME(eddyb) consider making this just an array length?
    type DenseValueSlots: Default;
    fn get_dense_value_slot(key: Self, slots: &Self::DenseValueSlots) -> &Option<V>;
    fn get_dense_value_slot_mut(key: Self, slots: &mut Self::DenseValueSlots) -> &mut Option<V>;
}

impl<E: sealed::Entity, V> EntityOrientedMapKey<V> for E {
    type Entity = E;
    fn to_entity(key: E) -> E {
        key
    }

    type DenseValueSlots = Option<V>;
    fn get_dense_value_slot(_: Self, slot: &Option<V>) -> &Option<V> {
        slot
    }
    fn get_dense_value_slot_mut(_: Self, slot: &mut Option<V>) -> &mut Option<V> {
        slot
    }
}

/// Map with `K` keys and `V` values, that is:
/// * "entity-oriented" `K` keys, i.e. that are or contain exactly one entity
///   (supported via [`K: EntityOrientedMapKey<V>`](EntityOrientedMapKey) for extensibility)
/// * "dense" in the sense of few (or no) gaps in (the entities in) its keys
///   (relative to the entities defined in the corresponding [`EntityDefs`])
///
/// By design there is no way to iterate the entries in an [`EntityOrientedDenseMap`].
//
// FIXME(eddyb) implement a "sparse" version as well, and maybe some bitsets?
#[derive(Clone)]
pub struct EntityOrientedDenseMap<K: EntityOrientedMapKey<V>, V> {
    /// Like in [`EntityDefs`], entities are grouped into chunks, but there is no
    /// flattening, since arbitrary insertion orders have to be supported.
    chunk_start_to_value_slots: SmallFxHashMap<K::Entity, Vec<K::DenseValueSlots>>,
}

// FIXME(eddyb) find a better "small map" design and/or fine-tune this - though,
// since the ideal state is one chunk per map, the slow case might never be hit,
// unless one `EntityOrientedDenseMap` is used with more than one `EntityDefs`,
// which could still maybe be implemented more efficiently than `FxHashMap`.
#[derive(Clone)]
enum SmallFxHashMap<K, V> {
    Empty,
    One(K, V),
    More(FxHashMap<K, V>),
}

impl<K, V> Default for SmallFxHashMap<K, V> {
    fn default() -> Self {
        Self::Empty
    }
}

impl<K: Copy + Eq + Hash, V: Default> SmallFxHashMap<K, V> {
    fn get_mut_or_insert_default(&mut self, k: K) -> &mut V {
        // HACK(eddyb) to avoid borrowing issues, this is done in two stages:
        // 1. ensure `self` is `One(k, _) | More`, i.e. `One` implies same key
        match *self {
            Self::Empty => {
                *self = Self::One(k, V::default());
            }
            Self::One(old_k, _) => {
                if old_k != k {
                    let old = mem::replace(self, Self::More(Default::default()));
                    match (old, &mut *self) {
                        (Self::One(_, old_v), Self::More(map)) => {
                            map.insert(old_k, old_v);
                        }
                        _ => unreachable!(),
                    }
                }
            }
            Self::More(_) => {}
        }

        // 2. get the value from `One` or potentially insert one into `More`
        match self {
            Self::Empty => unreachable!(),
            Self::One(_, v) => v,
            Self::More(map) => map.entry(k).or_default(),
        }
    }

    fn get(&self, k: K) -> Option<&V> {
        #[allow(clippy::match_same_arms)]
        match self {
            Self::Empty => None,
            Self::One(old_k, old_v) if *old_k == k => Some(old_v),
            Self::One(..) => None,
            Self::More(map) => map.get(&k),
        }
    }

    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        #[allow(clippy::match_same_arms)]
        match self {
            Self::Empty => None,
            Self::One(old_k, old_v) if *old_k == k => Some(old_v),
            Self::One(..) => None,
            Self::More(map) => map.get_mut(&k),
        }
    }
}
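
// Illustrative sketch of the `Empty -> One -> More` promotion above (purely
// behavioral; `SmallFxHashMap` is private to this module):
//
//     let mut m = SmallFxHashMap::<u32, Vec<u32>>::default(); // Empty
//     m.get_mut_or_insert_default(1).push(10);                // One(1, _)
//     m.get_mut_or_insert_default(1).push(11);                // still One(1, _)
//     m.get_mut_or_insert_default(2).push(20);                // promoted to More
//     assert_eq!(m.get(1).map(|v| v.len()), Some(2));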

impl<K: EntityOrientedMapKey<V>, V> Default for EntityOrientedDenseMap<K, V> {
    fn default() -> Self {
        Self { chunk_start_to_value_slots: Default::default() }
    }
}

impl<K: EntityOrientedMapKey<V>, V> EntityOrientedDenseMap<K, V> {
    pub fn new() -> Self {
        Self::default()
    }

    // FIXME(eddyb) this should not allocate space unconditionally, but offer an
    // API where "vacant entry" may or may not have a `&mut Option<V>` in it.
    pub fn entry(&mut self, key: K) -> &mut Option<V> {
        let entity = K::to_entity(key);
        let (chunk_start, intra_chunk_idx) = entity.to_chunk_start_and_intra_chunk_idx();
        let chunk_value_slots =
            self.chunk_start_to_value_slots.get_mut_or_insert_default(chunk_start);

        // Ensure there are enough slots for the new entry.
        let needed_len = intra_chunk_idx + 1;
        if chunk_value_slots.len() < needed_len {
            chunk_value_slots.resize_with(needed_len, Default::default);
        }

        let value_slots = &mut chunk_value_slots[intra_chunk_idx];
        K::get_dense_value_slot_mut(key, value_slots)
    }

    pub fn insert(&mut self, key: K, value: V) -> Option<V> {
        self.entry(key).replace(value)
    }

    pub fn get(&self, key: K) -> Option<&V> {
        let entity = K::to_entity(key);
        let (chunk_start, intra_chunk_idx) = entity.to_chunk_start_and_intra_chunk_idx();
        let value_slots = self.chunk_start_to_value_slots.get(chunk_start)?.get(intra_chunk_idx)?;
        K::get_dense_value_slot(key, value_slots).as_ref()
    }

    pub fn get_mut(&mut self, key: K) -> Option<&mut V> {
        self.get_slot_mut(key)?.as_mut()
    }

    pub fn remove(&mut self, key: K) -> Option<V> {
        self.get_slot_mut(key)?.take()
    }

    // FIXME(eddyb) deduplicate with `entry`.
    fn get_slot_mut(&mut self, key: K) -> Option<&mut Option<V>> {
        let entity = K::to_entity(key);
        let (chunk_start, intra_chunk_idx) = entity.to_chunk_start_and_intra_chunk_idx();
        let value_slots =
            self.chunk_start_to_value_slots.get_mut(chunk_start)?.get_mut(intra_chunk_idx)?;
        Some(K::get_dense_value_slot_mut(key, value_slots))
    }
}
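
// Illustrative sketch of using the map (assumption: `func` is some `Func`
// previously defined through an `EntityDefs<Func>`, i.e. a valid entity):
//
//     let mut use_counts = EntityOrientedDenseMap::<Func, usize>::new();
//     use_counts.insert(func, 0);
//     *use_counts.get_mut(func).unwrap() += 1;
//     assert_eq!(use_counts.get(func), Some(&1));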

#[allow(rustdoc::private_intra_doc_links)]
/// Doubly-linked list, "intrusively" going through `E::Def`, which must be an
/// [`EntityListNode<E, _>`] (to hold the "previous/next node" links).
///
/// Fields are private to avoid arbitrary user interactions.
#[derive(Copy, Clone)]
pub struct EntityList<E: sealed::Entity>(Option<FirstLast<E, E>>);

// HACK(eddyb) this only exists to give field names to the non-empty case.
#[derive(Copy, Clone)]
struct FirstLast<F, L> {
    first: F,
    last: L,
}

impl<E: sealed::Entity> Default for EntityList<E> {
    fn default() -> Self {
        Self(None)
    }
}

impl<E: sealed::Entity<Def = EntityListNode<E, D>>, D> EntityList<E> {
    pub fn empty() -> Self {
        Self::default()
    }

    pub fn is_empty(self) -> bool {
        self.0.is_none()
    }

    pub fn iter(self) -> EntityListIter<E> {
        EntityListIter { first: self.0.map(|list| list.first), last: self.0.map(|list| list.last) }
    }

    /// Insert `new_node` (defined in `defs`) at the start of `self`.
    #[track_caller]
    pub fn insert_first(&mut self, new_node: E, defs: &mut EntityDefs<E>) {
        let new_node_def = &mut defs[new_node];
        assert!(
            new_node_def.prev.is_none() && new_node_def.next.is_none(),
            "EntityList::insert_first: new node already linked into a (different?) list"
        );

        new_node_def.next = self.0.map(|this| this.first);
        if let Some(old_first) = new_node_def.next {
            let old_first_def = &mut defs[old_first];

            // FIXME(eddyb) this situation should be impossible anyway, as it
            // involves the `EntityListNode`s links, which should be unforgeable,
            // but it's still possible to keep around outdated `EntityList`s
            // (should `EntityList` not implement `Copy`/`Clone` *at all*?)
            assert!(old_first_def.prev.is_none(), "invalid EntityList: `first->prev != None`");

            old_first_def.prev = Some(new_node);
        }

        self.0 =
            Some(FirstLast { first: new_node, last: self.0.map_or(new_node, |this| this.last) });
    }

    /// Insert `new_node` (defined in `defs`) at the end of `self`.
    #[track_caller]
    pub fn insert_last(&mut self, new_node: E, defs: &mut EntityDefs<E>) {
        let new_node_def = &mut defs[new_node];
        assert!(
            new_node_def.prev.is_none() && new_node_def.next.is_none(),
            "EntityList::insert_last: new node already linked into a (different?) list"
        );

        new_node_def.prev = self.0.map(|this| this.last);
        if let Some(old_last) = new_node_def.prev {
            let old_last_def = &mut defs[old_last];

            // FIXME(eddyb) this situation should be impossible anyway, as it
            // involves the `EntityListNode`s links, which should be unforgeable,
            // but it's still possible to keep around outdated `EntityList`s
            // (should `EntityList` not implement `Copy`/`Clone` *at all*?)
            assert!(old_last_def.next.is_none(), "invalid EntityList: `last->next != None`");

            old_last_def.next = Some(new_node);
        }

        self.0 =
            Some(FirstLast { first: self.0.map_or(new_node, |this| this.first), last: new_node });
    }

    /// Insert `new_node` (defined in `defs`) into `self`, before `next`.
    //
    // FIXME(eddyb) unify this with the other insert methods, maybe with a new
    // "insert position" type?
    #[track_caller]
    pub fn insert_before(&mut self, new_node: E, next: E, defs: &mut EntityDefs<E>) {
        let prev = defs[next].prev.replace(new_node);

        let new_node_def = &mut defs[new_node];
        assert!(
            new_node_def.prev.is_none() && new_node_def.next.is_none(),
            "EntityList::insert_before: new node already linked into a (different?) list"
        );

        new_node_def.prev = prev;
        new_node_def.next = Some(next);

        match prev {
            Some(prev) => {
                let old_prev_next = defs[prev].next.replace(new_node);

                // FIXME(eddyb) this situation should be impossible anyway, as it
                // involves the `EntityListNode`s links, which should be unforgeable.
                assert!(
                    old_prev_next == Some(next),
                    "invalid EntityListNode: `node->prev->next != node`"
                );
            }
            None => {
                // FIXME(eddyb) this situation should be impossible anyway, as it
                // involves the `EntityListNode`s links, which should be unforgeable,
                // but it's still possible to keep around outdated `EntityList`s
                // (should `EntityList` not implement `Copy`/`Clone` *at all*?)
                assert!(
                    self.0.map(|this| this.first) == Some(next),
                    "invalid EntityList: `node->prev == None` but `node != first`"
                );

                self.0.as_mut().unwrap().first = new_node;
            }
        }
    }

    /// Insert all of `list_to_prepend`'s nodes at the start of `self`.
    #[track_caller]
    pub fn prepend(&mut self, list_to_prepend: Self, defs: &mut EntityDefs<E>) {
        *self = Self::concat(list_to_prepend, *self, defs);
    }

    /// Insert all of `list_to_append`'s nodes at the end of `self`.
    #[track_caller]
    pub fn append(&mut self, list_to_append: Self, defs: &mut EntityDefs<E>) {
        *self = Self::concat(*self, list_to_append, defs);
    }

    /// Private helper for `prepend`/`append`.
    #[track_caller]
    fn concat(a: Self, b: Self, defs: &mut EntityDefs<E>) -> Self {
        let (a, b) = match (a.0, b.0) {
            (Some(a), Some(b)) => (a, b),
            (a, b) => return Self(a.or(b)),
        };

        {
            let a_last_def = &mut defs[a.last];

            // FIXME(eddyb) this situation should be impossible anyway, as it
            // involves the `EntityListNode`s links, which should be unforgeable,
            // but it's still possible to keep around outdated `EntityList`s
            // (should `EntityList` not implement `Copy`/`Clone` *at all*?)
            assert!(a_last_def.next.is_none(), "invalid EntityList: `last->next != None`");

            a_last_def.next = Some(b.first);
        }
        {
            let b_first_def = &mut defs[b.first];

            // FIXME(eddyb) this situation should be impossible anyway, as it
            // involves the `EntityListNode`s links, which should be unforgeable,
            // but it's still possible to keep around outdated `EntityList`s
            // (should `EntityList` not implement `Copy`/`Clone` *at all*?)
            assert!(b_first_def.prev.is_none(), "invalid EntityList: `first->prev != None`");

            b_first_def.prev = Some(a.last);
        }

        Self(Some(FirstLast { first: a.first, last: b.last }))
    }

    /// Remove `node` (defined in `defs`) from `self`.
    #[track_caller]
    pub fn remove(&mut self, node: E, defs: &mut EntityDefs<E>) {
        // Unlink `node->{prev,next}` first (also allowing re-insertion elsewhere).
        let (prev, next) = {
            let node_def = &mut defs[node];
            (node_def.prev.take(), node_def.next.take())
        };

        // Unlink `prev->next = node` (or validate `first = node`).
        match prev {
            Some(prev) => {
                let old_prev_next = mem::replace(&mut defs[prev].next, next);

                // FIXME(eddyb) this situation should be impossible anyway, as it
                // involves the `EntityListNode`s links, which should be unforgeable.
                assert!(
                    old_prev_next == Some(node),
                    "invalid EntityListNode: `node->prev->next != node`"
                );
            }
            None => {
                // FIXME(eddyb) this situation should be impossible anyway, as it
                // involves the `EntityListNode`s links, which should be unforgeable,
                // but it's still possible to keep around outdated `EntityList`s
                // (should `EntityList` not implement `Copy`/`Clone` *at all*?)
                assert!(
                    self.0.map(|this| this.first) == Some(node),
                    "invalid EntityList: `node->prev == None` but `node != first`"
                );
            }
        }

        // Unlink `next->prev = node` (or validate `last = node`).
        match next {
            Some(next) => {
                let old_next_prev = mem::replace(&mut defs[next].prev, prev);

                // FIXME(eddyb) this situation should be impossible anyway, as it
                // involves the `EntityListNode`s links, which should be unforgeable.
                assert!(
                    old_next_prev == Some(node),
                    "invalid EntityListNode: `node->next->prev != node`"
                );
            }
            None => {
                // FIXME(eddyb) this situation should be impossible anyway, as it
                // involves the `EntityListNode`s links, which should be unforgeable,
                // but it's still possible to keep around outdated `EntityList`s
                // (should `EntityList` not implement `Copy`/`Clone` *at all*?)
                assert!(
                    self.0.map(|this| this.last) == Some(node),
                    "invalid EntityList: `node->next == None` but `node != last`"
                );
            }
        }

        // Update list end-points (overwritten `first`/`last` validated above).
        match (prev, next) {
            (Some(_), Some(_)) => {}
            (None, Some(next)) => self.0.as_mut().unwrap().first = next,
            (Some(prev), None) => self.0.as_mut().unwrap().last = prev,
            (None, None) => self.0 = None,
        }
    }
}
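
// Illustrative sketch of building a list (assumption: `DataInstDef`'s fields
// aren't shown here, so `make_inst_def()` is a hypothetical constructor; the
// `From<D>` impl on `EntityListNode` below makes `.into()` wrap it in a node):
//
//     let cx = Context::new();
//     let mut defs = EntityDefs::<DataInst>::new();
//     let mut list = EntityList::<DataInst>::empty();
//     let inst = defs.define(&cx, make_inst_def().into());
//     list.insert_last(inst, &mut defs);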

/// [`EntityList<E>`] iterator, but with a different API than [`Iterator`].
///
/// This can also be considered a (non-random-access) "subslice" of the list.
#[derive(Copy, Clone)]
pub struct EntityListIter<E: sealed::Entity> {
    pub first: Option<E>,
    pub last: Option<E>,
}

impl<E: sealed::Entity<Def = EntityListNode<E, D>>, D> EntityListIter<E> {
    #[track_caller]
    pub fn split_first(self, defs: &EntityDefs<E>) -> Option<(E, Self)> {
        let Self { first, last } = self;
        let current = first?;
        let next = defs[current].next;
        match next {
            // FIXME(eddyb) this situation should be impossible anyway, as it
            // involves the `EntityListNode`s links, which should be unforgeable.
            Some(next) => assert!(
                defs[next].prev == Some(current),
                "invalid EntityListNode: `node->next->prev != node`"
            ),

            None => assert!(
                Some(current) == last,
                "invalid EntityListIter: `first->next->...->next != last`"
            ),
        }
        Some((current, Self { first: next, last }))
    }

    #[track_caller]
    pub fn split_last(self, defs: &EntityDefs<E>) -> Option<(E, Self)> {
        let Self { first, last } = self;
        let current = last?;
        let prev = defs[current].prev;
        match prev {
            // FIXME(eddyb) this situation should be impossible anyway, as it
            // involves the `EntityListNode`s links, which should be unforgeable.
            Some(prev) => assert!(
                defs[prev].next == Some(current),
                "invalid EntityListNode: `node->prev->next != node`"
            ),

            None => assert!(
                Some(current) == first,
                "invalid EntityListIter: `last->prev->...->prev != first`"
            ),
        }
        Some((current, Self { first, last: prev }))
    }
}
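
// Illustrative sketch of forward iteration via `split_first` (the
// `EntityListIter` equivalent of a `while let` loop; `EntityListIter` is
// `Copy`, so reassigning `iter` each step is cheap):
//
//     let mut iter = list.iter();
//     while let Some((node, rest)) = iter.split_first(&defs) {
//         // ... use `node` and `defs[node]` here ...
//         iter = rest;
//     }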

/// [`EntityList<E>`] node, containing the "intrusive" list links, and the rest of
/// the entity definition (the `inner_def` field of type `D`).
///
/// Fields are private to avoid arbitrary user interactions outside of special
/// methods and [`Deref`]/[`DerefMut`].
//
// FIXME(eddyb) `Deref`/`DerefMut` aren't the best API, could this be hidden
// further by making `EntityDefs` hide the list links in the `Index` impl?
#[derive(Clone)]
pub struct EntityListNode<E: sealed::Entity<Def = Self>, D> {
    prev: Option<E>,
    next: Option<E>,

    inner_def: D,
}

impl<E: sealed::Entity<Def = Self>, D> From<D> for EntityListNode<E, D> {
    fn from(inner_def: D) -> Self {
        Self { prev: None, next: None, inner_def }
    }
}

impl<E: sealed::Entity<Def = Self>, D> EntityListNode<E, D> {
    pub fn prev_in_list(&self) -> Option<E> {
        self.prev
    }
    pub fn next_in_list(&self) -> Option<E> {
        self.next
    }
}

impl<E: sealed::Entity<Def = Self>, D> Deref for EntityListNode<E, D> {
    type Target = D;
    fn deref(&self) -> &D {
        &self.inner_def
    }
}

impl<E: sealed::Entity<Def = Self>, D> DerefMut for EntityListNode<E, D> {
    fn deref_mut(&mut self) -> &mut D {
        &mut self.inner_def
    }
}

macro_rules! interners {
    (
        needs_as_ref { $($needs_as_ref_ty:ty),* $(,)? }
        $($name:ident $(default($default:expr))? => $ty:ty),+ $(,)?
    ) => {
        $(impl AsRef<Self> for $needs_as_ref_ty {
            fn as_ref(&self) -> &Self {
                self
            }
        })*

        #[allow(non_snake_case)]
        #[derive(Default)]
        struct Interners {
            $($name: sealed::Interner<$name>),*
        }

        $(
            // NOTE(eddyb) never derive `PartialOrd, Ord` for these types, as
            // observing the interning order shouldn't be allowed.
            #[derive(Copy, Clone, PartialEq, Eq, Hash)]
            pub struct $name(
                // FIXME(eddyb) figure out how to sneak niches into these types, to
                // allow e.g. `Option` around them to not increase the size.
                #[doc(hidden)] u32,
            );

            $(impl Default for $name {
                fn default() -> Self {
                    // HACK(eddyb) have to mention `$default` in this `$(...)?`
                    // to gate its presence on `$default`'s presence.
                    if false { let _ = $default; }

                    Self(0)
                }
            })?

            impl sealed::Interned for $name {
                type Def = $ty;

                $(fn preintern(interner: &sealed::Interner<Self>) {
                    interner.intern($default);
                })?
                #[inline(always)]
                fn from_u32(i: u32) -> Self {
                    Self(i)
                }
                #[inline(always)]
                fn to_u32(self) -> u32 {
                    self.0
                }
                #[inline(always)]
                fn cx_interner(cx: &Context) -> &sealed::Interner<Self> {
                    &cx.interners.$name
                }
            }
        )*
    };
}

interners! {
    needs_as_ref {
        crate::AttrSetDef,
        crate::TypeDef,
        crate::ConstDef,
        crate::DataInstFormDef,
    }

    // FIXME(eddyb) consider a more uniform naming scheme than the combination
    // of `InternedFoo => Foo` and `Foo => FooDef`.
    InternedStr => str,
    AttrSet default(crate::AttrSetDef::default()) => crate::AttrSetDef,
    Type => crate::TypeDef,
    Const => crate::ConstDef,
    DataInstForm => crate::DataInstFormDef,
}

impl<I: sealed::Interned> InternInCx<I> for I::Def
where
    I::Def: Sized + AsRef<I::Def>,
{
    fn intern_in_cx(self, cx: &Context) -> I {
        I::cx_interner(cx).intern(self)
    }
}

impl InternInCx<InternedStr> for &'_ str {
    fn intern_in_cx(self, cx: &Context) -> InternedStr {
        cx.interners.InternedStr.intern(self)
    }
}

impl InternInCx<InternedStr> for String {
    fn intern_in_cx(self, cx: &Context) -> InternedStr {
        cx.interners.InternedStr.intern(self)
    }
}

macro_rules! entities {
    (
        $($name:ident => chunk_size($chunk_size:literal) $def:ty),+ $(,)?
    ) => {
        #[allow(non_snake_case)]
        #[derive(Default)]
        struct EntityAllocs {
            $($name: sealed::EntityAlloc<$name>),*
        }

        $(
            // NOTE(eddyb) never derive `PartialOrd, Ord` for these types, as
            // observing the entity index allocation order shouldn't be allowed.
            #[derive(Copy, Clone, PartialEq, Eq, Hash)]
            pub struct $name(#[doc(hidden)] NonZeroU32);

            impl sealed::Entity for $name {
                type Def = $def;

                const CHUNK_SIZE: u32 = $chunk_size;

                #[inline(always)]
                fn from_non_zero_u32(i: NonZeroU32) -> Self {
                    Self(i)
                }
                #[inline(always)]
                fn to_non_zero_u32(self) -> NonZeroU32 {
                    self.0
                }
                #[inline(always)]
                fn cx_entity_alloc(cx: &Context) -> &sealed::EntityAlloc<Self> {
                    &cx.entity_allocs.$name
                }
            }
        )*
    };
}

entities! {
    GlobalVar => chunk_size(0x1_0000) crate::GlobalVarDecl,
    Func => chunk_size(0x1_0000) crate::FuncDecl,
    ControlRegion => chunk_size(0x1000) crate::ControlRegionDef,
    ControlNode => chunk_size(0x1000) EntityListNode<ControlNode, crate::ControlNodeDef>,
    DataInst => chunk_size(0x1000) EntityListNode<DataInst, crate::DataInstDef>,
}