//! Cache computations and efficiently reuse them.
use std::cell::RefCell;
use std::fmt;
use std::mem;
use std::sync::atomic::{self, AtomicU64};

/// A simple cache that stores generated values to avoid recomputation.
///
/// Keeps track of the last generated value after clearing.
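///
/// # Example
///
/// A minimal usage sketch (illustrative only; not compiled as a doctest):
///
/// ```ignore
/// let cache: Cache<u32> = Cache::new();
///
/// // No mutable reference is needed to fill the cache.
/// cache.put(42);
///
/// // Clearing keeps the last value around as `previous`.
/// cache.clear();
/// ```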
pub struct Cache<T> {
    group: Group,
    state: RefCell<State<T>>,
}

impl<T> Cache<T> {
    /// Creates a new empty [`Cache`].
    pub fn new() -> Self {
        Cache {
            group: Group::singleton(),
            state: RefCell::new(State::Empty { previous: None }),
        }
    }

    /// Creates a new empty [`Cache`] with the given [`Group`].
    ///
    /// Caches within the same group may reuse internal rendering storage.
    ///
    /// You should generally group caches that are likely to change
    /// together.
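    ///
    /// For instance, two caches that tend to change together can share a
    /// unique [`Group`] (illustrative sketch; not compiled as a doctest):
    ///
    /// ```ignore
    /// let group = Group::unique();
    ///
    /// let a: Cache<String> = Cache::with_group(group);
    /// let b: Cache<String> = Cache::with_group(group);
    /// ```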
    pub fn with_group(group: Group) -> Self {
        assert!(
            !group.is_singleton(),
            "The group {group:?} cannot be shared!"
        );

        Cache {
            group,
            state: RefCell::new(State::Empty { previous: None }),
        }
    }

    /// Returns the [`Group`] of the [`Cache`].
    pub fn group(&self) -> Group {
        self.group
    }

    /// Puts the given value in the [`Cache`].
    ///
    /// Notice that, since this is a cache, a mutable reference is not
    /// necessary to call this method; you can safely update the cache
    /// from rendering code.
    pub fn put(&self, value: T) {
        *self.state.borrow_mut() = State::Filled { current: value };
    }

    /// Returns a reference to the internal [`RefCell`] holding the
    /// [`State`] of the [`Cache`].
    pub fn state(&self) -> &RefCell<State<T>> {
        &self.state
    }

    /// Clears the [`Cache`].
    pub fn clear(&self) {
        let mut state = self.state.borrow_mut();

        let previous =
            mem::replace(&mut *state, State::Empty { previous: None });

        let previous = match previous {
            State::Empty { previous } => previous,
            State::Filled { current } => Some(current),
        };

        *state = State::Empty { previous };
    }
}

/// A cache group.
///
/// Caches that share the same group generally change together.
///
/// A cache group can be used to implement certain performance
/// optimizations during rendering, like batching or sharing atlases.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Group {
    id: u64,
    is_singleton: bool,
}

impl Group {
    /// Generates a new unique cache [`Group`].
    pub fn unique() -> Self {
        static NEXT: AtomicU64 = AtomicU64::new(0);

        Self {
            id: NEXT.fetch_add(1, atomic::Ordering::Relaxed),
            is_singleton: false,
        }
    }

    /// Returns `true` if the [`Group`] can only ever have a
    /// single [`Cache`] in it.
    ///
    /// This is the default kind of [`Group`] assigned when using
    /// [`Cache::new`].
    ///
    /// Knowing that a [`Group`] will never be shared may be
    /// useful for rendering backends to perform additional
    /// optimizations.
    pub fn is_singleton(self) -> bool {
        self.is_singleton
    }

    fn singleton() -> Self {
        Self {
            is_singleton: true,
            ..Self::unique()
        }
    }
}

impl<T> fmt::Debug for Cache<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let state = self.state.borrow();

        match &*state {
            State::Empty { previous } => {
                write!(f, "Cache::Empty {{ previous: {previous:?} }}")
            }
            State::Filled { current } => {
                write!(f, "Cache::Filled {{ current: {current:?} }}")
            }
        }
    }
}

impl<T> Default for Cache<T> {
    fn default() -> Self {
        Self::new()
    }
}

/// The state of a [`Cache`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum State<T> {
    /// The [`Cache`] is empty.
    Empty {
        /// The previous value of the [`Cache`].
        previous: Option<T>,
    },
    /// The [`Cache`] is filled.
    Filled {
        /// The current value of the [`Cache`].
        current: T,
    },
}

/// A piece of data that can be cached.
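///
/// Rendering backends may implement this trait for data they want to reuse
/// across frames. A minimal sketch of an implementation, assuming a
/// hypothetical `Primitive` type (illustrative only; not compiled as a
/// doctest):
///
/// ```ignore
/// use std::sync::Arc;
///
/// struct Primitive;
///
/// impl Cached for Primitive {
///     type Cache = Arc<Primitive>;
///
///     fn load(_cache: &Self::Cache) -> Self {
///         // A real backend would cheaply clone the cached data here.
///         Primitive
///     }
///
///     fn cache(
///         self,
///         _group: Group,
///         _previous: Option<Self::Cache>,
///     ) -> Self::Cache {
///         Arc::new(self)
///     }
/// }
/// ```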
pub trait Cached: Sized {
    /// The type of cache produced.
    type Cache: Clone;

    /// Loads the [`Cache`] into a proper instance.
    ///
    /// [`Cache`]: Self::Cache
    fn load(cache: &Self::Cache) -> Self;

    /// Caches this value, producing its corresponding [`Cache`].
    ///
    /// [`Cache`]: Self::Cache
    fn cache(self, group: Group, previous: Option<Self::Cache>) -> Self::Cache;
}

#[cfg(debug_assertions)]
impl Cached for () {
    type Cache = ();

    fn load(_cache: &Self::Cache) -> Self {}

    fn cache(
        self,
        _group: Group,
        _previous: Option<Self::Cache>,
    ) -> Self::Cache {
    }
}
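
#[cfg(test)]
mod tests {
    // Illustrative tests sketching the documented behavior of `Cache`
    // and `Group`.
    use super::*;

    #[test]
    fn clear_keeps_previous_value() {
        let cache: Cache<u32> = Cache::new();
        cache.put(42);

        cache.clear();

        // Clearing a filled cache stores its value as `previous`.
        assert_eq!(
            *cache.state().borrow(),
            State::Empty { previous: Some(42) }
        );
    }

    #[test]
    fn unique_groups_are_distinct_and_not_singletons() {
        let group = Group::unique();

        assert!(!group.is_singleton());
        assert_ne!(group, Group::unique());

        // `Cache::new` assigns a singleton group that cannot be shared.
        let cache: Cache<u32> = Cache::new();
        assert!(cache.group().is_singleton());
    }
}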