allocative/
visitor.rs

/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under both the MIT license found in the
 * LICENSE-MIT file in the root directory of this source tree and the Apache
 * License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 * of this source tree.
 */

use std::mem;

use crate::allocative_trait::Allocative;
use crate::impls::common::CAPACITY_NAME;
use crate::impls::common::DATA_NAME;
use crate::impls::common::KEY_NAME;
use crate::impls::common::UNUSED_CAPACITY_NAME;
use crate::impls::common::VALUE_NAME;
use crate::key::Key;

/// Actual implementation of the visitor.
///
/// At the moment there's only one implementation, the one which generates a flame
/// graph, and this trait is crate-private. This may change in the future.
pub(crate) trait VisitorImpl {
    /// Enter simple field like `u32`.
    /// All sizes are in bytes.
    fn enter_inline_impl(&mut self, name: Key, size: usize, parent: NodeKind);
    /// Enter field which points to heap-allocated unique memory (e.g. `Box<T>`).
    fn enter_unique_impl(&mut self, name: Key, size: usize, parent: NodeKind);
    /// Enter field which points to heap-allocated shared memory (e.g. `Arc<T>`).
    /// This function returns `false` if the pointee was already visited.
    #[must_use]
    fn enter_shared_impl(
        &mut self,
        name: Key,
        size: usize,
        ptr: *const (),
        parent: NodeKind,
    ) -> bool;

    /// Exit the field. Each `enter_` call must be matched by an `exit_` call;
    /// the `Visitor` wrapper guarantees that.
    fn exit_inline_impl(&mut self);
    fn exit_unique_impl(&mut self);
    fn exit_shared_impl(&mut self);
    /// Exit the "root" visitor.
    fn exit_root_impl(&mut self);
}

#[derive(Copy, Clone)]
pub(crate) enum NodeKind {
    Inline,
    Unique,
    Shared,
    Root,
}

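/// Visitor passed to [`Allocative::visit`] implementations.
///
/// It wraps the crate-private [`VisitorImpl`] and remembers which kind of node
/// was entered, so that the matching `exit_*_impl` call is issued on
/// [`Visitor::exit`] (or on drop).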
#[must_use] // Must call `.exit()`.
pub struct Visitor<'a> {
    pub(crate) visitor: &'a mut dyn VisitorImpl,
    pub(crate) node_kind: NodeKind,
}

impl<'a> Drop for Visitor<'a> {
    fn drop(&mut self) {
        self.exit_impl();
    }
}

impl<'a> Visitor<'a> {
    pub fn enter<'b>(&'b mut self, name: Key, size: usize) -> Visitor<'b>
    where
        'a: 'b,
    {
        self.visitor.enter_inline_impl(name, size, self.node_kind);
        Visitor {
            visitor: self.visitor,
            node_kind: NodeKind::Inline,
        }
    }

    pub fn enter_unique<'b>(&'b mut self, name: Key, size: usize) -> Visitor<'b>
    where
        'a: 'b,
    {
        self.visitor.enter_unique_impl(name, size, self.node_kind);
        Visitor {
            visitor: self.visitor,
            node_kind: NodeKind::Unique,
        }
    }

    /// Enter a field containing a shared pointer.
    ///
    /// This function does nothing and returns `None`
    /// if the pointee (the `ptr` argument) was previously visited.
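    ///
    /// # Example
    ///
    /// A minimal, non-normative sketch of visiting an `Arc<String>` field named
    /// `shared`; the field name and the `size` chosen here are illustrative:
    ///
    /// ```ignore
    /// if let Some(mut pointee) = visitor.enter_shared(
    ///     Key::for_type_name::<Arc<String>>(),
    ///     mem::size_of::<Arc<String>>(),
    ///     Arc::as_ptr(&self.shared) as *const (),
    /// ) {
    ///     // Only reached the first time this particular allocation is seen.
    ///     self.shared.as_ref().visit(&mut pointee);
    ///     pointee.exit();
    /// }
    /// ```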
    pub fn enter_shared<'b>(
        &'b mut self,
        name: Key,
        size: usize,
        ptr: *const (),
    ) -> Option<Visitor<'b>>
    where
        'a: 'b,
    {
        if self
            .visitor
            .enter_shared_impl(name, size, ptr, self.node_kind)
        {
            Some(Visitor {
                visitor: self.visitor,
                node_kind: NodeKind::Shared,
            })
        } else {
            None
        }
    }

    /// This function is typically called as the first function of an `Allocative`
    /// implementation to record `self`.
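    ///
    /// # Example
    ///
    /// A minimal sketch of a hand-written [`Allocative`] implementation for a
    /// hypothetical `MyStruct` with a single `data` field (assuming a
    /// `Key::new` constructor over a static string):
    ///
    /// ```ignore
    /// impl Allocative for MyStruct {
    ///     fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
    ///         let mut visitor = visitor.enter_self_sized::<Self>();
    ///         visitor.visit_field(Key::new("data"), &self.data);
    ///         visitor.exit();
    ///     }
    /// }
    /// ```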
    pub fn enter_self_sized<'b, T>(&'b mut self) -> Visitor<'b>
    where
        'a: 'b,
    {
        self.enter(Key::for_type_name::<T>(), mem::size_of::<T>())
    }

    /// This function is typically called as the first function of an `Allocative`
    /// implementation to record `self`.
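    ///
    /// Unlike [`Visitor::enter_self_sized`], this also works for unsized values
    /// because it records `mem::size_of_val(this)`. A minimal sketch for an
    /// implementation on an unsized type:
    ///
    /// ```ignore
    /// impl Allocative for str {
    ///     fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
    ///         visitor.enter_self(self).exit();
    ///     }
    /// }
    /// ```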
    pub fn enter_self<'b, T: ?Sized>(&'b mut self, this: &T) -> Visitor<'b>
    where
        'a: 'b,
    {
        self.enter(Key::for_type_name::<T>(), mem::size_of_val(this))
    }

    /// Visit simple sized field (e.g. `u32`) without descending into children.
    pub fn visit_simple<'b>(&'b mut self, name: Key, size: usize)
    where
        'a: 'b,
    {
        self.enter(name, size).exit();
    }

    /// Visit simple sized field (e.g. `u32`) without descending into children.
    pub fn visit_simple_sized<'b, T>(&'b mut self)
    where
        'a: 'b,
    {
        self.enter_self_sized::<T>().exit();
    }

    pub fn visit_field<'b, T: Allocative + ?Sized>(&'b mut self, name: Key, field: &T)
    where
        'a: 'b,
    {
        self.visit_field_with(name, mem::size_of_val::<T>(field), |visitor| {
            field.visit(visitor);
        })
    }

    /// Similar to `visit_field`, but instead of calling [`Allocative::visit`]
    /// on the field, the provided closure is called.
    ///
    /// Useful if the field type does not implement [`Allocative`].
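    ///
    /// # Example
    ///
    /// A minimal sketch for a field of a foreign type that does not implement
    /// [`Allocative`]; the `handle` field, its `heap_bytes` method and the
    /// `Key::new` constructor are assumptions for illustration:
    ///
    /// ```ignore
    /// visitor.visit_field_with(Key::new("handle"), mem::size_of::<ForeignHandle>(), |visitor| {
    ///     // Manually attribute the handle's estimated heap usage.
    ///     visitor.visit_simple(Key::new("buffer"), self.handle.heap_bytes());
    /// });
    /// ```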
    pub fn visit_field_with<'b, 'f, F: for<'c, 'd> FnOnce(&'d mut Visitor<'c>)>(
        &'b mut self,
        name: Key,
        field_size: usize,
        visit: F,
    ) {
        let mut visitor = self.enter(name, field_size);
        visit(&mut visitor);
        visitor.exit();
    }

    pub fn visit_slice<'b, T: Allocative>(&'b mut self, slice: &[T])
    where
        'a: 'b,
    {
        self.visit_iter(slice);
    }

    pub fn visit_iter<'b, 'i, T: Allocative + 'i, I: IntoIterator<Item = &'i T>>(
        &'b mut self,
        iter: I,
    ) where
        'a: 'b,
    {
        if !mem::needs_drop::<T>() || mem::size_of::<T>() == 0 {
            // `T` has no pointers it owns.
            self.visit_simple(
                Key::for_type_name::<T>(),
                mem::size_of::<T>() * iter.into_iter().count(),
            );
        } else {
            for item in iter {
                item.visit(self);
            }
        }
    }

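    /// Visit the body of a `Vec`-like container: a `CAPACITY_NAME` node sized to
    /// the whole allocation, containing the initialized elements in `data` plus
    /// an `UNUSED_CAPACITY_NAME` node for the spare capacity.
    ///
    /// # Example
    ///
    /// A minimal sketch for a hypothetical growable buffer `MyVec<T>` exposing
    /// `as_slice` and `capacity`; the `"ptr"` key and `Key::new` constructor
    /// are illustrative choices:
    ///
    /// ```ignore
    /// impl<T: Allocative> Allocative for MyVec<T> {
    ///     fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
    ///         let mut visitor = visitor.enter_self_sized::<Self>();
    ///         {
    ///             let mut visitor =
    ///                 visitor.enter_unique(Key::new("ptr"), mem::size_of::<*const T>());
    ///             visitor.visit_vec_like_body(self.as_slice(), self.capacity());
    ///             visitor.exit();
    ///         }
    ///         visitor.exit();
    ///     }
    /// }
    /// ```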
    pub fn visit_vec_like_body<'b, T>(&'b mut self, data: &[T], capacity: usize)
    where
        'a: 'b,
        T: Allocative,
    {
        self.visit_field_with(CAPACITY_NAME, mem::size_of::<T>() * capacity, |visitor| {
            visitor.visit_slice(data);
            visitor.visit_simple(
                UNUSED_CAPACITY_NAME,
                mem::size_of::<T>() * capacity.wrapping_sub(data.len()),
            );
        })
    }

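    /// Visit the entries of a map-like container: each key and value is recorded
    /// as a field under a common `DATA_NAME` node.
    ///
    /// # Example
    ///
    /// A minimal sketch for a hypothetical wrapper around a `BTreeMap`:
    ///
    /// ```ignore
    /// impl<K: Allocative, V: Allocative> Allocative for MyMap<K, V> {
    ///     fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
    ///         let mut visitor = visitor.enter_self_sized::<Self>();
    ///         visitor.visit_generic_map_fields(self.inner.iter());
    ///         visitor.exit();
    ///     }
    /// }
    /// ```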
    pub fn visit_generic_map_fields<'b, 'x, K: Allocative + 'x, V: Allocative + 'x>(
        &'b mut self,
        entries: impl IntoIterator<Item = (&'x K, &'x V)>,
    ) {
        self.visit_field_with(DATA_NAME, mem::size_of::<*const ()>(), move |visitor| {
            for (k, v) in entries {
                visitor.visit_field(KEY_NAME, k);
                visitor.visit_field(VALUE_NAME, v);
            }
        })
    }

    pub fn visit_generic_set_fields<'b, 'x, K: Allocative + 'x>(
        &'b mut self,
        entries: impl IntoIterator<Item = &'x K>,
    ) where
        'a: 'b,
    {
        self.visit_field_with(DATA_NAME, mem::size_of::<*const ()>(), |visitor| {
            for k in entries {
                visitor.visit_field(KEY_NAME, k);
            }
        })
    }

    fn exit_impl(&mut self) {
        match self.node_kind {
            NodeKind::Inline => self.visitor.exit_inline_impl(),
            NodeKind::Unique => self.visitor.exit_unique_impl(),
            NodeKind::Shared => self.visitor.exit_shared_impl(),
            NodeKind::Root => self.visitor.exit_root_impl(),
        }
    }

    pub fn exit(mut self) {
        self.exit_impl();
        // Prevent `drop`.
        mem::forget(self);
    }
}