// allocative/impls/std/sync.rs

use std::alloc::Layout;
use std::mem;
use std::rc;
use std::rc::Rc;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicI16;
use std::sync::atomic::AtomicI32;
use std::sync::atomic::AtomicI64;
use std::sync::atomic::AtomicI8;
use std::sync::atomic::AtomicIsize;
use std::sync::atomic::AtomicU16;
use std::sync::atomic::AtomicU32;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::AtomicU8;
use std::sync::atomic::AtomicUsize;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::RwLock;
use std::sync::Weak;

use crate::allocative_trait::Allocative;
use crate::impls::common::PTR_NAME;
use crate::key::Key;
use crate::visitor::Visitor;
// Contents are visited through `try_read`; a write-locked `RwLock`
// reports only its own size.
impl<T: Allocative> Allocative for RwLock<T> {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        let mut visitor = visitor.enter_self_sized::<Self>();
        if let Ok(data) = self.try_read() {
            visitor.visit_field(Key::new("data"), &*data);
        }
        visitor.exit();
    }
}

// Approximation of the private heap layout behind `Rc` and `Arc`:
// two reference counts followed by the value.
#[allow(dead_code)] // Never constructed; only its layout is used.
#[repr(C)]
struct RcBox<T: ?Sized> {
    a: usize,
    b: usize,
    t: T,
}

impl<T: ?Sized> RcBox<T> {
    /// Layout of the heap allocation holding the counts and `val`.
    fn layout(val: &T) -> Layout {
        let val_layout = Layout::for_value(val);
        Layout::new::<RcBox<()>>()
            .extend(val_layout)
            .unwrap()
            .0
            .pad_to_align()
    }
}

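// For instance, on a typical 64-bit target `RcBox::layout(&0u8)` is two
// 8-byte counts plus one payload byte, padded to 8-byte alignment: 24 bytes.
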
impl<T: Allocative + ?Sized> Allocative for Arc<T> {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        let mut visitor = visitor.enter_self_sized::<Self>();
        {
            let visitor = visitor.enter_shared(
                PTR_NAME,
                mem::size_of::<*const T>(),
                Arc::as_ptr(self) as *const (),
            );
            if let Some(mut visitor) = visitor {
                {
                    let val: &T = self;
                    let mut visitor =
                        visitor.enter(Key::new("ArcInner"), RcBox::layout(val).size());
                    val.visit(&mut visitor);
                    visitor.exit();
                }
                visitor.exit();
            }
        }
        visitor.exit();
    }
}

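// Note: `enter_shared` returns `None` when an allocation with the same
// pointer has already been visited, so an `Arc` reachable from several
// fields has its `ArcInner` contents counted only once.
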
// A dangling `Weak` reports only its own size; otherwise the temporarily
// upgraded `Arc` is visited (and deduplicated by pointer as above).
impl<T: Allocative + ?Sized> Allocative for Weak<T> {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        let mut visitor = visitor.enter_self_sized::<Self>();
        if let Some(arc) = self.upgrade() {
            arc.visit(&mut visitor);
        }
        visitor.exit();
    }
}

impl<T: Allocative> Allocative for Rc<T> {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        let mut visitor = visitor.enter_self_sized::<Self>();
        {
            let visitor = visitor.enter_shared(
                PTR_NAME,
                mem::size_of::<*const T>(),
                Rc::as_ptr(self) as *const (),
            );
            if let Some(mut visitor) = visitor {
                {
                    let val: &T = self;
                    let mut visitor = visitor.enter(Key::new("RcInner"), RcBox::layout(val).size());
                    val.visit(&mut visitor);
                    visitor.exit();
                }
                visitor.exit();
            }
        }
        visitor.exit();
    }
}

// Same strategy as `sync::Weak` above: visit through a temporary upgrade.
impl<T: Allocative> Allocative for rc::Weak<T> {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        let mut visitor = visitor.enter_self_sized::<Self>();
        if let Some(rc) = self.upgrade() {
            rc.visit(&mut visitor);
        }
        visitor.exit();
    }
}

// Atomics hold no heap data; each is a plain self-sized leaf.

impl Allocative for AtomicU8 {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicU16 {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicU32 {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicU64 {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicUsize {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicI8 {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicI16 {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicI32 {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicI64 {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicBool {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

impl Allocative for AtomicIsize {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.enter_self_sized::<Self>().exit();
    }
}

// Contents are visited through `try_lock`; a contended `Mutex`
// reports only its own size.
impl<T: Allocative> Allocative for Mutex<T> {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        let mut visitor = visitor.enter_self_sized::<Self>();
        if let Ok(data) = self.try_lock() {
            visitor.visit_field(Key::new("data"), &*data);
        }
        visitor.exit();
    }
}

#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use crate as allocative;
    use crate::golden::golden_test;
    use crate::Allocative;

    #[derive(Allocative)]
    #[repr(align(64))]
    struct CacheLine(u8);

    #[test]
    fn test_arc_align() {
        assert_eq!(std::mem::size_of::<CacheLine>(), 64);

        golden_test!(&Arc::new(CacheLine(0)));
    }
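
    // A minimal sketch, not part of the original suite: checks the size that
    // `RcBox::layout` computes for a one-byte payload. The arithmetic assumes
    // `align_of::<usize>() == size_of::<usize>()`, which holds on common
    // 32- and 64-bit targets.
    #[test]
    fn test_rc_box_layout_size() {
        use std::mem::size_of;

        use super::RcBox;

        // Two word-sized reference counts plus one payload byte, padded back
        // up to word alignment: three words in total.
        assert_eq!(3 * size_of::<usize>(), RcBox::layout(&0u8).size());
    }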
}