libcstl
Loading...
Searching...
No Matches
memory.c
Go to the documentation of this file.
1/*!
2 * @file
3 */
4
5#include "cstl/memory.h"
6
7#include <assert.h>
8#include <stdlib.h>
9#include <stdatomic.h>
10#include <sched.h>
11
12/*
13 * cstl_shared_ptr_data is used to manage memory allocated by shared pointers.
14 * it is also pointed to by the weak pointer objects.
15 */
16struct cstl_shared_ptr_data
17{
18 struct
19 {
20 /*
21 * hard refers to the number of shared pointers that manage
22 * the memory. soft is the number of shared and weak pointers
23 * that point to this cstl_shared_ptr_data structure.
24 *
25 * the lock member is used to prevent weak pointers from racing
26 * with each other when trying to convert to a shared pointer
27 */
28 atomic_size_t hard, soft;
29 atomic_flag lock;
30 } ref;
31 /*
32 * The unique pointer is a pointer to the managed memory.
33 * There may be many shared and weak pointers pointing to
34 * this object, but this object is the only thing
35 * containing/pointing to the memory managed within the
36 * unique pointer.
37 */
39};
40
/*!
 * Dynamically allocate memory to be managed by the unique pointer.
 *
 * Allocates @p sz bytes with malloc() and, on success, stores the
 * pointer in @p up along with the clear function @p clr and its
 * context @p priv, which are invoked when the pointer is reset.
 *
 * If @p sz is 0 or the allocation fails, @p up is left untouched.
 * NOTE(review): callers are expected to have initialized/reset @p up
 * before calling — confirm against the header's documented contract.
 */
void cstl_unique_ptr_alloc(cstl_unique_ptr_t * const up, const size_t sz,
                           cstl_xtor_func_t * const clr, void * const priv)
{
    if (sz > 0) {
        void * const ptr = malloc(sz);
        if (ptr != NULL) {
            /* take ownership of the new allocation */
            cstl_guarded_ptr_set(&up->gp, ptr);
            /* remember how to clear/destroy the memory later */
            up->clr.func = clr;
            up->clr.priv = priv;
        }
    }
}
54
56{
57 void * const ptr = cstl_guarded_ptr_get(&up->gp);
58 if (up->clr.func != NULL) {
59 up->clr.func(ptr, up->clr.priv);
60 }
61 free(ptr);
63}
64
/*!
 * Dynamically allocate memory to be shared via the object.
 *
 * Allocates the shared control block and, within it, @p sz bytes of
 * managed memory cleared via @p clr. Both reference counts start at 1:
 * one hard (shared) owner and one soft reference to the control block.
 *
 * On any failure (sz == 0, control-block allocation failure, or managed
 * memory allocation failure) everything allocated so far is released
 * and @p sp is not given a new value here.
 */
void cstl_shared_ptr_alloc(cstl_shared_ptr_t * const sp, const size_t sz,
                           cstl_xtor_func_t * const clr)
{

    if (sz > 0) {
        struct cstl_shared_ptr_data * data;

        data = malloc(sizeof(*data));
        if (data != NULL) {
            /* one shared owner, one reference to the control block */
            atomic_init(&data->ref.hard, 1);
            atomic_init(&data->ref.soft, 1);
            atomic_flag_clear(&data->ref.lock);

            cstl_unique_ptr_init(&data->up);
            cstl_unique_ptr_alloc(&data->up, sz, clr, NULL);

            if (cstl_unique_ptr_get(&data->up) != NULL) {
                /* success: hand the control block to @sp */
                cstl_guarded_ptr_set(&sp->data, data);
                data = NULL;
            }

            /* no-op on success (data was set to NULL above) */
            free(data);
        }
    }
}
91
93{
94 const struct cstl_shared_ptr_data * const data =
96 int count = 1;
97 if (data != NULL) {
98 count = atomic_load(&data->ref.soft);
99 }
100 return count == 1;
101}
102
103const void * cstl_shared_ptr_get_const(const cstl_shared_ptr_t * const sp)
104{
105 const struct cstl_shared_ptr_data * const data =
107 if (data != NULL) {
108 return cstl_unique_ptr_get_const(&data->up);
109 }
110 return NULL;
111}
112
114 cstl_shared_ptr_t * const n)
115{
116 struct cstl_shared_ptr_data * data;
117
119 cstl_guarded_ptr_copy(&n->data, &e->data);
120
121 data = cstl_guarded_ptr_get(&n->data);
122 if (data != NULL) {
123 atomic_fetch_add(&data->ref.hard, 1);
124 atomic_fetch_add(&data->ref.soft, 1);
125 }
126}
127
129{
130 struct cstl_shared_ptr_data * const data =
131 cstl_guarded_ptr_get(&sp->data);
132
133 if (data != NULL) {
134 if (atomic_fetch_sub(&data->ref.hard, 1) == 1) {
135 cstl_unique_ptr_reset(&data->up);
136 }
137
138 /*
139 * manage the shared data structure via the
140 * weak pointer code; it's the same handling
141 */
143 }
144}
145
147 const cstl_shared_ptr_t * const sp)
148{
149 struct cstl_shared_ptr_data * data;
150
152 cstl_guarded_ptr_copy(&wp->data, &sp->data);
153
154 data = cstl_guarded_ptr_get(&wp->data);
155 if (data != NULL) {
156 atomic_fetch_add(&data->ref.soft, 1);
157 }
158}
159
161 cstl_shared_ptr_t * const sp)
162{
163 struct cstl_shared_ptr_data * data;
164
166 cstl_guarded_ptr_copy(&sp->data, &wp->data);
167
168 data = cstl_guarded_ptr_get(&sp->data);
169 if (data != NULL) {
170 /*
171 * the weak pointer wants to increment the hard reference
172 * only if the hard reference is already greater than 0.
173 * the atomic interfaces don't allow checking the current
174 * count until *after* the reference is incremented. this
175 * means that another weak pointer could race with this one
176 * and see a value of 1 when it does its check and assume
177 * that the memory is live.
178 *
179 * in order to prevent this race, the code "spins" on this
180 * lock flag because the operations done under the lock are
181 * non-blocking, and the code currently holding the lock
182 * should exit quickly.
183 */
184 while (atomic_flag_test_and_set(&data->ref.lock)) {
185 sched_yield(); // GCOV_EXCL_LINE
186 }
187
188 /*
189 * since we can't race with other weak pointers, if the
190 * counter is greater than 0, we know that the underlying
191 * memory is live. it is not possible for this code to
192 * race with a shared ptr--in the sense that a shared pointer
193 * erroneously sees a 1 in the hard counter and assumes
194 * that the underlying memory is live--because if the hard
195 * counter was 0, that means that there was no live shared
196 * pointer from which to try to share
197 */
198
199 if (atomic_fetch_add(&data->ref.hard, 1) > 0) {
200 /*
201 * the memory is live, add a reference
202 * to the shared data structure too
203 */
204 atomic_fetch_add(&data->ref.soft, 1);
205 } else {
206 /* the memory wasn't live, put the counter back */
207 atomic_fetch_sub(&data->ref.hard, 1);
208 cstl_guarded_ptr_set(&sp->data, NULL);
209 }
210
211 atomic_flag_clear(&data->ref.lock);
212 }
213}
214
216{
217 struct cstl_shared_ptr_data * const data =
218 cstl_guarded_ptr_get(&wp->data);
219
220 if (data != NULL) {
221 cstl_guarded_ptr_set(&wp->data, NULL);
222
223 if (atomic_fetch_sub(&data->ref.soft, 1) == 1) {
224 free(data);
225 }
226 }
227}
228
229#ifdef __cfg_test__
230// GCOV_EXCL_START
231#include "internal/check.h"
232
/*
 * Verify the guarded-pointer copy protection: copying the raw struct
 * (rather than using cstl_guarded_ptr_copy()) must be detected, and
 * reading through the rogue copy must abort.
 *
 * NOTE(review): the declarations of @p and @p2 (original lines
 * 235-236, presumably via DECLARE_CSTL_GUARDED_PTR) and one statement
 * at original line 244 are missing from this rendering of the file.
 */
START_TEST(guarded)
{
    p2 = p;

    /*
     * p should still be good after the copy,
     * but p2 should detect that it's been copied
     */
    ck_assert_signal(SIGABRT, cstl_guarded_ptr_get(&p2));
}
END_TEST
248
/*
 * Test destructor: overwrite the freed region with the 0xa5 sentinel.
 * The buffer length is smuggled through the opaque context pointer.
 */
static void dtor_memclr(void * const mem, void * const len)
{
    const size_t count = (uintptr_t)len;
    memset(mem, 0xa5, count);
}
253
/*
 * Exercise the unique-pointer life cycle: alloc, reset, release.
 *
 * NOTE(review): the declaration of @p (original line 256, presumably
 * DECLARE_CSTL_UNIQUE_PTR) and two statements at original lines 263
 * and 277 (apparently cstl_unique_ptr_reset(&p), given the NULL
 * assertions that follow each) are missing from this rendering.
 */
START_TEST(unique)
{
    cstl_xtor_func_t * dtor;
    void * priv;

    /* allocation succeeds and the pointer is accessible */
    cstl_unique_ptr_alloc(&p, 512, dtor_memclr, (void *)512);
    ck_assert_ptr_nonnull(cstl_unique_ptr_get(&p));

    ck_assert_ptr_null(cstl_unique_ptr_get(&p));

    cstl_unique_ptr_alloc(&p, 1024, dtor_memclr, (void *)1024);
    ck_assert_ptr_nonnull(cstl_unique_ptr_get(&p));

    /* freeing out from under the object does not clear its pointer */
    free(cstl_unique_ptr_get(&p));
    ck_assert_ptr_nonnull(cstl_unique_ptr_get(&p));

    /* release hands back ownership plus the dtor and its context */
    cstl_unique_ptr_release(&p, &dtor, &priv);
    ck_assert_ptr_eq(cstl_unique_ptr_get(&p), NULL);
    ck_assert_uint_eq((uintptr_t)dtor, (uintptr_t)dtor_memclr);
    ck_assert_ptr_eq(priv, (void *)1024);

    ck_assert_ptr_null(cstl_unique_ptr_get(&p));
}
END_TEST
281
/*
 * Exercise shared-pointer sharing: two owners see the same memory,
 * and the memory stays valid after one owner resets.
 *
 * NOTE(review): the declarations of @sp1 and @sp2 (original lines
 * 284-285, presumably DECLARE_CSTL_SHARED_PTR) and the reset
 * statements at original lines 293 and 298 are missing from this
 * rendering of the file.
 */
START_TEST(shared)
{
    cstl_shared_ptr_alloc(&sp1, 128, NULL);
    memset(cstl_shared_ptr_get(&sp1), 0, 128);

    /* both owners see the same underlying memory */
    cstl_shared_ptr_share(&sp1, &sp2);
    ck_assert_ptr_eq(cstl_shared_ptr_get(&sp1), cstl_shared_ptr_get(&sp2));

    /* after sp1 resets, sp2 still keeps the memory alive */
    ck_assert_ptr_eq(cstl_shared_ptr_get(&sp1), NULL);
    ck_assert_ptr_ne(cstl_shared_ptr_get(&sp1), cstl_shared_ptr_get(&sp2));

    /* memory must still be writable through the surviving owner */
    memset(cstl_shared_ptr_get(&sp2), 0, 128);
}
END_TEST
301
/*
 * Exercise weak-pointer lock: succeeds while a shared owner is live,
 * yields an empty shared pointer once all owners are gone.
 *
 * NOTE(review): the declarations of @sp1, @sp2 and @wp (original
 * lines 304-306) and the reset statements at original lines 315,
 * 323-324 and 329 are missing from this rendering of the file.
 */
START_TEST(weak)
{
    cstl_shared_ptr_alloc(&sp1, 128, NULL);
    memset(cstl_shared_ptr_get(&sp1), 0, 128);

    cstl_weak_ptr_from(&wp, &sp1);
    cstl_shared_ptr_share(&sp1, &sp2);
    ck_assert_ptr_eq(cstl_shared_ptr_get(&sp1), cstl_shared_ptr_get(&sp2));

    /* sp1 reset: sp2 still holds the memory */
    ck_assert_ptr_eq(cstl_shared_ptr_get(&sp1), NULL);
    ck_assert_ptr_ne(cstl_shared_ptr_get(&sp1), cstl_shared_ptr_get(&sp2));
    memset(cstl_shared_ptr_get(&sp2), 0, 128);

    /* memory is live, so locking the weak pointer must succeed */
    cstl_weak_ptr_lock(&wp, &sp1);
    ck_assert_ptr_eq(cstl_shared_ptr_get(&sp1), cstl_shared_ptr_get(&sp2));

    /* after all shared owners reset, the lock must come back empty */
    cstl_weak_ptr_lock(&wp, &sp1);
    ck_assert_ptr_eq(cstl_shared_ptr_get(&sp1), NULL);
}
END_TEST
332
333Suite * memory_suite(void)
334{
335 Suite * const s = suite_create("memory");
336
337 TCase * tc;
338
339 tc = tcase_create("memory");
340 tcase_add_test(tc, guarded);
341 tcase_add_test(tc, unique);
342 tcase_add_test(tc, shared);
343 tcase_add_test(tc, weak);
344 suite_add_tcase(s, tc);
345
346 return s;
347}
348
349// GCOV_EXCL_STOP
350#endif
void cstl_xtor_func_t(void *obj, void *priv)
Type for functions called to construct, clear, or destroy an object.
Definition common.h:97
static void cstl_guarded_ptr_set(struct cstl_guarded_ptr *const gp, void *const ptr)
Initialize a guarded pointer object to a specific pointer value.
Definition memory.h:110
#define DECLARE_CSTL_GUARDED_PTR(NAME)
Declare and initialize a guarded pointer.
Definition memory.h:84
static void cstl_guarded_ptr_copy(struct cstl_guarded_ptr *const dst, const struct cstl_guarded_ptr *const src)
Copy the cstl_guarded_ptr object to a new location.
Definition memory.h:163
static const void * cstl_guarded_ptr_get_const(const struct cstl_guarded_ptr *const gp)
Retrieve the stored pointer value.
Definition memory.h:138
static void * cstl_guarded_ptr_get(struct cstl_guarded_ptr *const gp)
Retrieve the stored pointer value.
Definition memory.h:148
void cstl_shared_ptr_reset(cstl_shared_ptr_t *const sp)
Stop managing the underlying memory via this object.
Definition memory.c:128
bool cstl_shared_ptr_unique(const cstl_shared_ptr_t *const sp)
Determine if a shared pointer uniquely owns the underlying memory.
Definition memory.c:92
static void * cstl_shared_ptr_get(cstl_shared_ptr_t *const sp)
Get a pointer to the memory managed by the object.
Definition memory.h:450
void cstl_shared_ptr_share(const cstl_shared_ptr_t *const e, cstl_shared_ptr_t *const n)
Create a new shared pointer object to manage the underlying memory.
Definition memory.c:113
void cstl_shared_ptr_alloc(cstl_shared_ptr_t *const sp, const size_t sz, cstl_xtor_func_t *const clr)
Dynamically allocated memory to be shared via the object.
Definition memory.c:65
#define DECLARE_CSTL_SHARED_PTR(NAME)
Compile-time declaration and initialization of a shared pointer.
Definition memory.h:389
const void * cstl_shared_ptr_get_const(const cstl_shared_ptr_t *const sp)
Get a pointer to the memory managed by the object.
Definition memory.c:103
static void cstl_unique_ptr_init(cstl_unique_ptr_t *const up)
Initialize a unique pointer.
Definition memory.h:249
static void * cstl_unique_ptr_get(cstl_unique_ptr_t *const up)
Get the pointer managed by the unique pointer object.
Definition memory.h:288
void cstl_unique_ptr_reset(cstl_unique_ptr_t *const up)
Free the memory managed by a unique pointer.
Definition memory.c:55
void cstl_unique_ptr_alloc(cstl_unique_ptr_t *const up, const size_t sz, cstl_xtor_func_t *const clr, void *const priv)
Dynamically allocate memory to be managed by the unique pointer.
Definition memory.c:41
static void * cstl_unique_ptr_release(cstl_unique_ptr_t *const up, cstl_xtor_func_t **const clr, void **priv)
Stop a unique pointer object from managing a pointer.
Definition memory.h:311
static const void * cstl_unique_ptr_get_const(const cstl_unique_ptr_t *const up)
Get the pointer managed by the unique pointer object.
Definition memory.h:281
#define DECLARE_CSTL_UNIQUE_PTR(NAME)
Declare and initialize a unique pointer.
Definition memory.h:226
#define DECLARE_CSTL_WEAK_PTR(NAME)
Compile-time declaration and initialization of a weak pointer.
Definition memory.h:519
void cstl_weak_ptr_reset(cstl_weak_ptr_t *const wp)
Drop the reference to the underlying managed memory.
Definition memory.c:215
void cstl_weak_ptr_from(cstl_weak_ptr_t *const wp, const cstl_shared_ptr_t *const sp)
Create a weak pointer from a shared pointer.
Definition memory.c:146
void cstl_weak_ptr_lock(const cstl_weak_ptr_t *const wp, cstl_shared_ptr_t *const sp)
Convert a weak pointer to a shared pointer.
Definition memory.c:160
The shared pointer object.
Definition memory.h:396
A pointer that has a single "owner".
Definition memory.h:234