// simple-gc-embedder.h
#include <stdatomic.h>
#include "simple-tagging-scheme.h"
#include "simple-roots-types.h"
#include "gc-config.h"
#include "gc-embedder-api.h"
#define GC_EMBEDDER_EPHEMERON_HEADER struct gc_header header;
#define GC_EMBEDDER_FINALIZER_HEADER struct gc_header header;

static inline size_t gc_finalizer_priority_count(void) { return 2; }
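
// A conservative reference may point `displacement` bytes past the
// start of an object, to support embedders that tag their pointers.
// The simple embedder uses untagged pointers, so only a displacement
// of zero is valid.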
static inline int
gc_is_valid_conservative_ref_displacement(uintptr_t displacement) {
#if GC_CONSERVATIVE_ROOTS || GC_CONSERVATIVE_TRACE
  // Here is where you would allow tagged heap object references.
  return displacement == 0;
#else
  // Shouldn't get here.
  GC_CRASH();
#endif
}

// No external objects in simple benchmarks.
static inline int gc_extern_space_visit(struct gc_extern_space *space,
                                        struct gc_edge edge,
                                        struct gc_ref ref) {
  GC_CRASH();
}

static inline void gc_extern_space_start_gc(struct gc_extern_space *space,
                                            int is_minor_gc) {
}

static inline void gc_extern_space_finish_gc(struct gc_extern_space *space,
                                             int is_minor_gc) {
}
static inline void gc_trace_object(struct gc_ref ref,
                                   void (*trace_edge)(struct gc_edge edge,
                                                      struct gc_heap *heap,
                                                      void *trace_data),
                                   struct gc_heap *heap,
                                   void *trace_data,
                                   size_t *size) {
#if GC_CONSERVATIVE_TRACE
  // Shouldn't get here.
  GC_CRASH();
#else
  switch (tag_live_alloc_kind(*tag_word(ref))) {
#define SCAN_OBJECT(name, Name, NAME)                                  \
    case ALLOC_KIND_##NAME:                                            \
      if (trace_edge)                                                  \
        visit_##name##_fields(gc_ref_heap_object(ref), trace_edge,     \
                              heap, trace_data);                       \
      if (size)                                                        \
        *size = name##_size(gc_ref_heap_object(ref));                  \
      break;
    FOR_EACH_HEAP_OBJECT_KIND(SCAN_OBJECT)
#undef SCAN_OBJECT
  default:
    GC_CRASH();
  }
#endif
}
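
// Roots are chains of `struct handle` (see simple-roots-types.h);
// each handle wraps one slot, whose address is visited as an edge.
// Mutator roots and heap roots share this representation.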
static inline void visit_roots(struct handle *roots,
                               void (*trace_edge)(struct gc_edge edge,
                                                  struct gc_heap *heap,
                                                  void *trace_data),
                               struct gc_heap *heap,
                               void *trace_data) {
  for (struct handle *h = roots; h; h = h->next)
    trace_edge(gc_edge(&h->v), heap, trace_data);
}

static inline void gc_trace_mutator_roots(struct gc_mutator_roots *roots,
                                          void (*trace_edge)(struct gc_edge edge,
                                                             struct gc_heap *heap,
                                                             void *trace_data),
                                          struct gc_heap *heap,
                                          void *trace_data) {
  if (roots)
    visit_roots(roots->roots, trace_edge, heap, trace_data);
}

static inline void gc_trace_heap_roots(struct gc_heap_roots *roots,
                                       void (*trace_edge)(struct gc_edge edge,
                                                          struct gc_heap *heap,
                                                          void *trace_data),
                                       struct gc_heap *heap,
                                       void *trace_data) {
  if (roots)
    visit_roots(roots->roots, trace_edge, heap, trace_data);
}
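
// During a copying collection, the tag word doubles as a forwarding
// pointer: when `gcobj_not_forwarded_bit` is clear, the rest of the
// word holds the address of the object's copy.  The nonatomic
// accessors below are for use when no other thread can race on the
// object.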
static inline uintptr_t gc_object_forwarded_nonatomic(struct gc_ref ref) {
  uintptr_t tag = *tag_word(ref);
  return (tag & gcobj_not_forwarded_bit) ? 0 : tag;
}

static inline void gc_object_forward_nonatomic(struct gc_ref ref,
                                               struct gc_ref new_ref) {
  *tag_word(ref) = gc_ref_value(new_ref);
}
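
// The atomic forwarding protocol lets parallel tracing threads race to
// copy an object.  A tag word is in one of three states: not yet
// forwarded, `gcobj_busy` while a copy is in progress, or holding the
// forwarding address.  The acquire loads here pair with the release
// stores in gc_atomic_forward_abort and gc_atomic_forward_commit.
//
// A typical use by a parallel copying collector might look like the
// following sketch, where `copy_object` is a hypothetical allocator
// helper, not part of this API:
//
//   struct gc_atomic_forward fwd = gc_atomic_forward_begin(ref);
//   if (fwd.state == GC_FORWARDING_STATE_NOT_FORWARDED)
//     gc_atomic_forward_acquire(&fwd);
//   switch (fwd.state) {
//   case GC_FORWARDING_STATE_ACQUIRED: {
//     // We won the race; copy the object and publish the new address.
//     size_t size = gc_atomic_forward_object_size(&fwd);
//     struct gc_ref new_ref = copy_object(heap, ref, size);
//     gc_atomic_forward_commit(&fwd, new_ref);
//     return new_ref;
//   }
//   case GC_FORWARDING_STATE_BUSY:
//     // Another thread is copying; wait until it commits or aborts,
//     // after which fwd.state is NOT_FORWARDED or FORWARDED and can
//     // be handled as above.
//     while (!gc_atomic_forward_retry_busy(&fwd))
//       ;
//     ...
//   case GC_FORWARDING_STATE_FORWARDED:
//     return gc_ref(gc_atomic_forward_address(&fwd));
//   default:
//     GC_CRASH();
//   }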
static inline struct gc_atomic_forward
gc_atomic_forward_begin(struct gc_ref ref) {
  uintptr_t tag = atomic_load_explicit(tag_word(ref), memory_order_acquire);
  enum gc_forwarding_state state;
  if (tag == gcobj_busy)
    state = GC_FORWARDING_STATE_BUSY;
  else if (tag & gcobj_not_forwarded_bit)
    state = GC_FORWARDING_STATE_NOT_FORWARDED;
  else
    state = GC_FORWARDING_STATE_FORWARDED;
  return (struct gc_atomic_forward){ ref, tag, state };
}
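
// Poll an object that another thread is busy copying.  Returns 0 if
// the tag is still `gcobj_busy`; otherwise updates `fwd` to the
// resolved state (the copy was either aborted or committed) and
// returns 1.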
static inline int
gc_atomic_forward_retry_busy(struct gc_atomic_forward *fwd) {
  GC_ASSERT(fwd->state == GC_FORWARDING_STATE_BUSY);
  uintptr_t tag = atomic_load_explicit(tag_word(fwd->ref),
                                       memory_order_acquire);
  if (tag == gcobj_busy)
    return 0;
  if (tag & gcobj_not_forwarded_bit) {
    fwd->state = GC_FORWARDING_STATE_NOT_FORWARDED;
    fwd->data = tag;
  } else {
    fwd->state = GC_FORWARDING_STATE_FORWARDED;
    fwd->data = tag;
  }
  return 1;
}
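
// Try to claim the right to copy an object by compare-and-swapping its
// tag to `gcobj_busy`.  On success, the original tag is saved in
// `fwd->data` for a later abort or commit.  On failure, the observed
// tag tells us whether another thread holds the object (busy) or has
// already forwarded it.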
static inline void
gc_atomic_forward_acquire(struct gc_atomic_forward *fwd) {
  GC_ASSERT(fwd->state == GC_FORWARDING_STATE_NOT_FORWARDED);
  if (atomic_compare_exchange_strong(tag_word(fwd->ref), &fwd->data,
                                     gcobj_busy))
    fwd->state = GC_FORWARDING_STATE_ACQUIRED;
  else if (fwd->data == gcobj_busy)
    fwd->state = GC_FORWARDING_STATE_BUSY;
  else {
    GC_ASSERT((fwd->data & gcobj_not_forwarded_bit) == 0);
    fwd->state = GC_FORWARDING_STATE_FORWARDED;
  }
}
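
// Give up on copying: restore the tag saved by gc_atomic_forward_acquire,
// returning the object to the not-forwarded state.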
static inline void
gc_atomic_forward_abort(struct gc_atomic_forward *fwd) {
  GC_ASSERT(fwd->state == GC_FORWARDING_STATE_ACQUIRED);
  atomic_store_explicit(tag_word(fwd->ref), fwd->data, memory_order_release);
  fwd->state = GC_FORWARDING_STATE_NOT_FORWARDED;
}
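
// Size of an object mid-copy.  The object's own tag word currently
// holds `gcobj_busy`, so the allocation kind is read from the tag
// saved in `fwd->data`.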
static inline size_t
gc_atomic_forward_object_size(struct gc_atomic_forward *fwd) {
  GC_ASSERT(fwd->state == GC_FORWARDING_STATE_ACQUIRED);
  switch (tag_live_alloc_kind(fwd->data)) {
#define OBJECT_SIZE(name, Name, NAME)                                  \
    case ALLOC_KIND_##NAME:                                            \
      return name##_size(gc_ref_heap_object(fwd->ref));
    FOR_EACH_HEAP_OBJECT_KIND(OBJECT_SIZE)
#undef OBJECT_SIZE
  default:
    GC_CRASH();
  }
}
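
// Finish a copy: install the saved tag in the new copy, then publish
// the forwarding address in the old object's tag word.  The release
// store makes the copy's contents visible to threads that load the
// forwarding address with acquire ordering.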
static inline void
gc_atomic_forward_commit(struct gc_atomic_forward *fwd, struct gc_ref new_ref) {
  GC_ASSERT(fwd->state == GC_FORWARDING_STATE_ACQUIRED);
  *tag_word(new_ref) = fwd->data;
  atomic_store_explicit(tag_word(fwd->ref), gc_ref_value(new_ref),
                        memory_order_release);
  fwd->state = GC_FORWARDING_STATE_FORWARDED;
}
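
// Forwarding address of an already-forwarded object, as observed by
// gc_atomic_forward_begin or gc_atomic_forward_retry_busy.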
static inline uintptr_t
gc_atomic_forward_address(struct gc_atomic_forward *fwd) {
  GC_ASSERT(fwd->state == GC_FORWARDING_STATE_FORWARDED);
  return fwd->data;
}