1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
|
#include "../gc.h"
#include "../util.h"
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
/**
 * Header stored immediately before every GC-managed object. The public
 * `struct object *` returned by gc_alloc points at `slots`, just past this
 * header (see hdr()/hdrc(), which step back over it).
 */
struct object_header {
  // Set during marking; cleared again by gc_sweep on surviving objects.
  bool mark : 1;
  // Set by gc_alloc_compiled_function.
  bool is_compiled_function : 1;
  // Reserved padding bit; never read or written.
  int rsvd : 1;
  // Number of untraced (non-value) slots; gc_alloc enforces < 2^13.
  uint16_t untraced_slot_count : 13;
  // Number of traced value slots; gc_alloc enforces < 2^16.
  uint16_t value_slot_count;
  // Next object in the live_objects singly-linked list.
  struct object_header *next;
  // The object's storage: value slots first, then untraced slots
  // (total length value_slot_count + untraced_slot_count, per gc_alloc).
  uintptr_t slots[];
};
/**
 * A singly-linked list of live objects.
 */
static struct object_header *live_objects = NULL;
/**
 * The number of bytes of live data, including object headers.
 */
static size_t live_bytes = 0;
/**
 * The number of bytes of live data to reach before collecting.
 */
static size_t limit_bytes = 80 * 1024 * 1024;
/**
 * The root stack is an arbitrary 4KiB. It shouldn't get anywhere near this
 * deep, so this is simply a convenient size.
 */
static struct value *root_stack[4096 / sizeof(uintptr_t)] = {0};
// Number of entries of root_stack currently in use.
static size_t root_stack_depth = 0;
/** Returns the header stored immediately before an object's slots. */
static struct object_header *hdr(void *ptr) {
  struct object_header *past_header = (struct object_header *)ptr;
  return past_header - 1;
}
/** Const variant of hdr(): the header immediately before an object. */
static const struct object_header *hdrc(const void *ptr) {
  const struct object_header *past_header = (const struct object_header *)ptr;
  return past_header - 1;
}
void gc_init(void) {}
/**
 * Marks every object reachable from initial_value.
 *
 * Intended to be a non-recursive pointer-reversal (Deutsch–Schorr–Waite)
 * traversal; the traversal core is still unimplemented (todo), and a simple
 * recursive reference version is kept in the comment at the bottom.
 */
static void gc_mark(const struct value initial_value) {
  // If the initial value just wasn't a pointer, we don't need to mark it.
  if (!is_ptr(initial_value))
    return;
  struct object *obj = untag_ptr(initial_value);
  if (!obj)
    return;
  // Otherwise, we do a traversal starting at the value, doing pointer reversal
  // to avoid recursion.
  //
  // In simple recursive marking, we store where we came from in the call
  // stack. Each stack frame effectively stores:
  //
  // 1. the pointer to the object we came from
  // 2. the index of the slot we came from in that object
  //
  // In pointer reversal, instead of using that space, we store where we came
  // from in the previous object itself. This relies on having the
  // TAG_GC_INTERNAL tag.
  struct object *prev = NULL;  // the reversed "came from" pointer (unused until
                               // the traversal below is implemented)
  while (obj) {
    // Check if the object has already been marked. If so, we don't need to
    // traverse it.
    struct object_header *header = hdr(obj);
    if (!header->mark) {
      // Invariant: We've just started traversing this object, so none of its
      // fields should have TAG_GC_INTERNAL.
      // Check the object's fields, looking for one that
      todo("gc_mark 1");
    }
    todo("gc_mark 2");
  }
  // Reference implementation: straightforward recursive marking, kept for
  // comparison with the pointer-reversal version above.
  /*
  struct object_header *header = hdr(obj);
  if (header->mark)
    return;
  header->mark = true;
  struct value *slots = (struct value *)&header->slots[0];
  for (size_t i = 0; i < header->value_slot_count; i++)
    gc_mark(slots[i]);
  */
}
/**
 * Walks the live_objects list, unlinking and freeing every unmarked object
 * and clearing the mark bit on every survivor. Updates live_bytes.
 */
static void gc_sweep(void) {
  struct object_header **link = &live_objects;
  struct object_header *header;
  while ((header = *link) != NULL) {
    if (header->mark) {
      // Survivor: reset its mark for the next cycle and move on.
      header->mark = false;
      link = &header->next;
    } else {
      // Dead: unlink it, subtract its footprint, and free it.
      *link = header->next;
      const size_t total_bytes =
          sizeof(struct object_header) +
          (header->value_slot_count + header->untraced_slot_count) *
              sizeof(uintptr_t);
      live_bytes -= total_bytes;
      free(header);
    }
  }
}
/**
 * Performs a full mark/sweep collection: marks everything reachable from the
 * root stack, then sweeps, reporting the bytes reclaimed to stderr.
 */
void gc_collect(void) {
  fprintf(stderr, "gc_collect()... ");
  for (size_t root = 0; root < root_stack_depth; root++)
    gc_mark(*root_stack[root]);
  const size_t bytes_before = live_bytes;
  gc_sweep();
  fprintf(stderr, "collected %zu bytes\n", bytes_before - live_bytes);
  // If collection didn't bring us back under the limit, we can't proceed.
  assume(live_bytes < limit_bytes);
}
/**
 * Allocates a GC-managed object with value_slot_count traced slots followed
 * by untraced_slot_count untraced slots, all zero-initialized. Collects
 * first if the allocation would cross the live-byte limit, and retries the
 * malloc once after a collection before giving up.
 */
struct object *gc_alloc(size_t value_slot_count, size_t untraced_slot_count) {
  // Slot counts must fit in the header's bitfields.
  assume(value_slot_count < (1 << 16));
  assume(untraced_slot_count < (1 << 13));
  const size_t total_bytes =
      sizeof(struct object_header) +
      (value_slot_count + untraced_slot_count) * sizeof(uintptr_t);
  // Account for the new object up front; collect if that crosses the limit.
  if ((live_bytes += total_bytes) >= limit_bytes)
    gc_collect();
  struct object_header *header = malloc(total_bytes);
  if (!header) {
    // malloc failed: collect and retry once before declaring OOM.
    gc_collect();
    header = malloc(total_bytes);
    if (!header)
      todo("OOM");
  }
  memset(header, 0, total_bytes);
  header->value_slot_count = value_slot_count;
  header->untraced_slot_count = untraced_slot_count;
  // Push onto the live-objects list for the sweeper.
  header->next = live_objects;
  live_objects = header;
  // The public object pointer is the slots array, just past the header.
  return (struct object *)&header->slots[0];
}
/**
 * Like gc_alloc, but flags the new object as a compiled function.
 */
struct object *gc_alloc_compiled_function(size_t value_slot_count,
                                          size_t untraced_slot_count) {
  struct object *out = gc_alloc(value_slot_count, untraced_slot_count);
  struct object_header *header = hdr(out);
  header->is_compiled_function = true;
  return out;
}
/**
 * Allocates an object to back an eq hashtable. No special header flag is
 * applied; this is currently a plain gc_alloc.
 */
struct object *gc_alloc_hashtable_eq(size_t value_slot_count,
                                     size_t untraced_slot_count) {
  struct object *out = gc_alloc(value_slot_count, untraced_slot_count);
  return out;
}
/**
 * Reads value slot slot_index of obj. Aborts (via assume) if the index is
 * out of range.
 */
struct value gc_read_value_slot(const struct object *obj, size_t slot_index) {
  assume(slot_index < hdrc(obj)->value_slot_count);
  const struct value *slots = (const struct value *)obj;
  return slots[slot_index];
}
/**
 * Reads untraced slot slot_index of obj. Untraced slots are stored in the
 * header's slots array immediately after the value slots (this is the layout
 * gc_alloc sizes for). Aborts if the index is out of range.
 */
uintptr_t gc_read_untraced_slot(const struct object *obj, size_t slot_index) {
  const struct object_header *header = hdrc(obj);
  assume(slot_index < header->untraced_slot_count);
  // Skip past the value slots to reach the untraced region.
  return header->slots[header->value_slot_count + slot_index];
}
/**
 * Reads one byte from obj's untraced region; byte_index is a byte offset
 * from the start of the first untraced slot. Aborts if the offset is past
 * the end of the untraced slots.
 */
uint8_t gc_read_untraced_byte(const struct object *obj, size_t byte_index) {
  const struct object_header *header = hdrc(obj);
  assume(byte_index < header->untraced_slot_count * sizeof(uintptr_t));
  // The untraced region begins right after the value slots.
  const uint8_t *bytes =
      (const uint8_t *)&header->slots[header->value_slot_count];
  return bytes[byte_index];
}
/**
 * Writes value into value slot slot_index of obj. Aborts if the index is
 * out of range — matching the bounds check gc_read_value_slot performs;
 * the original omitted it here, allowing silent out-of-bounds writes.
 */
void gc_write_value_slot(struct object *obj, size_t slot_index,
                         struct value value) {
  assume(slot_index < hdrc(obj)->value_slot_count);
  struct value *slots = (struct value *)obj;
  slots[slot_index] = value;
}
/**
 * Writes value into untraced slot slot_index of obj. Untraced slots follow
 * the value slots in the header's slots array (the layout gc_alloc sizes
 * for). Aborts if the index is out of range.
 */
void gc_write_untraced_slot(struct object *obj, size_t slot_index,
                            uintptr_t value) {
  struct object_header *header = hdr(obj);
  assume(slot_index < header->untraced_slot_count);
  // Skip past the value slots to reach the untraced region.
  header->slots[header->value_slot_count + slot_index] = value;
}
/**
 * Writes one byte into obj's untraced region; byte_index is a byte offset
 * from the start of the first untraced slot. Aborts if the offset is past
 * the end of the untraced slots.
 */
void gc_write_untraced_byte(struct object *obj, size_t byte_index,
                            uint8_t byte) {
  struct object_header *header = hdr(obj);
  assume(byte_index < header->untraced_slot_count * sizeof(uintptr_t));
  // The untraced region begins right after the value slots.
  uint8_t *bytes = (uint8_t *)&header->slots[header->value_slot_count];
  bytes[byte_index] = byte;
}
/**
 * Pushes a root onto the root stack so gc_collect will trace through it.
 * Aborts if the stack is full.
 *
 * Fix: the capacity check must compare against the ELEMENT count of
 * root_stack, not sizeof(root_stack) (its size in bytes), which allowed
 * root_stack_depth to run 8x past the end of the array before tripping.
 */
void gc_root_push(struct value *root) {
  assume(root_stack_depth < sizeof(root_stack) / sizeof(root_stack[0]));
  root_stack[root_stack_depth++] = root;
}
/**
 * Pops the most recently pushed root off the root stack. Aborts if the
 * stack is empty.
 */
void gc_root_pop(void) {
  assume(root_stack_depth > 0);
  root_stack_depth -= 1;
}
void gc_debug(void) { fprintf(stderr, "live bytes = %zu\n", live_bytes); }
|