-rw-r--r--	src/alloc.h	4
-rw-r--r--	src/arena.c	62
-rw-r--r--	src/codegen.c	64
3 files changed, 98 insertions, 32 deletions
diff --git a/src/alloc.h b/src/alloc.h
index 79d7c0b..bc4bd69 100644
--- a/src/alloc.h
+++ b/src/alloc.h
@@ -37,6 +37,10 @@ void *_arena_grow(arena_t *a, void *ptr, size_t old_nmemb, size_t new_nmemb,
size_t size, size_t align)
__attribute__((returns_nonnull, nonnull, warn_unused_result));
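+/* A snapshot records the allocation state of an arena at a given moment in
+   time.  Restoring a snapshot releases every allocation made after it was
+   taken; restoring a NULL snapshot deallocates the arena entirely. */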
+typedef void *snapshot_t;
+snapshot_t arena_snapshot_create(arena_t);
+void arena_snapshot_restore(arena_t *, snapshot_t);
+
/* Deallocate all memory associated with the arena A. */
void arena_free(arena_t *a)
__attribute__((nonnull));
diff --git a/src/arena.c b/src/arena.c
index 880befd..525acfe 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -29,7 +29,7 @@ struct _arena {
/* DATA points to the start of the block’s memory while FREE points
to the beginning of the unused data in the block */
void *data, *free;
- size_t len, cap;
+ size_t cap;
struct _arena *next;
};
@@ -56,23 +56,22 @@ arena_alloc(struct _arena **a, size_t nmemb, size_t size, size_t align)
size *= nmemb;
for (struct _arena *p = *a; p != NULL; p = p->next) {
- size_t nlen, off;
- off = pad(p->len, align);
- nlen = size + off;
-
- if (nlen <= p->cap) {
- void *ret = (char *)p->data + off;
- p->len = nlen;
- p->free = ret;
+		size_t off = pad((char *)p->free - (char *)p->data, align);
+		size_t nlen = size + off;
+
+		if (nlen <= p->cap) {
+			void *ret = (char *)p->data + off;
+			p->free = (char *)ret + size;
return ret;
}
}
/* No page exists with enough space */
struct _arena *p = mkblk(MAX(size, ARENA_DFLT_CAP));
- p->len = size;
p->next = *a;
*a = p;
+ p->free = (char *)p->data + size;
return p->data;
}
@@ -82,14 +81,13 @@ _arena_grow(arena_t *a, void *ptr, size_t old_nmemb, size_t new_nmemb,
{
assert(IS_POW_2(align));
assert(new_nmemb * size != 0);
+ assert(old_nmemb < new_nmemb);
if (unlikely(size > SIZE_MAX / new_nmemb)) {
errno = ENOMEM;
err("%s:", __func__);
}
- size *= new_nmemb;
-
for (struct _arena *p = *a; p != NULL; p = p->next) {
if (ptr < p->data || ptr > p->free)
continue;
@@ -97,11 +95,12 @@ _arena_grow(arena_t *a, void *ptr, size_t old_nmemb, size_t new_nmemb,
/* If we need to grow the given allocation, but it was the last
allocation made in a region, then we first see if we can just eat
more trailing free space in the region to avoid a memcpy(). */
- if (ptr == p->free) {
- size_t rest = p->cap - p->len;
+ size_t oldsz = old_nmemb * size;
+ if ((char *)ptr == (char *)p->free - oldsz) {
+ size_t rest = p->cap - ((char *)p->free - (char *)p->data);
size_t need = (new_nmemb - old_nmemb) * size;
if (need <= rest) {
- p->len += need;
+ p->free = (char *)p->free + need;
return ptr;
}
}
@@ -130,6 +129,34 @@ arena_free(struct _arena **a)
*a = NULL;
}
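+/* Capture the allocation state of the arena A.  The snapshot is simply the
+   free-pointer of the most recently created block, or NULL when the arena
+   has no blocks yet. */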
+snapshot_t
+arena_snapshot_create(struct _arena *a)
+{
+ return a == NULL ? NULL : a->free;
+}
+
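+/* Roll the arena A back to the state captured in SNP.  Any blocks created
+   after the snapshot was taken are unmapped, and the block containing SNP
+   has its free-pointer rewound.  A NULL snapshot frees the arena entirely. */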
+void
+arena_snapshot_restore(struct _arena **a, snapshot_t snp)
+{
+ if (snp == NULL) {
+ arena_free(a);
+ return;
+ }
+
+ struct _arena *cur, *next;
+ for (cur = *a; cur != NULL; cur = next) {
+ next = cur->next;
+ if (snp < cur->data || snp > cur->free) {
+ munmap(cur->data, cur->cap);
+ free(cur);
+ } else {
+ cur->free = snp;
+ *a = cur;
+ return;
+ }
+ }
+}
+
struct _arena *
mkblk(size_t cap)
{
@@ -137,11 +164,10 @@ mkblk(size_t cap)
if (a == NULL)
err("malloc:");
a->cap = cap;
- a->data = mmap(NULL, cap, PROT_READ | PROT_WRITE,
- MAP_PRIVATE | MAP_ANON, -1, 0);
+ a->data = a->free = mmap(NULL, cap, PROT_READ | PROT_WRITE,
+ MAP_PRIVATE | MAP_ANON, -1, 0);
if (a->data == MAP_FAILED)
err("mmap:");
- a->free = a->data;
return a;
}
diff --git a/src/codegen.c b/src/codegen.c
index 9a59476..8218bc3 100644
--- a/src/codegen.c
+++ b/src/codegen.c
@@ -35,6 +35,7 @@ struct cgctx {
LLVMBuilderRef bob;
LLVMValueRef func;
+ idx_t scpi;
strview_t namespace;
};
@@ -126,7 +127,12 @@ codegentypedexpr(struct cgctx ctx, idx_t i, type_t type, LLVMValueRef *outv)
}
assert(ctx.ast.kinds[i] == ASTIDENT);
- err("%s():%d: not implemented", __func__, __LINE__);
+
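+	/* Identifiers evaluate to a load from the stack slot recorded for them
+	   in the current scope's symbol table. */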
+ strview_t sv = ctx.toks.strs[ctx.ast.lexemes[i]];
+ LLVMTypeRef t = type2llvm(ctx, ctx.types[i]);
+ LLVMValueRef ptrval = symtab_insert(&ctx.scps[ctx.scpi].map, sv, NULL)->v;
+ *outv = LLVMBuildLoad2(ctx.bob, t, ptrval, "loadtmp");
+ return fwdnode(ctx.ast, i);
}
idx_t
@@ -157,12 +163,39 @@ idx_t
codegenblk(struct cgctx ctx, idx_t i)
{
pair_t p = ctx.ast.kids[i];
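+	/* Advance to the scope which corresponds to this block. */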
+ while (ctx.scps[ctx.scpi].i != p.lhs)
+ ctx.scpi++;
for (i = p.lhs; i <= p.rhs; i = codegenstmt(ctx, i))
;
return i;
}
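+/* Emit an alloca for every declaration in the block rooted at node I,
+   recursing into nested blocks, and record the resulting stack slots in the
+   scopes' symbol tables.  This runs as a prepass from codegenfunc() so that
+   all allocas land in the function's entry basic-block. */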
idx_t
+codegenalloca(struct cgctx ctx, idx_t i)
+{
+ pair_t p = ctx.ast.kids[i];
+ while (ctx.scps[ctx.scpi].i != p.lhs)
+ ctx.scpi++;
+ for (i = p.lhs; i <= p.rhs;) {
+ switch (ctx.ast.kinds[i]) {
+ case ASTBLK:
+ i = codegenalloca(ctx, i);
+ break;
+ case ASTDECL: {
+ strview_t sv = ctx.toks.strs[ctx.ast.lexemes[i]];
+			char *name = tmpalloc(ctx.s, sv.len + 1, 1);
+ LLVMTypeRef t = type2llvm(ctx, ctx.types[i]);
+ symtab_insert(&ctx.scps[ctx.scpi].map, sv, NULL)->v =
+ LLVMBuildAlloca(ctx.bob, t, svtocstr(name, sv));
+ } /* fallthrough */
+ default:
+ i = fwdnode(ctx.ast, i);
+ }
+ }
+ return i;
+}
+
+idx_t
codegenfunc(struct cgctx ctx, idx_t i, const char *name)
{
LLVMTypeRef ret = ctx.types[i].ret == NULL
@@ -171,13 +204,19 @@ codegenfunc(struct cgctx ctx, idx_t i, const char *name)
LLVMTypeRef ft = LLVMFunctionType(ret, NULL, 0, false);
ctx.func = LLVMAddFunction(ctx.mod, name, ft);
- LLVMBasicBlockRef entry = LLVMAppendBasicBlockInContext(ctx.ctx, ctx.func,
- "entry");
+ LLVMBasicBlockRef entry =
+ LLVMAppendBasicBlockInContext(ctx.ctx, ctx.func, "entry");
LLVMPositionBuilderAtEnd(ctx.bob, entry);
- pair_t p = ctx.ast.kids[i];
- i = codegenblk(ctx, p.rhs);
- if (ctx.ast.kids[p.lhs].rhs == AST_EMPTY)
+ idx_t proto = ctx.ast.kids[i].lhs;
+ idx_t blk = ctx.ast.kids[i].rhs;
+
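+	/* Emit the allocas for the function body in a prepass, then discard any
+	   scratch memory that pass allocated from the arena. */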
+ snapshot_t snap = arena_snapshot_create(*ctx.a);
+ (void)codegenalloca(ctx, blk);
+ arena_snapshot_restore(ctx.a, snap);
+
+ i = codegenblk(ctx, blk);
+ if (ctx.ast.kids[proto].rhs == AST_EMPTY)
LLVMBuildRetVoid(ctx.bob);
return i;
}
@@ -193,11 +232,10 @@ codegendecl(struct cgctx ctx, idx_t i)
if (ctx.ast.kinds[p.rhs] != ASTFN)
return fwdnode(ctx.ast, i);
- strview_t sv = ctx.toks.strs[ctx.ast.lexemes[i]];
/* TODO: Namespace the name */
+ strview_t sv = ctx.toks.strs[ctx.ast.lexemes[i]];
char *name = tmpalloc(ctx.s, sv.len + 1, 1);
- svtocstr(name, sv);
- return codegenfunc(ctx, p.rhs, name);
+ return codegenfunc(ctx, p.rhs, svtocstr(name, sv));
}
assert(ctx.ast.kinds[i] == ASTDECL);
@@ -216,12 +254,10 @@ codegendecl(struct cgctx ctx, idx_t i)
return i;
}
if (!ctx.types[i].isfloat /* && !aux.buf[p.lhs].decl.isstatic */) {
- strview_t sv = ctx.toks.strs[ctx.ast.lexemes[i]];
- /* TODO: Namespace the name */
- char *name = tmpalloc(ctx.s, sv.len + 1, 1);
- LLVMTypeRef t = type2llvm(ctx, ctx.types[i]);
LLVMValueRef var, val;
- var = LLVMBuildAlloca(ctx.bob, t, svtocstr(name, sv));
+ /* TODO: Namespace the name */
+ strview_t sv = ctx.toks.strs[ctx.ast.lexemes[i]];
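+		/* The alloca for this declaration was already emitted by
+		   codegenalloca(); fetch it back from the symbol table. */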
+ var = symtab_insert(&ctx.scps[ctx.scpi].map, sv, NULL)->v;
i = codegentypedexpr(ctx, p.rhs, ctx.types[i], &val);
LLVMBuildStore(ctx.bob, val, var);
return i;