janet/src/core/gc.c
/*
* Copyright (c) 2022 Calvin Rose
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#ifndef JANET_AMALG
#include "features.h"
#include <janet.h>
#include "state.h"
#include "symcache.h"
#include "gc.h"
#include "util.h"
#include "fiber.h"
#include "vector.h"
#endif
/* Helpers for marking the various gc types */
static void janet_mark_funcenv(JanetFuncEnv *env);
static void janet_mark_funcdef(JanetFuncDef *def);
static void janet_mark_function(JanetFunction *func);
static void janet_mark_array(JanetArray *array);
static void janet_mark_table(JanetTable *table);
static void janet_mark_struct(const JanetKV *st);
static void janet_mark_tuple(const Janet *tuple);
static void janet_mark_buffer(JanetBuffer *buffer);
static void janet_mark_string(const uint8_t *str);
static void janet_mark_fiber(JanetFiber *fiber);
static void janet_mark_abstract(void *adata);
/* Local state that is only temporary for gc */
static JANET_THREAD_LOCAL uint32_t depth = JANET_RECURSION_GUARD;
static JANET_THREAD_LOCAL size_t orig_rootcount;
/* Hint to the GC that we may need to collect */
void janet_gcpressure(size_t s) {
janet_vm.next_collection += s;
}
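/* Illustrative sketch (not part of the original file): a C extension that
 * allocates a large buffer outside of janet_gcalloc can report that memory so
 * the collector sees the extra pressure. MyBlob and my_blob_type below are
 * hypothetical names.
 *
 *     MyBlob *blob = janet_abstract(&my_blob_type, sizeof(MyBlob));
 *     blob->bytes = janet_malloc(nbytes);
 *     if (NULL == blob->bytes) { JANET_OUT_OF_MEMORY; }
 *     janet_gcpressure(nbytes);
 */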
/* Mark a value */
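/* Marking is recursive but bounded by the thread-local "depth" budget above.
 * When the budget runs out, the value is pushed as a temporary GC root instead,
 * and janet_collect drains those extra roots iteratively after the ordinary
 * roots have been marked. */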
void janet_mark(Janet x) {
if (depth) {
depth--;
switch (janet_type(x)) {
default:
break;
case JANET_STRING:
case JANET_KEYWORD:
case JANET_SYMBOL:
janet_mark_string(janet_unwrap_string(x));
break;
case JANET_FUNCTION:
janet_mark_function(janet_unwrap_function(x));
break;
case JANET_ARRAY:
janet_mark_array(janet_unwrap_array(x));
break;
case JANET_TABLE:
janet_mark_table(janet_unwrap_table(x));
break;
case JANET_STRUCT:
janet_mark_struct(janet_unwrap_struct(x));
break;
case JANET_TUPLE:
janet_mark_tuple(janet_unwrap_tuple(x));
break;
case JANET_BUFFER:
janet_mark_buffer(janet_unwrap_buffer(x));
break;
case JANET_FIBER:
janet_mark_fiber(janet_unwrap_fiber(x));
break;
case JANET_ABSTRACT:
janet_mark_abstract(janet_unwrap_abstract(x));
break;
}
depth++;
} else {
janet_gcroot(x);
}
}
static void janet_mark_string(const uint8_t *str) {
janet_gc_mark(janet_string_head(str));
}
static void janet_mark_buffer(JanetBuffer *buffer) {
janet_gc_mark(buffer);
}
static void janet_mark_abstract(void *adata) {
#ifdef JANET_EV
/* Check if the abstract value is a threaded abstract type. If it is, marking
* means updating the threaded_abstracts table. */
if ((janet_abstract_head(adata)->gc.flags & JANET_MEM_TYPEBITS) == JANET_MEMORY_THREADED_ABSTRACT) {
janet_table_put(&janet_vm.threaded_abstracts, janet_wrap_abstract(adata), janet_wrap_true());
return;
}
#endif
if (janet_gc_reachable(janet_abstract_head(adata)))
return;
janet_gc_mark(janet_abstract_head(adata));
if (janet_abstract_head(adata)->type->gcmark) {
janet_abstract_head(adata)->type->gcmark(adata, janet_abstract_size(adata));
}
}
/* Mark a bunch of items in memory */
static void janet_mark_many(const Janet *values, int32_t n) {
if (values == NULL)
return;
const Janet *end = values + n;
while (values < end) {
janet_mark(*values);
values += 1;
}
}
/* Mark a bunch of key/value pairs in memory */
static void janet_mark_kvs(const JanetKV *kvs, int32_t n) {
const JanetKV *end = kvs + n;
while (kvs < end) {
janet_mark(kvs->key);
janet_mark(kvs->value);
kvs++;
}
}
static void janet_mark_array(JanetArray *array) {
if (janet_gc_reachable(array))
return;
janet_gc_mark(array);
janet_mark_many(array->data, array->count);
}
static void janet_mark_table(JanetTable *table) {
recur: /* Manual tail recursion */
if (janet_gc_reachable(table))
return;
janet_gc_mark(table);
janet_mark_kvs(table->data, table->capacity);
if (table->proto) {
table = table->proto;
goto recur;
}
}
static void janet_mark_struct(const JanetKV *st) {
recur:
if (janet_gc_reachable(janet_struct_head(st)))
return;
janet_gc_mark(janet_struct_head(st));
janet_mark_kvs(st, janet_struct_capacity(st));
st = janet_struct_proto(st);
if (st) goto recur;
}
static void janet_mark_tuple(const Janet *tuple) {
if (janet_gc_reachable(janet_tuple_head(tuple)))
return;
janet_gc_mark(janet_tuple_head(tuple));
janet_mark_many(tuple, janet_tuple_length(tuple));
}
/* Helper to mark function environments */
static void janet_mark_funcenv(JanetFuncEnv *env) {
if (janet_gc_reachable(env))
return;
janet_gc_mark(env);
/* If closure env references a dead fiber, we can just copy out the stack frame we need so
* we don't need to keep around the whole dead fiber. */
janet_env_maybe_detach(env);
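    /* offset > 0 means the env still aliases a live fiber's stack, so mark the
     * fiber; offset == 0 means the values were copied into the env's own
     * allocation (as.values). */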
if (env->offset > 0) {
/* On stack */
janet_mark_fiber(env->as.fiber);
} else {
/* Not on stack */
janet_mark_many(env->as.values, env->length);
}
}
/* GC helper to mark a FuncDef */
static void janet_mark_funcdef(JanetFuncDef *def) {
int32_t i;
if (janet_gc_reachable(def))
return;
janet_gc_mark(def);
janet_mark_many(def->constants, def->constants_length);
for (i = 0; i < def->defs_length; ++i) {
janet_mark_funcdef(def->defs[i]);
}
if (def->source)
janet_mark_string(def->source);
if (def->name)
janet_mark_string(def->name);
}
static void janet_mark_function(JanetFunction *func) {
int32_t i;
int32_t numenvs;
if (janet_gc_reachable(func))
return;
janet_gc_mark(func);
if (NULL != func->def) {
/* This should always be true, except when the function is only partially constructed */
numenvs = func->def->environments_length;
for (i = 0; i < numenvs; ++i) {
janet_mark_funcenv(func->envs[i]);
}
janet_mark_funcdef(func->def);
}
}
static void janet_mark_fiber(JanetFiber *fiber) {
int32_t i, j;
JanetStackFrame *frame;
recur:
if (janet_gc_reachable(fiber))
return;
janet_gc_mark(fiber);
janet_mark(fiber->last_value);
/* Mark values on the argument stack */
janet_mark_many(fiber->data + fiber->stackstart,
fiber->stacktop - fiber->stackstart);
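    /* Walk the call frames from the most recent down to the first. For each
     * frame, i indexes the first stack slot of the frame and j is one past its
     * last slot; the JanetStackFrame header occupies the JANET_FRAME_SIZE slots
     * immediately below i. */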
i = fiber->frame;
j = fiber->stackstart - JANET_FRAME_SIZE;
while (i > 0) {
frame = (JanetStackFrame *)(fiber->data + i - JANET_FRAME_SIZE);
if (NULL != frame->func)
janet_mark_function(frame->func);
if (NULL != frame->env)
janet_mark_funcenv(frame->env);
/* Mark all values in the stack frame */
janet_mark_many(fiber->data + i, j - i);
j = i - JANET_FRAME_SIZE;
i = frame->prevframe;
}
if (fiber->env)
janet_mark_table(fiber->env);
#ifdef JANET_EV
if (fiber->supervisor_channel) {
janet_mark_abstract(fiber->supervisor_channel);
}
#endif
/* Explicit tail recursion */
if (fiber->child) {
fiber = fiber->child;
goto recur;
}
}
/* Deinitialize a block of memory */
static void janet_deinit_block(JanetGCObject *mem) {
switch (mem->flags & JANET_MEM_TYPEBITS) {
default:
case JANET_MEMORY_FUNCTION:
break; /* Do nothing for non gc types */
case JANET_MEMORY_SYMBOL:
janet_symbol_deinit(((JanetStringHead *) mem)->data);
break;
case JANET_MEMORY_ARRAY:
janet_free(((JanetArray *) mem)->data);
break;
case JANET_MEMORY_TABLE:
janet_free(((JanetTable *) mem)->data);
break;
case JANET_MEMORY_FIBER:
janet_free(((JanetFiber *)mem)->data);
break;
case JANET_MEMORY_BUFFER:
janet_buffer_deinit((JanetBuffer *) mem);
break;
case JANET_MEMORY_ABSTRACT: {
JanetAbstractHead *head = (JanetAbstractHead *)mem;
if (head->type->gc) {
janet_assert(!head->type->gc(head->data, head->size), "finalizer failed");
}
}
break;
case JANET_MEMORY_FUNCENV: {
JanetFuncEnv *env = (JanetFuncEnv *)mem;
if (0 == env->offset)
janet_free(env->as.values);
}
break;
case JANET_MEMORY_FUNCDEF: {
JanetFuncDef *def = (JanetFuncDef *)mem;
/* TODO - get this all with one alloc and one free */
janet_free(def->defs);
janet_free(def->environments);
janet_free(def->constants);
janet_free(def->bytecode);
janet_free(def->sourcemap);
janet_free(def->closure_bitset);
}
break;
}
}
/* Iterate over all allocated memory and free blocks that were not
* marked as reachable. Clear the reachable flag so the next mark phase starts fresh. */
void janet_sweep() {
JanetGCObject *previous = NULL;
JanetGCObject *current = janet_vm.blocks;
JanetGCObject *next;
while (NULL != current) {
next = current->data.next;
if (current->flags & (JANET_MEM_REACHABLE | JANET_MEM_DISABLED)) {
previous = current;
current->flags &= ~JANET_MEM_REACHABLE;
} else {
janet_vm.block_count--;
janet_deinit_block(current);
if (NULL != previous) {
previous->data.next = next;
} else {
janet_vm.blocks = next;
}
janet_free(current);
}
current = next;
}
#ifdef JANET_EV
/* Sweep threaded abstract types for references to decrement */
JanetKV *items = janet_vm.threaded_abstracts.data;
for (int32_t i = 0; i < janet_vm.threaded_abstracts.capacity; i++) {
if (janet_checktype(items[i].key, JANET_ABSTRACT)) {
/* If item was not visited during the mark phase, then this
* abstract type isn't present in the heap and needs its refcount
* decremented, and should be removed from the table. If the refcount is
* then 0, the item will be collected. This ensures that only one interpreter
* will clean up the threaded abstract. */
/* If not visited... */
if (!janet_truthy(items[i].value)) {
void *abst = janet_unwrap_abstract(items[i].key);
if (0 == janet_abstract_decref(abst)) {
/* Run finalizer */
JanetAbstractHead *head = janet_abstract_head(abst);
if (head->type->gc) {
janet_assert(!head->type->gc(head->data, head->size), "finalizer failed");
}
/* Mark as tombstone in place */
items[i].key = janet_wrap_nil();
items[i].value = janet_wrap_false();
janet_vm.threaded_abstracts.deleted++;
janet_vm.threaded_abstracts.count--;
/* Free memory */
janet_free(janet_abstract_head(abst));
}
}
/* Reset for next sweep */
items[i].value = janet_wrap_false();
}
}
#endif
}
/* Allocate some memory that is tracked for garbage collection */
void *janet_gcalloc(enum JanetMemoryType type, size_t size) {
JanetGCObject *mem;
/* Make sure everything is inited */
janet_assert(NULL != janet_vm.cache, "please initialize janet before use");
mem = janet_malloc(size);
/* Check for bad malloc */
if (NULL == mem) {
JANET_OUT_OF_MEMORY;
}
/* Configure block */
mem->flags = type;
/* Prepend block to heap list */
janet_vm.next_collection += size;
mem->data.next = janet_vm.blocks;
janet_vm.blocks = mem;
janet_vm.block_count++;
return (void *)mem;
}
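/* Illustrative sketch (not part of the original file): constructors obtain
 * their GC-managed headers from janet_gcalloc and then fill in the
 * type-specific fields, roughly like
 *
 *     JanetArray *array = janet_gcalloc(JANET_MEMORY_ARRAY, sizeof(JanetArray));
 *     array->data = NULL;
 *     array->count = 0;
 *     array->capacity = 0;
 *
 * after which the object is swept automatically once it becomes unreachable. */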
static void free_one_scratch(JanetScratch *s) {
if (NULL != s->finalize) {
s->finalize((char *) s->mem);
}
janet_free(s);
}
/* Free all allocated scratch memory */
static void janet_free_all_scratch(void) {
for (size_t i = 0; i < janet_vm.scratch_len; i++) {
free_one_scratch(janet_vm.scratch_mem[i]);
}
janet_vm.scratch_len = 0;
}
static JanetScratch *janet_mem2scratch(void *mem) {
JanetScratch *s = (JanetScratch *)mem;
return s - 1;
}
/* Run garbage collection */
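/* Mark everything reachable from the GC roots (plus the currently running
 * fiber and, under JANET_EV, the event loop state), drain any extra roots
 * pushed by the recursion guard in janet_mark, then sweep the heap and reset
 * the allocation pressure counter. */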
void janet_collect(void) {
uint32_t i;
if (janet_vm.gc_suspend) return;
depth = JANET_RECURSION_GUARD;
/* Try to prevent many major collections back to back.
* A full collection takes O(janet_vm.block_count) time.
* If the heap is large, make sure the interval is not so small
* that we run collection after collection over it. This is just a
* heuristic for automatically adjusting the gc interval. */
if (janet_vm.block_count * 8 > janet_vm.gc_interval) {
janet_vm.gc_interval = janet_vm.block_count * sizeof(JanetGCObject);
}
orig_rootcount = janet_vm.root_count;
#ifdef JANET_EV
janet_ev_mark();
#endif
janet_mark_fiber(janet_vm.root_fiber);
for (i = 0; i < orig_rootcount; i++)
janet_mark(janet_vm.roots[i]);
while (orig_rootcount < janet_vm.root_count) {
Janet x = janet_vm.roots[--janet_vm.root_count];
janet_mark(x);
}
janet_sweep();
janet_vm.next_collection = 0;
janet_free_all_scratch();
}
/* Add a root value to the GC. This prevents the GC from removing a value
* and all of its children. If gcroot is called on a value n times, unroot
* must also be called n times to remove it as a gc root. */
void janet_gcroot(Janet root) {
size_t newcount = janet_vm.root_count + 1;
if (newcount > janet_vm.root_capacity) {
size_t newcap = 2 * newcount;
janet_vm.roots = janet_realloc(janet_vm.roots, sizeof(Janet) * newcap);
if (NULL == janet_vm.roots) {
JANET_OUT_OF_MEMORY;
}
janet_vm.root_capacity = newcap;
}
janet_vm.roots[janet_vm.root_count] = root;
janet_vm.root_count = newcount;
}
/* Identity equality for GC purposes */
static int janet_gc_idequals(Janet lhs, Janet rhs) {
if (janet_type(lhs) != janet_type(rhs))
return 0;
switch (janet_type(lhs)) {
case JANET_BOOLEAN:
case JANET_NIL:
case JANET_NUMBER:
/* These values don't really matter to the gc so returning 1 all the time is fine. */
return 1;
default:
return janet_unwrap_pointer(lhs) == janet_unwrap_pointer(rhs);
}
}
/* Remove a root value from the GC. This allows the gc to potentially reclaim
* a value and all its children. */
int janet_gcunroot(Janet root) {
Janet *vtop = janet_vm.roots + janet_vm.root_count;
/* Linear scan of the root array for a matching value */
for (Janet *v = janet_vm.roots; v < vtop; v++) {
if (janet_gc_idequals(root, *v)) {
*v = janet_vm.roots[--janet_vm.root_count];
return 1;
}
}
return 0;
}
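/* Illustrative sketch (not part of the original file): a C extension that
 * keeps a Janet value in a global must root it so a collection triggered from
 * any fiber does not free it, and must balance the root when the reference is
 * dropped. my_handler, set_handler, and clear_handler are hypothetical names.
 *
 *     static Janet my_handler;
 *     void set_handler(Janet f) {
 *         my_handler = f;
 *         janet_gcroot(my_handler);
 *     }
 *     void clear_handler(void) {
 *         janet_gcunroot(my_handler);
 *     }
 */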
/* Remove a root value from the GC. This sets the effective reference count to 0. */
int janet_gcunrootall(Janet root) {
Janet *vtop = janet_vm.roots + janet_vm.root_count;
int ret = 0;
/* Linear scan of the root array for a matching value */
for (Janet *v = janet_vm.roots; v < vtop; v++) {
if (janet_gc_idequals(root, *v)) {
*v = janet_vm.roots[--janet_vm.root_count];
vtop--;
ret = 1;
}
}
return ret;
}
/* Free all allocated memory */
void janet_clear_memory(void) {
#ifdef JANET_EV
JanetKV *items = janet_vm.threaded_abstracts.data;
for (int32_t i = 0; i < janet_vm.threaded_abstracts.capacity; i++) {
if (janet_checktype(items[i].key, JANET_ABSTRACT)) {
void *abst = janet_unwrap_abstract(items[i].key);
if (0 == janet_abstract_decref(abst)) {
JanetAbstractHead *head = janet_abstract_head(abst);
if (head->type->gc) {
janet_assert(!head->type->gc(head->data, head->size), "finalizer failed");
}
janet_free(janet_abstract_head(abst));
}
}
}
#endif
JanetGCObject *current = janet_vm.blocks;
while (NULL != current) {
janet_deinit_block(current);
JanetGCObject *next = current->data.next;
janet_free(current);
current = next;
}
janet_vm.blocks = NULL;
janet_free_all_scratch();
janet_free(janet_vm.scratch_mem);
}
/* Primitives for suspending GC. */
int janet_gclock(void) {
return janet_vm.gc_suspend++;
}
void janet_gcunlock(int handle) {
janet_vm.gc_suspend = handle;
}
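/* Illustrative sketch (not part of the original file): code that builds
 * several values before any of them become reachable from a root or a fiber
 * stack can suspend collection around that window.
 *
 *     int handle = janet_gclock();
 *     Janet parts[2];
 *     parts[0] = janet_cstringv("hello");
 *     parts[1] = janet_cstringv("world");
 *     Janet tup = janet_wrap_tuple(janet_tuple_n(parts, 2));
 *     janet_gcunlock(handle);
 */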
/* Scratch memory API */
void *janet_smalloc(size_t size) {
JanetScratch *s = janet_malloc(sizeof(JanetScratch) + size);
if (NULL == s) {
JANET_OUT_OF_MEMORY;
}
s->finalize = NULL;
if (janet_vm.scratch_len == janet_vm.scratch_cap) {
size_t newcap = 2 * janet_vm.scratch_cap + 2;
JanetScratch **newmem = (JanetScratch **) janet_realloc(janet_vm.scratch_mem, newcap * sizeof(JanetScratch));
if (NULL == newmem) {
JANET_OUT_OF_MEMORY;
}
janet_vm.scratch_cap = newcap;
janet_vm.scratch_mem = newmem;
}
janet_vm.scratch_mem[janet_vm.scratch_len++] = s;
return (char *)(s->mem);
}
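/* Zeroed scratch allocation; reject the request if nmemb * size would overflow. */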
void *janet_scalloc(size_t nmemb, size_t size) {
if (nmemb && size > SIZE_MAX / nmemb) {
JANET_OUT_OF_MEMORY;
}
size_t n = nmemb * size;
void *p = janet_smalloc(n);
memset(p, 0, n);
return p;
}
void *janet_srealloc(void *mem, size_t size) {
if (NULL == mem) return janet_smalloc(size);
JanetScratch *s = janet_mem2scratch(mem);
if (janet_vm.scratch_len) {
for (size_t i = janet_vm.scratch_len - 1; ; i--) {
if (janet_vm.scratch_mem[i] == s) {
JanetScratch *news = janet_realloc(s, size + sizeof(JanetScratch));
if (NULL == news) {
JANET_OUT_OF_MEMORY;
}
janet_vm.scratch_mem[i] = news;
return (char *)(news->mem);
}
if (i == 0) break;
}
}
JANET_EXIT("invalid janet_srealloc");
}
void janet_sfinalizer(void *mem, JanetScratchFinalizer finalizer) {
JanetScratch *s = janet_mem2scratch(mem);
s->finalize = finalizer;
}
void janet_sfree(void *mem) {
if (NULL == mem) return;
JanetScratch *s = janet_mem2scratch(mem);
if (janet_vm.scratch_len) {
for (size_t i = janet_vm.scratch_len - 1; ; i--) {
if (janet_vm.scratch_mem[i] == s) {
janet_vm.scratch_mem[i] = janet_vm.scratch_mem[--janet_vm.scratch_len];
free_one_scratch(s);
return;
}
if (i == 0) break;
}
}
JANET_EXIT("invalid janet_sfree");
}
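/* Illustrative sketch (not part of the original file): scratch memory lives
 * until janet_sfree or the next janet_collect, which makes it convenient for
 * temporaries that must not leak if a janet_panic unwinds the C stack.
 * src, len, and may_panic are hypothetical names.
 *
 *     char *tmp = janet_smalloc(len + 1);
 *     memcpy(tmp, src, len);
 *     tmp[len] = '\0';
 *     may_panic(tmp);
 *     janet_sfree(tmp);
 */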