[wip] memory: ensure disabled interrupts in top-level operators

ecl_alloc_object, ecl_free_object
ecl_alloc, ecl_alloc_manual, ecl_alloc_atomic, ecl_dealloc

Moreover, move all top-level ops to memory.d so they are not reliant on mem_gc.
The stubbed allocator uses manual memory management for all ops.

[wip] because we should adjust ecl_make_stack too
This commit is contained in:
Daniel Kochmański 2025-05-22 12:57:29 +02:00
parent 4de3f36ee1
commit b607796113
9 changed files with 155 additions and 205 deletions

View file

@ -42,9 +42,7 @@
*
* The GMP library may also allocate temporary memory for its
* computations. It is configurable at runtime whether we use malloc
* and free or the corresponding equivalents from the garbage
* collector (ecl_alloc_uncollectable and ecl_free_uncollectable) for
* that.
* and free or the corresponding equivalents from the GC.
*/
/*************************************************************

View file

@ -16,6 +16,28 @@
#include <ecl/internal.h>
#include <ecl/ecl-inl.h>
/* Global monotonically increasing stamp counter shared by all threads. */
static cl_index stamp = 0;

/* Return the next stamp value (first call returns 1).
 * With ECL_THREADS the increment is an atomic fetch-and-add on the
 * shared counter; otherwise a plain pre-increment suffices.
 * Fix: declare as (void) — an empty () parameter list in C is an
 * unspecified-arguments declaration, not a prototype. */
cl_index ecl_next_stamp(void) {
#if ECL_THREADS
  return AO_fetch_and_add((AO_t*)&stamp, 1) + 1;
#else
  return ++stamp;
#endif
}
/* Allocate a standard instance with SLOTS slot cells.  The instance is
 * created non-funcallable (calling it raises an error via
 * FEnot_funcallable_vararg) and its slot-definition list starts unbound. */
cl_object
ecl_alloc_instance(cl_index slots)
{
  cl_object instance = ecl_alloc_object(t_instance);
  instance->instance.slots = (cl_object *)ecl_alloc(sizeof(cl_object) * slots);
  instance->instance.length = slots;
  instance->instance.slotds = ECL_UNBOUND;
  instance->instance.isgf = ECL_NOT_FUNCALLABLE;
  instance->instance.entry = FEnot_funcallable_vararg;
  return instance;
}
cl_object
ecl_allocate_instance(cl_object clas, cl_index size)
{

View file

@ -313,7 +313,7 @@ si_allocate_foreign_data(cl_object tag, cl_object size)
/* FIXME! Should be atomic uncollectable or malloc, but we do not export
* that garbage collector interface and malloc may be overwritten
* by the GC library */
output->foreign.data = bytes? ecl_alloc_uncollectable(bytes) : NULL;
output->foreign.data = bytes? ecl_alloc_manual(bytes) : NULL;
@(return output);
}
@ -326,7 +326,7 @@ si_free_foreign_data(cl_object f)
}
if (f->foreign.size) {
/* See si_allocate_foreign_data() */
ecl_free_uncollectable(f->foreign.data);
ecl_free(f->foreign.data);
}
f->foreign.size = 0;
f->foreign.data = NULL;

View file

@ -57,7 +57,7 @@ _ecl_set_max_heap_size(size_t new_size)
GC_set_max_heap_size(ecl_core.max_heap_size = new_size);
if (new_size == 0) {
cl_index size = ecl_option_values[ECL_OPT_HEAP_SAFETY_AREA];
ecl_core.safety_region = ecl_alloc_atomic_unprotected(size);
ecl_core.safety_region = GC_MALLOC_ATOMIC_IGNORE_OFF_PAGE(size);
} else if (ecl_core.safety_region) {
GC_FREE(ecl_core.safety_region);
ecl_core.safety_region = 0;
@ -180,38 +180,20 @@ allocate_object_error(struct bdw_type_information *bdw_type_info)
/* Allocate a pointer-free (atomic) object of the size recorded for this
 * type, with interrupts masked while the GC runs.
 * NOTE(review): this span shows BOTH the pre-change body and the
 * post-change body of a diff concatenated: the trailing
 * `return GC_MALLOC_ATOMIC(...)` is unreachable.  The new version is
 * just the bare GC call (type tagging appears to move to the caller,
 * see ecl_alloc_object) — confirm against the applied patch. */
static cl_object
allocate_object_atomic(struct bdw_type_information *bdw_type_info)
{
const cl_env_ptr the_env = ecl_process_env();
cl_object op;
ecl_disable_interrupts_env(the_env);
op = GC_MALLOC_ATOMIC(bdw_type_info->size);
op->d.t = bdw_type_info->t;
ecl_enable_interrupts_env(the_env);
return op;
return GC_MALLOC_ATOMIC(bdw_type_info->size);
}
/* Allocate a fully-scanned (conservatively traced) object of the size
 * recorded for this type, with interrupts masked around the GC call.
 * NOTE(review): old and new diff bodies are concatenated here; the
 * second `return GC_MALLOC(...)` is unreachable.  The new body is the
 * single GC_MALLOC call — confirm against the applied patch. */
static cl_object
allocate_object_full(struct bdw_type_information *bdw_type_info)
{
const cl_env_ptr the_env = ecl_process_env();
cl_object op;
ecl_disable_interrupts_env(the_env);
op = GC_MALLOC(bdw_type_info->size);
op->d.t = bdw_type_info->t;
ecl_enable_interrupts_env(the_env);
return op;
return GC_MALLOC(bdw_type_info->size);
}
#ifdef GBC_BOEHM_PRECISE
/* Allocate an object using the precise (explicitly typed) GC interface,
 * passing the per-type mark descriptor computed at init time.
 * NOTE(review): old and new diff bodies are concatenated; the second
 * return is unreachable.  New body is the bare
 * GC_malloc_explicitly_typed call — confirm against the applied patch. */
static cl_object
allocate_object_typed(struct bdw_type_information *bdw_type_info)
{
const cl_env_ptr the_env = ecl_process_env();
cl_object op;
ecl_disable_interrupts_env(the_env);
op = GC_malloc_explicitly_typed(bdw_type_info->size, bdw_type_info->descriptor);
op->d.t = bdw_type_info->t;
ecl_enable_interrupts_env(the_env);
return op;
return GC_malloc_explicitly_typed(bdw_type_info->size, bdw_type_info->descriptor);
}
#endif
@ -244,122 +226,10 @@ cl_object_mark_proc(void *addr, struct GC_ms_entry *msp, struct GC_ms_entry *msl
/* Allocate an object in the custom `cl_object_kind` GC kind, which uses
 * cl_object_mark_proc for marking.
 * NOTE(review): old and new diff bodies are concatenated; the second
 * return is unreachable.  New body is the bare GC_generic_malloc call —
 * confirm against the applied patch. */
static cl_object
allocate_object_marked(struct bdw_type_information *bdw_type_info)
{
const cl_env_ptr the_env = ecl_process_env();
cl_object op;
ecl_disable_interrupts_env(the_env);
op = GC_generic_malloc(bdw_type_info->size, cl_object_kind);
op->d.t = bdw_type_info->t;
ecl_enable_interrupts_env(the_env);
return op;
return GC_generic_malloc(bdw_type_info->size, cl_object_kind);
}
#endif
/* Dispatch allocation of an object of lisp type T to the allocator
 * function registered for that type in the bdw_type_info table. */
static cl_object
alloc_object(cl_type t)
{
  struct bdw_type_information *info = &bdw_type_info[t];
  return info->allocator(info);
}
/* Allocate an object of type T together with EXTRA_SPACE trailing bytes
 * in one pointer-free (atomic) GC block; `displaced` points just past
 * the fixed-size part.
 * NOTE(review): the type tag and displaced pointer are written AFTER
 * interrupts are re-enabled, unlike allocate_object_* above which tag
 * inside the masked window — confirm whether this window is benign for
 * atomic (unscanned) memory. */
cl_object
ecl_alloc_compact_object(cl_type t, cl_index extra_space)
{
const cl_env_ptr the_env = ecl_process_env();
cl_index size = bdw_type_info[t].size;
cl_object x;
ecl_disable_interrupts_env(the_env);
x = (cl_object)GC_MALLOC_ATOMIC(size + extra_space);
ecl_enable_interrupts_env(the_env);
x->array.t = t;
x->array.displaced = (void*)(((char*)x) + size);
return x;
}
/* Build a fresh standard instance carrying SLOTS slot cells.  The new
 * instance cannot be called as a function until made funcallable, and
 * its slot definitions are initially unbound. */
cl_object
ecl_alloc_instance(cl_index slots)
{
  cl_object obj = ecl_alloc_object(t_instance);
  cl_object *cells = (cl_object *)ecl_alloc(sizeof(cl_object) * slots);
  obj->instance.slots = cells;
  obj->instance.length = slots;
  obj->instance.isgf = ECL_NOT_FUNCALLABLE;
  obj->instance.entry = FEnot_funcallable_vararg;
  obj->instance.slotds = ECL_UNBOUND;
  return obj;
}
/* Shared stamp counter; incremented once per ecl_next_stamp call. */
static cl_index stamp = 0;

/* Produce the next stamp (starting at 1).  Under ECL_THREADS the update
 * is a lock-free atomic fetch-and-add; single-threaded builds use a
 * plain increment.  Fix: `(void)` instead of `()` so this is a real
 * prototype rather than an unspecified-arguments declaration. */
cl_index ecl_next_stamp(void) {
#if ECL_THREADS
  return AO_fetch_and_add((AO_t*)&stamp, 1) + 1;
#else
  return ++stamp;
#endif
}
/* Allocate SIZE bytes that the collector scans for pointers but never
 * reclaims automatically; pair with ecl_free_uncollectable.  Interrupts
 * are masked while the GC allocator runs. */
void *
ecl_alloc_uncollectable(size_t size)
{
  void *result;
  const cl_env_ptr env = ecl_process_env();
  ecl_disable_interrupts_env(env);
  result = GC_MALLOC_UNCOLLECTABLE(size);
  ecl_enable_interrupts_env(env);
  return result;
}
/* Release memory obtained from ecl_alloc_uncollectable.  The GC free is
 * wrapped in an interrupt-disabled window. */
void
ecl_free_uncollectable(void *pointer)
{
  const cl_env_ptr env = ecl_process_env();
  ecl_disable_interrupts_env(env);
  GC_FREE(pointer);
  ecl_enable_interrupts_env(env);
}
/* GC-scanned allocation with no interrupt masking; callers that need
 * protection use ecl_alloc instead. */
void *
ecl_alloc_unprotected(cl_index n)
{
  void *block = GC_MALLOC_IGNORE_OFF_PAGE(n);
  return block;
}
/* Pointer-free (atomic) allocation with no interrupt masking; the
 * protected counterpart is ecl_alloc_atomic. */
void *
ecl_alloc_atomic_unprotected(cl_index n)
{
  void *block = GC_MALLOC_ATOMIC_IGNORE_OFF_PAGE(n);
  return block;
}
/* Interrupt-safe GC-scanned allocation: mask interrupts, delegate to
 * ecl_alloc_unprotected, then restore. */
void *
ecl_alloc(cl_index n)
{
  void *result;
  const cl_env_ptr env = ecl_process_env();
  ecl_disable_interrupts_env(env);
  result = ecl_alloc_unprotected(n);
  ecl_enable_interrupts_env(env);
  return result;
}
/* Interrupt-safe pointer-free allocation: mask interrupts, delegate to
 * ecl_alloc_atomic_unprotected, then restore. */
void *
ecl_alloc_atomic(cl_index n)
{
  void *result;
  const cl_env_ptr env = ecl_process_env();
  ecl_disable_interrupts_env(env);
  result = ecl_alloc_atomic_unprotected(n);
  ecl_enable_interrupts_env(env);
  return result;
}
/* Explicitly free a GC-allocated block, with interrupts masked around
 * the GC_FREE call. */
void
ecl_dealloc(void *ptr)
{
  const cl_env_ptr env = ecl_process_env();
  ecl_disable_interrupts_env(env);
  GC_FREE(ptr);
  ecl_enable_interrupts_env(env);
}
/* -- weak pointers ---------------------------------------------------------- */
cl_object
@ -410,18 +280,6 @@ si_weak_pointer_value(cl_object o)
/* -- graph traversal -------------------------------------------------------- */
#ifdef GBC_BOEHM_PRECISE
static cl_index
to_bitmap(void *x, void *y)
{
cl_index n = (char*)y - (char*)x;
if (n % sizeof(void*))
ecl_internal_error("Misaligned pointer in ECL structure.");
n /= sizeof(void*);
return 1 << n;
}
#endif
void init_bdw_type_info (void)
{
int i;
@ -879,23 +737,50 @@ si_gc_dump()
/* -- module definition ------------------------------------------------------ */
/* gc_ops backend: allocate an object of lisp type T by calling the
 * per-type allocator recorded in bdw_type_info. */
static cl_object
alloc_object(cl_type t)
{
  struct bdw_type_information *entry = bdw_type_info + t;
  return (*entry->allocator)(entry);
}
/* gc_ops backend: default collectable, pointer-scanned allocation. */
static void *
alloc_memory(cl_index size)
{
  void *block = GC_MALLOC(size);
  return block;
}
/* gc_ops backend: "manual" allocation — scanned by the GC but never
 * reclaimed automatically, so it must be freed explicitly. */
static void *
alloc_manual(cl_index size)
{
  void *block = GC_MALLOC_UNCOLLECTABLE(size);
  return block;
}
/* gc_ops backend: pointer-free (atomic) allocation; the GC does not
 * scan the returned block for references. */
static void *
alloc_atomic(cl_index size)
{
  void *block = GC_MALLOC_ATOMIC_IGNORE_OFF_PAGE(size);
  return block;
}
/* gc_ops backend: return a block from any alloc_* backend above to the
 * collector.  NOTE(review): declared with external linkage while its
 * sibling backends are `static` — confirm whether this is intentional
 * (e.g. referenced from another translation unit) or an oversight. */
void
free_memory(void *ptr)
{
GC_FREE(ptr);
}
/* gc_ops backend: run the standard finalizer for O, then release it.
 * NOTE(review): as rendered this calls BOTH ecl_dealloc(o) and
 * free_memory(o) on the same pointer — a double free.  This looks like
 * pre-/post-change diff lines concatenated; only the free_memory(o)
 * call should remain in the applied version.  Confirm against the
 * actual patch. */
static void
free_object(cl_object o)
{
standard_finalizer(o);
ecl_dealloc(o);
free_memory(o);
}
/* Allocator vtable installed when the Boehm GC backend is active.
 * NOTE(review): `.free_memory` is initialized twice (first to
 * ecl_dealloc, then to free_memory).  C designated-initializer rules
 * make the later one win, but the first line is evidently the removed
 * side of the diff and should not appear in the applied file. */
struct ecl_allocator_ops gc_ops = {
.allocate_memory = alloc_memory,
.allocate_atomic = alloc_atomic,
.allocate_manual = alloc_manual,
.allocate_object = alloc_object,
.free_memory = ecl_dealloc,
.free_memory = free_memory,
.free_object = free_object
};
@ -948,7 +833,7 @@ create_gc()
/* Save some memory for the case we get tight. */
if (ecl_core.max_heap_size == 0) {
cl_index size = ecl_option_values[ECL_OPT_HEAP_SAFETY_AREA];
ecl_core.safety_region = ecl_alloc_atomic_unprotected(size);
ecl_core.safety_region = GC_MALLOC_ATOMIC_IGNORE_OFF_PAGE(size);
} else if (ecl_core.safety_region) {
ecl_core.safety_region = 0;
}

View file

@ -38,15 +38,7 @@ out_of_memory()
/* malloc wrapper that aborts via out_of_memory() on failure.
 * NOTE(review): this span concatenates the removed body (env lookup +
 * interrupt masking) with the added one-liner; `ptr` is declared twice,
 * which is not valid C.  The applied version keeps only
 * `void *ptr = malloc(n);` before the NULL check — confirm against the
 * actual patch. */
void *
ecl_malloc(cl_index n)
{
const cl_env_ptr the_env = ecl_process_env_unsafe();
void *ptr;
if (!the_env) {
ptr = malloc(n);
} else {
ecl_disable_interrupts_env(the_env);
ptr = malloc(n);
ecl_enable_interrupts_env(the_env);
}
void *ptr = malloc(n);
if (ptr == NULL) out_of_memory();
return ptr;
}
@ -54,27 +46,13 @@ ecl_malloc(cl_index n)
/* free wrapper.
 * NOTE(review): removed and added diff bodies are concatenated — as
 * written, `free(ptr)` runs twice (double free).  The applied version
 * is just the single trailing `free(ptr);` — confirm against the actual
 * patch. */
void
ecl_free(void *ptr)
{
const cl_env_ptr the_env = ecl_process_env_unsafe();
if (!the_env) {
free(ptr);
} else {
ecl_disable_interrupts_env(the_env);
free(ptr);
ecl_enable_interrupts_env(the_env);
}
free(ptr);
}
/* realloc wrapper; osize is accepted but unused here, and failure
 * aborts via out_of_memory().
 * NOTE(review): removed and added diff bodies are concatenated —
 * realloc would run twice.  The applied version keeps only the single
 * trailing `ptr = realloc(ptr, nsize);`.  Also note the overwrite-on-
 * failure pattern is acceptable only because out_of_memory() does not
 * return — confirm both points against the actual patch. */
void *
ecl_realloc(void *ptr, cl_index osize, cl_index nsize)
{
const cl_env_ptr the_env = ecl_process_env_unsafe();
if (!the_env) {
ptr = realloc(ptr, nsize);
} else {
ecl_disable_interrupts_env(the_env);
ptr = realloc(ptr, nsize);
ecl_enable_interrupts_env(the_env);
}
ptr = realloc(ptr, nsize);
if (ptr == NULL) out_of_memory();
return ptr;
}
@ -326,34 +304,105 @@ ecl_alloc_object(cl_type t)
case t_fixnum:
return ecl_make_fixnum(0); /* Immediate fixnum */
default:
return ecl_core.allocator->allocate_object(t);
{
const cl_env_ptr the_env = ecl_process_env_unsafe();
cl_object o;
if(the_env) ecl_disable_interrupts_env(the_env);
o = ecl_core.allocator->allocate_object(t);
o->d.t = t;
if(the_env) ecl_enable_interrupts_env(the_env);
return o;
}
}
}
/* Top-level allocation entry point: delegates to the installed
 * allocator's allocate_memory, masking interrupts when a lisp
 * environment exists for the calling thread.
 * NOTE(review): two signature lines appear (old `ecl_alloc_memory`,
 * new `ecl_alloc`) plus the old one-line body — diff residue; only the
 * `ecl_alloc` signature and the env-guarded body belong in the applied
 * file.  Confirm against the actual patch. */
void *
ecl_alloc_memory(cl_index n)
ecl_alloc(cl_index n)
{
return ecl_core.allocator->allocate_memory(n);
const cl_env_ptr the_env = ecl_process_env_unsafe();
void *ptr = NULL;
if(!the_env) {
return ecl_core.allocator->allocate_memory(n);
} else {
ecl_disable_interrupts_env(the_env);
ptr = ecl_core.allocator->allocate_memory(n);
ecl_enable_interrupts_env(the_env);
}
return ptr;
}
/* Allocate N bytes of pointer-free memory through the installed
 * allocator.  If the calling thread has a lisp environment, interrupts
 * are masked for the duration of the allocator call; threads without an
 * environment (e.g. during bootstrap) call straight through. */
void *
ecl_alloc_atomic(cl_index n)
{
  const cl_env_ptr env = ecl_process_env_unsafe();
  void *result;
  if (env == NULL) {
    result = ecl_core.allocator->allocate_atomic(n);
  } else {
    ecl_disable_interrupts_env(env);
    result = ecl_core.allocator->allocate_atomic(n);
    ecl_enable_interrupts_env(env);
  }
  return result;
}
/* Allocate N bytes of manually-managed memory through the installed
 * allocator (the caller is responsible for freeing it).  Interrupts are
 * masked around the allocator call whenever the thread has a lisp
 * environment; otherwise we call through directly. */
void *
ecl_alloc_manual(cl_index n)
{
  const cl_env_ptr env = ecl_process_env_unsafe();
  void *result;
  if (env == NULL) {
    result = ecl_core.allocator->allocate_manual(n);
  } else {
    ecl_disable_interrupts_env(env);
    result = ecl_core.allocator->allocate_manual(n);
    ecl_enable_interrupts_env(env);
  }
  return result;
}
/* Release a lisp object through the installed allocator's free_object,
 * masking interrupts when the thread has a lisp environment.
 * NOTE(review): the first `return ...;` line is the removed diff body;
 * it would make everything after it dead code.  Only the env-guarded
 * body belongs in the applied file — confirm against the actual patch. */
void
ecl_free_object(cl_object ptr)
{
return ecl_core.allocator->free_object(ptr);
const cl_env_ptr the_env = ecl_process_env_unsafe();
if(!the_env) {
ecl_core.allocator->free_object(ptr);
} else {
ecl_disable_interrupts_env(the_env);
ecl_core.allocator->free_object(ptr);
ecl_enable_interrupts_env(the_env);
}
}
/* Release raw memory through the installed allocator's free_memory,
 * masking interrupts when the thread has a lisp environment.
 * NOTE(review): two signature lines appear (old `ecl_free_memory`, new
 * `ecl_dealloc`) — diff residue; only the `ecl_dealloc` signature
 * belongs in the applied file.  Confirm against the actual patch. */
void
ecl_free_memory(void *ptr)
ecl_dealloc(void *ptr)
{
const cl_env_ptr the_env = ecl_process_env_unsafe();
if(!the_env) {
ecl_core.allocator->free_memory(ptr);
} else {
ecl_disable_interrupts_env(the_env);
ecl_core.allocator->free_memory(ptr);
ecl_enable_interrupts_env(the_env);
}
}
/* -- Helpers --------------------------------------------------------------- */
cl_object /* used by bignum.d */
/* Allocate an object of type T plus EXTRA_SPACE trailing bytes in a
 * single pointer-free block.  The `displaced` field is aimed just past
 * the fixed part so the extra space is addressable from the object. */
ecl_alloc_compact_object(cl_type t, cl_index extra_space)
{
  cl_index fixed_size = ecl_type_info[t].size;
  cl_object obj = ecl_alloc_atomic(fixed_size + extra_space);
  obj->array.t = t;
  obj->array.displaced = (void*)((char*)obj + fixed_size);
  return obj;
}
cl_object
ecl_cons(cl_object a, cl_object d)
{
struct ecl_cons *obj = ecl_alloc_memory(sizeof(struct ecl_cons));
struct ecl_cons *obj = ecl_alloc(sizeof(struct ecl_cons));
#ifdef ECL_SMALL_CONS
obj->car = a;
obj->cdr = d;
@ -498,17 +547,21 @@ ecl_put_reader_token(cl_object token)
/* Manual-allocator backend: allocate an object of type T using plain
 * malloc sized from the ecl_type_info table.
 * NOTE(review): the ecl_internal_error line is the removed stub body
 * left in by the diff rendering — as written the function would always
 * abort before reaching the real code.  Only the two lines after it
 * belong in the applied file; confirm against the actual patch. */
static cl_object
alloc_object(cl_type t)
{
ecl_internal_error("*** memory: alloc_object not implemented.\n");
struct ecl_type_information *ti = ecl_type_info + t;
return ecl_malloc(ti->size);
}
/* Manual-allocator backend: release object O with ecl_free.
 * NOTE(review): two parameter lines appear (old `self`, new `o`) and
 * the old error stub line remains — diff residue; the applied body is
 * the FIXME comment plus `ecl_free(o);` under the `(cl_object o)`
 * signature.  Confirm against the actual patch.  The FIXME is real:
 * this path skips finalization entirely. */
static void
free_object(cl_object self)
free_object(cl_object o)
{
ecl_internal_error("*** memory: free_object not implemented.\n");
/* FIXME this should invoke the finalizer! That is - reify finalizers here. */
ecl_free(o);
}
struct ecl_allocator_ops manual_allocator = {
.allocate_memory = ecl_malloc,
.allocate_atomic = ecl_malloc,
.allocate_manual = ecl_malloc,
.allocate_object = alloc_object,
.free_memory = ecl_free,
.free_object = free_object

View file

@ -811,6 +811,7 @@ cl_object ecl_module_stacks = (cl_object)&module_stacks;
cl_object
ecl_make_stack(cl_index size)
{
/* XXX ecl_alloc flags=manual */
cl_object x = ecl_malloc(sizeof(struct ecl_vector));
x->vector.t = t_vector;
x->vector.elttype = ecl_aet_object;

View file

@ -249,31 +249,26 @@ extern ECL_API const cl_object ecl_ct_minus_half;
extern ECL_API const cl_object ecl_ct_protect_tag;
extern ECL_API const cl_object ecl_ct_dummy_tag;
/* alloc.c / alloc_2.c */
/* memory */
extern ECL_API cl_object ecl_alloc_object(cl_type t);
extern ECL_API cl_object ecl_alloc_instance(cl_index slots);
extern ECL_API cl_object ecl_alloc_weak_pointer(cl_object o);
extern ECL_API cl_object ecl_alloc_compact_object(cl_type t, cl_index extra_space);
extern ECL_API cl_object ecl_cons(cl_object a, cl_object d);
extern ECL_API void ecl_free_object(cl_object o);
#define ecl_list1(x) ecl_cons(x, ECL_NIL)
extern ECL_API cl_object si_make_weak_pointer(cl_object o);
extern ECL_API cl_object si_weak_pointer_value(cl_object o);
#ifdef GBC_BOEHM
extern ECL_API void *ecl_alloc_unprotected(cl_index n);
extern ECL_API void *ecl_alloc_atomic_unprotected(cl_index n);
extern ECL_API void *ecl_alloc(cl_index n);
extern ECL_API void *ecl_alloc_manual(cl_index n);
extern ECL_API void *ecl_alloc_atomic(cl_index n);
extern ECL_API void *ecl_alloc_uncollectable(size_t size);
extern ECL_API void ecl_free_uncollectable(void *);
extern ECL_API void ecl_dealloc(void *);
#define ecl_alloc_align(s,d) ecl_alloc(s)
#define ecl_alloc_atomic_align(s,d) ecl_alloc_atomic(s)
#else /* Ideally the core would not depend on these. */
# error "IMPLEMENT ME!"
#endif /* GBC_BOEHM */
/* all_symbols */

View file

@ -74,10 +74,4 @@ cl_object ecl_raise(ecl_ex_type t, bool ret,
#define ecl_cerror4(extype,a1,a2,a3) ecl_raise(extype, 1, a1, a2, a3, NULL)
#define ecl_cerror5(extype,a1,a2,a3,p4) ecl_raise(extype, 1, a1, a2, a3, p4)
/* memory.c */
void *ecl_alloc_memory(cl_index n);
cl_object ecl_alloc_object(cl_type t);
void ecl_free_memory(void *ptr);
void ecl_free_object(cl_object o);
#endif /* ECL_NUCLEUS_H */

View file

@ -112,6 +112,8 @@ typedef cl_object (*cl_objectfn_envfn)(cl_env_ptr);
/* Allocator interface */
struct ecl_allocator_ops {
void *(*allocate_memory)(cl_index n); /* low-level alloc */
void *(*allocate_manual)(cl_index n); /* low-level alloc */
void *(*allocate_atomic)(cl_index n); /* low-level alloc */
cl_object (*allocate_object)(cl_type t); /* high-level alloc */
void (*free_memory)(void*); /* low-level free */
void (*free_object)(cl_object); /* high-level free */