typedef uintptr_t byte_ct;
typedef intptr_t object_ct;
-/* Number of live and free conses etc. */
+/* Number of live and free conses etc. counted by the most-recent GC. */
static struct gcstat
{
/* Head of a circularly-linked list of finalizers that must be invoked
because we deemed them unreachable. This list must be global, and
- not a local inside garbage_collect_1, in case we GC again while
+ not a local inside garbage_collect, in case we GC again while
running finalizers. */
struct Lisp_Finalizer doomed_finalizers;
newi->next = interval_block;
interval_block = newi;
interval_block_index = 0;
- gcstat.total_free_intervals += INTERVAL_BLOCK_SIZE;
}
val = &interval_block->intervals[interval_block_index++];
}
consing_until_gc -= sizeof (struct interval);
intervals_consed++;
- gcstat.total_free_intervals--;
RESET_INTERVAL (val);
val->gcmarkbit = 0;
return val;
NEXT_FREE_LISP_STRING (s) = string_free_list;
string_free_list = ptr_bounds_clip (s, sizeof *s);
}
-
- gcstat.total_free_strings += STRING_BLOCK_SIZE;
}
check_string_free_list ();
MALLOC_UNBLOCK_INPUT;
- gcstat.total_free_strings--;
- gcstat.total_strings++;
++strings_consed;
consing_until_gc -= sizeof *s;
memset (new->gcmarkbits, 0, sizeof new->gcmarkbits);
float_block = new;
float_block_index = 0;
- gcstat.total_free_floats += FLOAT_BLOCK_SIZE;
}
XSETFLOAT (val, &float_block->floats[float_block_index]);
float_block_index++;
eassert (!XFLOAT_MARKED_P (XFLOAT (val)));
consing_until_gc -= sizeof (struct Lisp_Float);
floats_consed++;
- gcstat.total_free_floats--;
return val;
}
cons_free_list = ptr;
if (INT_ADD_WRAPV (consing_until_gc, sizeof *ptr, &consing_until_gc))
consing_until_gc = INTMAX_MAX;
- gcstat.total_free_conses++;
}
DEFUN ("cons", Fcons, Scons, 2, 2, 0,
{
if (cons_block_index == CONS_BLOCK_SIZE)
{
- /* Maximum number of conses that should be active at any
- given time, so that list lengths fit into a ptrdiff_t and
- into a fixnum. */
- ptrdiff_t max_conses = min (PTRDIFF_MAX, MOST_POSITIVE_FIXNUM);
-
- /* This check is typically optimized away, as a runtime
- check is needed only on weird platforms where a count of
- distinct conses might not fit. */
- if (max_conses < INTPTR_MAX / sizeof (struct Lisp_Cons)
- && (max_conses - CONS_BLOCK_SIZE
- < gcstat.total_free_conses + gcstat.total_conses))
- memory_full (sizeof (struct cons_block));
-
struct cons_block *new
= lisp_align_malloc (sizeof *new, MEM_TYPE_CONS);
memset (new->gcmarkbits, 0, sizeof new->gcmarkbits);
new->next = cons_block;
cons_block = new;
cons_block_index = 0;
- gcstat.total_free_conses += CONS_BLOCK_SIZE;
}
XSETCONS (val, &cons_block->conses[cons_block_index]);
cons_block_index++;
XSETCDR (val, cdr);
eassert (!XCONS_MARKED_P (XCONS (val)));
consing_until_gc -= sizeof (struct Lisp_Cons);
- gcstat.total_free_conses--;
cons_cells_consed++;
return val;
}
eassert (vindex < VECTOR_MAX_FREE_LIST_INDEX);
set_next_vector (v, vector_free_lists[vindex]);
vector_free_lists[vindex] = v;
- gcstat.total_free_vector_slots += nbytes / word_size;
}
/* Get a new vector block. */
{
vector = vector_free_lists[index];
vector_free_lists[index] = next_vector (vector);
- gcstat.total_free_vector_slots -= nbytes / word_size;
return vector;
}
/* This vector is larger than requested. */
vector = vector_free_lists[index];
vector_free_lists[index] = next_vector (vector);
- gcstat.total_free_vector_slots -= nbytes / word_size;
/* Excess bytes are used for the smaller vector,
which should be set on an appropriate free list. */
space was coalesced into the only free vector. */
free_this_block = true;
else
- setup_on_free_list (vector, total_bytes);
+ {
+ setup_on_free_list (vector, total_bytes);
+ gcstat.total_free_vector_slots += total_bytes / word_size;
+ }
}
}
new->next = symbol_block;
symbol_block = new;
symbol_block_index = 0;
- gcstat.total_free_symbols += SYMBOL_BLOCK_SIZE;
}
XSETSYMBOL (val, &symbol_block->symbols[symbol_block_index]);
symbol_block_index++;
init_symbol (val, name);
consing_until_gc -= sizeof (struct Lisp_Symbol);
symbols_consed++;
- gcstat.total_free_symbols--;
return val;
}
There are other GC roots of course, but these roots are dynamic
runtime data structures that pdump doesn't care about and so we can
- continue to mark those directly in garbage_collect_1. */
+ continue to mark those directly in garbage_collect. */
void
visit_static_gc_roots (struct gc_root_visitor visitor)
{
}
/* List of weak hash tables we found during marking the Lisp heap.
- Will be NULL on entry to garbage_collect_1 and after it
- returns. */
+ NULL on entry to garbage_collect and after it returns. */
static struct Lisp_Hash_Table *weak_hash_tables;
NO_INLINE /* For better stack traces */
}
-/* Subroutine of Fgarbage_collect that does most of the work. */
+/* Core garbage-collection routine; called directly and by
+   Fgarbage_collect. */
-static bool
-garbage_collect_1 (struct gcstat *gcst)
+void
+garbage_collect (void)
{
struct buffer *nextb;
char stack_top_variable;
eassert (weak_hash_tables == NULL);
if (garbage_collection_inhibited)
- return false;
+ return;
/* Record this function, so it appears on the profiler's backtraces. */
record_in_backtrace (QAutomatic_GC, 0, 0);
unbind_to (count, Qnil);
- *gcst = gcstat;
-
/* GC is complete: now we can run our finalizer callbacks. */
run_finalizers (&doomed_finalizers);
byte_ct swept = tot_before <= tot_after ? 0 : tot_before - tot_after;
malloc_probe (min (swept, SIZE_MAX));
}
-
- return true;
-}
-
-void
-garbage_collect (void)
-{
- struct gcstat gcst;
- garbage_collect_1 (&gcst);
}
DEFUN ("garbage-collect", Fgarbage_collect, Sgarbage_collect, 0, 0, "",
See Info node `(elisp)Garbage Collection'. */)
(void)
{
- struct gcstat gcst;
- if (!garbage_collect_1 (&gcst))
+ if (garbage_collection_inhibited)
return Qnil;
+ garbage_collect ();
+ struct gcstat gcst = gcstat;
+
Lisp_Object total[] = {
list4 (Qconses, make_fixnum (sizeof (struct Lisp_Cons)),
make_int (gcst.total_conses),