Mirror of git://git.sv.gnu.org/emacs.git
Variadic C functions now count arguments with ptrdiff_t.
This partly undoes my 2011-03-30 change, which replaced int with size_t.
Back then I didn't know that the Emacs coding style prefers signed int.
Also, in the meantime I found a few more instances where arguments were
being counted with int, which may truncate counts on 64-bit machines, or
EMACS_INT, which may be unnecessarily wide.
* lisp.h (struct Lisp_Subr.function.aMANY)
(DEFUN_ARGS_MANY, internal_condition_case_n, safe_call):
Arg counts are now ptrdiff_t, not size_t.
All variadic functions and their callers changed accordingly.
(struct gcpro.nvars): Now ptrdiff_t, not size_t.  All uses changed.
* bytecode.c (exec_byte_code): Check maxdepth for overflow,
to avoid potential buffer overrun.  Don't assume arg counts fit in 'int'.
* callint.c (Fcall_interactively): Check arg count for overflow,
to avoid potential buffer overrun.  Use signed char, not 'int',
for 'varies' array, so that we needn't bother to check its size
calculation for overflow.
* editfns.c (Fformat): Use ptrdiff_t, not EMACS_INT, to count args.
* eval.c (apply_lambda):
* fns.c (Fmapconcat): Use XFASTINT, not XINT, to get args length.
(struct textprop_rec.argnum): Now ptrdiff_t, not int.  All uses changed.
(mapconcat): Use ptrdiff_t, not int and EMACS_INT, to count args.
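For illustration only (this is not code from the commit; the function and
variable names below are made up), a minimal standalone C sketch of the type
question being settled: an 'int' argument count can silently truncate on
64-bit hosts, an unsigned size_t count conflicts with the Emacs preference
for signed types, while ptrdiff_t is both signed and as wide as any object
that fits in memory.

#include <stddef.h>
#include <stdio.h>

/* Hypothetical arg-counting helper in the post-commit style: the count
   is ptrdiff_t, so large counts are neither truncated (as with int)
   nor forced unsigned (as with size_t).  */
static long
sum_args (ptrdiff_t nargs, long const *args)
{
  long sum = 0;
  ptrdiff_t i;
  for (i = 0; i < nargs; i++)	/* loop index matches the count's type */
    sum += args[i];
  return sum;
}

int
main (void)
{
  long args[] = { 1, 2, 3 };
  ptrdiff_t nargs = sizeof args / sizeof *args;
  printf ("%ld\n", sum_args (nargs, args));
  return 0;
}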
parent a1759b7624
commit f66c7cf8f7

19 changed files with 188 additions and 161 deletions
bytecode.c

@@ -433,7 +433,7 @@ If the third argument is incorrect, Emacs may crash. */)
 Lisp_Object
 exec_byte_code (Lisp_Object bytestr, Lisp_Object vector, Lisp_Object maxdepth,
-		Lisp_Object args_template, int nargs, Lisp_Object *args)
+		Lisp_Object args_template, ptrdiff_t nargs, Lisp_Object *args)
 {
   int count = SPECPDL_INDEX ();
 #ifdef BYTE_CODE_METER
@@ -464,7 +464,7 @@ exec_byte_code (Lisp_Object bytestr, Lisp_Object vector, Lisp_Object maxdepth,
   CHECK_STRING (bytestr);
   CHECK_VECTOR (vector);
-  CHECK_NUMBER (maxdepth);
+  CHECK_NATNUM (maxdepth);
 
 #ifdef BYTE_CODE_SAFE
   const_length = ASIZE (vector);
@@ -486,6 +486,8 @@ exec_byte_code (Lisp_Object bytestr, Lisp_Object vector, Lisp_Object maxdepth,
   stack.byte_string = bytestr;
   stack.pc = stack.byte_string_start = SDATA (bytestr);
   stack.constants = vector;
+  if (min (PTRDIFF_MAX, SIZE_MAX) / sizeof (Lisp_Object) < XFASTINT (maxdepth))
+    memory_full (SIZE_MAX);
   top = (Lisp_Object *) alloca (XFASTINT (maxdepth)
                                 * sizeof (Lisp_Object));
 #if BYTE_MAINTAIN_TOP
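The two added lines guard the size computation fed to alloca: the
multiplication XFASTINT (maxdepth) * sizeof (Lisp_Object) is attempted only
once maxdepth is known to be at most min (PTRDIFF_MAX, SIZE_MAX) /
sizeof (Lisp_Object), so it cannot wrap around and under-allocate the
evaluation stack.  A standalone sketch of the same divide-before-multiply
guard (illustrative only; checked_alloc is not an Emacs function, and Emacs
calls memory_full rather than returning NULL):

#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>

/* Allocate N items of ITEM_SIZE bytes, refusing any request whose
   byte count would overflow the multiplication.  */
static void *
checked_alloc (ptrdiff_t n, size_t item_size)
{
  size_t max = PTRDIFF_MAX < SIZE_MAX ? (size_t) PTRDIFF_MAX : SIZE_MAX;
  if (n < 0 || max / item_size < (size_t) n)
    return NULL;
  return malloc (n * item_size);
}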
@@ -502,14 +504,14 @@ exec_byte_code (Lisp_Object bytestr, Lisp_Object vector, Lisp_Object maxdepth,
   if (INTEGERP (args_template))
     {
-      int at = XINT (args_template);
+      ptrdiff_t at = XINT (args_template);
       int rest = at & 128;
       int mandatory = at & 127;
-      int nonrest = at >> 8;
+      ptrdiff_t nonrest = at >> 8;
       eassert (mandatory <= nonrest);
       if (nargs <= nonrest)
 	{
-	  int i;
+	  ptrdiff_t i;
 	  for (i = 0 ; i < nargs; i++, args++)
 	    PUSH (*args);
 	  if (nargs < mandatory)
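As the decoding in this hunk shows, args_template packs the byte-code
function's arity into one integer: the mandatory argument count sits in the
low 7 bits, bit 7 (value 128) flags a &rest parameter, and the higher bits
hold the number of named (non-&rest) arguments.  A worked example with an
illustrative value, assuming that encoding, for a function with two mandatory
arguments, one optional argument, and a &rest parameter:

  at        = (3 << 8) | 128 | 2  = 898
  mandatory = at & 127            = 2
  rest      = at & 128            = 128  /* nonzero: a &rest parameter exists */
  nonrest   = at >> 8             = 3    /* 2 mandatory + 1 optional named arg */

which also satisfies the assertion, since mandatory (2) <= nonrest (3).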
@@ -528,7 +530,7 @@ exec_byte_code (Lisp_Object bytestr, Lisp_Object vector, Lisp_Object maxdepth,
 	}
       else if (rest)
 	{
-	  int i;
+	  ptrdiff_t i;
 	  for (i = 0 ; i < nonrest; i++, args++)
 	    PUSH (*args);
 	  PUSH (Flist (nargs - nonrest, args));