
github.com/videolan/dav1d.git
author     Henrik Gramner <gramner@twoorioles.com>    2022-09-26 16:14:34 +0300
committer  Henrik Gramner <henrik@gramner.com>        2022-09-28 17:06:39 +0300
commit     6fefa6a55cc1cc1cefad93e9f78efd0725fd8efa (patch)
tree       03aeb4c50529b2c685037dc0ba8d178d08ec2fc9
parent     8349845c8cb2d296e0d89416538c5f7215ca8a08 (diff)
checkasm: Improve 32-bit parameter clobbering on x86-64
Use explicit parameter type detection and manually clobber the upper bits instead of relying on internal compiler behavior.
-rw-r--r--   meson.build                      |  4
-rw-r--r--   tests/checkasm/checkasm.h        | 65
-rw-r--r--   tests/checkasm/x86/checkasm.asm  | 97
3 files changed, 94 insertions, 72 deletions
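
To make the failure mode concrete before the patch itself, here is a minimal standalone C sketch (not dav1d code, all names made up) of the class of bug this clobbering is meant to expose: a 32-bit int parameter only defines the low half of its 64-bit register slot, so hand-written asm that indexes with the full register silently depends on the caller having zero-extended it. Once checked_call fills the upper half with 0xdeadbeef, such code misbehaves or crashes.

#include <stdint.h>
#include <stdio.h>

/* bad_sum models asm that consumes the whole 64-bit slot; good_sum models
 * asm that only uses the defined low 32 bits of the parameter. */
static int bad_sum(const uint8_t *buf, uint64_t w_slot) {
    int s = 0;
    for (uint64_t i = 0; i < w_slot; i++) /* walks far past the buffer */
        s += buf[i];
    return s;
}

static int good_sum(const uint8_t *buf, uint64_t w_slot) {
    int s = 0;
    for (uint32_t i = 0; i < (uint32_t)w_slot; i++)
        s += buf[i];
    return s;
}

int main(void) {
    uint8_t buf[16] = {1, 2, 3, 4};
    uint64_t w_slot = 4 | 0xdeadbeef00000000ull; /* register contents after clobbering */
    printf("%d\n", good_sum(buf, w_slot)); /* prints 10 */
    /* bad_sum(buf, w_slot) would read far out of bounds and likely segfault */
    return 0;
}
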
diff --git a/meson.build b/meson.build
index a9bce50..c16594d 100644
--- a/meson.build
+++ b/meson.build
@@ -261,6 +261,10 @@ if cc.has_function('pthread_getaffinity_np', prefix : pthread_np_prefix, args :
cdata.set('HAVE_PTHREAD_GETAFFINITY_NP', 1)
endif
+if cc.compiles('int x = _Generic(0, default: 0);', name: '_Generic', args: test_args)
+ cdata.set('HAVE_C11_GENERIC', 1)
+endif
+
# Compiler flag tests
if cc.has_argument('-fvisibility=hidden')
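
The meson check above only probes whether the compiler accepts C11 generic selection; the actual type dispatch happens in checkasm.h below. For reference, a trivial standalone illustration of _Generic (not part of dav1d):

#include <stdio.h>

#define TYPE_NAME(x) _Generic((x),          \
    int:          "int",                    \
    unsigned int: "unsigned int",           \
    float:        "float",                  \
    default:      "something else")

int main(void) {
    printf("%s\n", TYPE_NAME(0));       /* int */
    printf("%s\n", TYPE_NAME(0.0f));    /* float */
    printf("%s\n", TYPE_NAME((char)0)); /* something else */
    return 0;
}
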
diff --git a/tests/checkasm/checkasm.h b/tests/checkasm/checkasm.h
index fb5f8ed..29c1dbe 100644
--- a/tests/checkasm/checkasm.h
+++ b/tests/checkasm/checkasm.h
@@ -185,17 +185,6 @@ static inline uint64_t readtime(void) {
void checkasm_checked_call(void *func, ...);
#if ARCH_X86_64
-/* Evil hack: detect incorrect assumptions that 32-bit ints are zero-extended
- * to 64-bit. This is done by clobbering the stack with junk around the stack
- * pointer and calling the assembly function through checked_call() with added
- * dummy arguments which forces all real arguments to be passed on the stack
- * and not in registers. For 32-bit arguments the upper half of the 64-bit
- * register locations on the stack will now contain junk which will cause
- * misbehaving functions to either produce incorrect output or segfault. Note
- * that even though this works extremely well in practice, it's technically
- * not guaranteed and false negatives is theoretically possible, but there
- * can never be any false positives. */
-void checkasm_stack_clobber(uint64_t clobber, ...);
/* YMM and ZMM registers on x86 are turned off to save power when they haven't
* been used for some period of time. When they are used there will be a
* "warmup" period during which performance will be reduced and inconsistent
@@ -203,24 +192,54 @@ void checkasm_stack_clobber(uint64_t clobber, ...);
* work around this by periodically issuing "dummy" instructions that uses
* those registers to keep them powered on. */
void checkasm_simd_warmup(void);
-#define declare_new(ret, ...)\
- ret (*checked_call)(void *, int, int, int, int, int, __VA_ARGS__,\
- int, int, int, int, int, int, int, int,\
- int, int, int, int, int, int, int) =\
- (void *)checkasm_checked_call;
-#define CLOB (UINT64_C(0xdeadbeefdeadbeef))
+
+/* The upper 32 bits of 32-bit data types are undefined when passed as function
+ * parameters. In practice those bits usually end up being zero which may hide
+ * certain bugs, such as using a register containing undefined bits as a pointer
+ * offset, so we want to intentionally clobber those bits with junk to expose
+ * any issues. The following set of macros automatically calculates a bitmask
+ * specifying which parameters should have their upper halves clobbered. */
#ifdef _WIN32
-#define STACKARGS 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 0, 0, 0
+/* Integer and floating-point parameters share "register slots". */
+#define IGNORED_FP_ARGS 0
#else
-#define STACKARGS 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 0, 0, 0, 0, 0
+/* Up to 8 floating-point parameters are passed in XMM registers, which are
+ * handled orthogonally from integer parameters passed in GPR registers. */
+#define IGNORED_FP_ARGS 8
#endif
+#ifdef HAVE_C11_GENERIC
+#define clobber_type(arg) _Generic((void (*)(void*, arg))NULL,\
+ void (*)(void*, int32_t ): clobber_mask |= 1 << mpos++,\
+ void (*)(void*, uint32_t): clobber_mask |= 1 << mpos++,\
+ void (*)(void*, float ): mpos += (fp_args++ >= IGNORED_FP_ARGS),\
+ void (*)(void*, double ): mpos += (fp_args++ >= IGNORED_FP_ARGS),\
+ default: mpos++)
+#define init_clobber_mask(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, ...)\
+ unsigned clobber_mask = 0;\
+ {\
+ int mpos = 0, fp_args = 0;\
+ clobber_type(a); clobber_type(b); clobber_type(c); clobber_type(d);\
+ clobber_type(e); clobber_type(f); clobber_type(g); clobber_type(h);\
+ clobber_type(i); clobber_type(j); clobber_type(k); clobber_type(l);\
+ clobber_type(m); clobber_type(n); clobber_type(o); clobber_type(p);\
+ }
+#else
+/* Skip parameter clobbering on compilers without support for _Generic() */
+#define init_clobber_mask(...) unsigned clobber_mask = 0
+#endif
+#define declare_new(ret, ...)\
+ ret (*checked_call)(__VA_ARGS__, int, int, int, int, int, int, int,\
+ int, int, int, int, int, int, int, int, int,\
+ void*, unsigned) =\
+ (void*)checkasm_checked_call;\
+ init_clobber_mask(__VA_ARGS__, void*, void*, void*, void*,\
+ void*, void*, void*, void*, void*, void*,\
+ void*, void*, void*, void*, void*);
#define call_new(...)\
(checkasm_set_signal_handler_state(1),\
checkasm_simd_warmup(),\
- checkasm_stack_clobber(CLOB, CLOB, CLOB, CLOB, CLOB, CLOB, CLOB,\
- CLOB, CLOB, CLOB, CLOB, CLOB, CLOB, CLOB,\
- CLOB, CLOB, CLOB, CLOB, CLOB, CLOB, CLOB),\
- checked_call(func_new, 0, 0, 0, 0, 0, __VA_ARGS__, STACKARGS));\
+ checked_call(__VA_ARGS__, 16, 15, 14, 13, 12, 11, 10, 9, 8,\
+ 7, 6, 5, 4, 3, 2, 1, func_new, clobber_mask));\
checkasm_set_signal_handler_state(0)
#elif ARCH_X86_32
#define declare_new(ret, ...)\
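
A hedged, standalone re-creation of the bitmask calculation above (not the real checkasm plumbing), showing the mask that init_clobber_mask would produce for a hypothetical prototype void fn(uint8_t *dst, ptrdiff_t stride, int w, int h) under the SysV assumption of 8 ignored FP args:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

#define IGNORED_FP_ARGS 8 /* SysV: first 8 float/double args use XMM regs */
#define CLOBBER_TYPE(arg) _Generic((void (*)(void*, arg))NULL,              \
    void (*)(void*, int32_t ): (mask |= 1u << pos++),                       \
    void (*)(void*, uint32_t): (mask |= 1u << pos++),                       \
    void (*)(void*, float   ): (pos += (fp_args++ >= IGNORED_FP_ARGS)),     \
    void (*)(void*, double  ): (pos += (fp_args++ >= IGNORED_FP_ARGS)),     \
    default: pos++)

int main(void) {
    unsigned mask = 0;
    int pos = 0, fp_args = 0;
    CLOBBER_TYPE(uint8_t *); /* pointer -> no bit, pos = 1 */
    CLOBBER_TYPE(ptrdiff_t); /* 64-bit  -> no bit, pos = 2 */
    CLOBBER_TYPE(int);       /* 32-bit  -> bit 2 set */
    CLOBBER_TYPE(int);       /* 32-bit  -> bit 3 set */
    printf("clobber_mask = 0x%x\n", mask); /* prints 0xc */
    return 0;
}

checked_call then receives this mask as its final argument (after func_new) and only clobbers the register or stack slots whose bits are set.
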
diff --git a/tests/checkasm/x86/checkasm.asm b/tests/checkasm/x86/checkasm.asm
index 313a127..22746fb 100644
--- a/tests/checkasm/x86/checkasm.asm
+++ b/tests/checkasm/x86/checkasm.asm
@@ -151,56 +151,44 @@ cglobal init_x86, 0, 5
RET
%if ARCH_X86_64
-;-----------------------------------------------------------------------------
-; int checkasm_stack_clobber(uint64_t clobber, ...)
-;-----------------------------------------------------------------------------
-cglobal stack_clobber, 1, 2
- ; Clobber the stack with junk below the stack pointer
- %define argsize (max_args+6)*8
- SUB rsp, argsize
- mov r1, argsize-8
-.loop:
- mov [rsp+r1], r0
- sub r1, 8
- jge .loop
- ADD rsp, argsize
- RET
-
%if WIN64
- %assign free_regs 7
%define stack_param rsp+32 ; shadow space
- %define num_stack_params rsp+stack_offset+22*8
+ %define num_fn_args rsp+stack_offset+17*8
+ %assign num_reg_args 4
+ %assign free_regs 7
+ %assign clobber_mask_stack_bit 16
DECLARE_REG_TMP 4
%else
- %assign free_regs 9
%define stack_param rsp
- %define num_stack_params rsp+stack_offset+16*8
+ %define num_fn_args rsp+stack_offset+11*8
+ %assign num_reg_args 6
+ %assign free_regs 9
+ %assign clobber_mask_stack_bit 64
DECLARE_REG_TMP 7
%endif
-;-----------------------------------------------------------------------------
-; void checkasm_checked_call(void *func, ...)
-;-----------------------------------------------------------------------------
+%macro CLOBBER_UPPER 2 ; reg, mask_bit
+ mov r13d, %1d
+ or r13, r8
+ test r9b, %2
+ cmovnz %1, r13
+%endmacro
+
cglobal checked_call, 2, 15, 16, max_args*8+64+8
- mov t0, r0
+ mov r10d, [num_fn_args]
+ mov r8, 0xdeadbeef00000000
+ mov r9d, [num_fn_args+r10*8+8] ; clobber_mask
+ mov t0, [num_fn_args+r10*8] ; func
- ; All arguments have been pushed on the stack instead of registers in
- ; order to test for incorrect assumptions that 32-bit ints are
- ; zero-extended to 64-bit.
- mov r0, r6mp
- mov r1, r7mp
- mov r2, r8mp
- mov r3, r9mp
+ ; Clobber the upper halves of 32-bit parameters
+ CLOBBER_UPPER r0, 1
+ CLOBBER_UPPER r1, 2
+ CLOBBER_UPPER r2, 4
+ CLOBBER_UPPER r3, 8
%if UNIX64
- mov r4, r10mp
- mov r5, r11mp
+ CLOBBER_UPPER r4, 16
+ CLOBBER_UPPER r5, 32
%else ; WIN64
- ; Move possible floating-point arguments to the correct registers
- movq m0, r0
- movq m1, r1
- movq m2, r2
- movq m3, r3
-
%assign i 6
%rep 16-6
mova m %+ i, [x %+ i]
@@ -208,22 +196,29 @@ cglobal checked_call, 2, 15, 16, max_args*8+64+8
%endrep
%endif
+ xor r11d, r11d
+ sub r10d, num_reg_args
+ cmovs r10d, r11d ; num stack args
+
; write stack canaries to the area above parameters passed on the stack
- mov r9d, [num_stack_params]
- mov r8, [rsp+stack_offset] ; return address
- not r8
+ mov r12, [rsp+stack_offset] ; return address
+ not r12
%assign i 0
%rep 8 ; 64 bytes
- mov [stack_param+(r9+i)*8], r8
+ mov [stack_param+(r10+i)*8], r12
%assign i i+1
%endrep
- dec r9d
- jl .stack_setup_done ; no stack parameters
+
+ test r10d, r10d
+ jz .stack_setup_done ; no stack parameters
.copy_stack_parameter:
- mov r8, [stack_param+stack_offset+7*8+r9*8]
- mov [stack_param+r9*8], r8
- dec r9d
- jge .copy_stack_parameter
+ mov r12, [stack_param+stack_offset+8+r11*8]
+ CLOBBER_UPPER r12, clobber_mask_stack_bit
+ shr r9d, 1
+ mov [stack_param+r11*8], r12
+ inc r11d
+ cmp r11d, r10d
+ jl .copy_stack_parameter
.stack_setup_done:
%assign i 14
@@ -234,7 +229,11 @@ cglobal checked_call, 2, 15, 16, max_args*8+64+8
call t0
; check for stack corruption
- mov r0d, [num_stack_params]
+ mov r0d, [num_fn_args]
+ xor r3d, r3d
+ sub r0d, num_reg_args
+ cmovs r0d, r3d ; num stack args
+
mov r3, [rsp+stack_offset]
mov r4, [stack_param+r0*8]
not r3
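
For readers less fluent in x86inc macros, the per-parameter logic of CLOBBER_UPPER and the stack-parameter loop above roughly corresponds to the following C (illustrative only; the real work happens in the asm):

#include <stdint.h>

/* If the parameter's bit is set in the clobber mask, keep the low 32 bits
 * and fill the upper half with 0xdeadbeef junk. */
static uint64_t clobber_upper(uint64_t reg, unsigned mask, unsigned bit) {
    uint64_t junk = (uint32_t)reg | 0xdeadbeef00000000ull; /* mov r13d, %1d ; or r13, r8 */
    return (mask & bit) ? junk : reg;                      /* test r9b, %2 ; cmovnz %1, r13 */
}

/* Stack parameters are copied one slot at a time; the mask is shifted right
 * after each slot so the next parameter's bit lands on stack_bit, matching
 * the shr r9d, 1 in .copy_stack_parameter. stack_bit is 1 << num_reg_args:
 * 16 on Win64, 64 on SysV. */
static void copy_stack_params(uint64_t *dst, const uint64_t *src,
                              unsigned n, unsigned mask, unsigned stack_bit) {
    for (unsigned i = 0; i < n; i++) {
        dst[i] = clobber_upper(src[i], mask, stack_bit);
        mask >>= 1;
    }
}
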