
github.com/videolan/dav1d.git
Diffstat (limited to 'tests/checkasm/checkasm.h')
-rw-r--r--  tests/checkasm/checkasm.h | 65
1 file changed, 42 insertions(+), 23 deletions(-)
diff --git a/tests/checkasm/checkasm.h b/tests/checkasm/checkasm.h
index fb5f8ed..29c1dbe 100644
--- a/tests/checkasm/checkasm.h
+++ b/tests/checkasm/checkasm.h
@@ -185,17 +185,6 @@ static inline uint64_t readtime(void) {
void checkasm_checked_call(void *func, ...);
#if ARCH_X86_64
-/* Evil hack: detect incorrect assumptions that 32-bit ints are zero-extended
- * to 64-bit. This is done by clobbering the stack with junk around the stack
- * pointer and calling the assembly function through checked_call() with added
- * dummy arguments which forces all real arguments to be passed on the stack
- * and not in registers. For 32-bit arguments the upper half of the 64-bit
- * register locations on the stack will now contain junk which will cause
- * misbehaving functions to either produce incorrect output or segfault. Note
- * that even though this works extremely well in practice, it's technically
- * not guaranteed and false negatives are theoretically possible, but there
- * can never be any false positives. */
-void checkasm_stack_clobber(uint64_t clobber, ...);
/* YMM and ZMM registers on x86 are turned off to save power when they haven't
* been used for some period of time. When they are used there will be a
* "warmup" period during which performance will be reduced and inconsistent
@@ -203,24 +192,54 @@ void checkasm_stack_clobber(uint64_t clobber, ...);
* work around this by periodically issuing "dummy" instructions that use
* those registers to keep them powered on. */
void checkasm_simd_warmup(void);
-#define declare_new(ret, ...)\
- ret (*checked_call)(void *, int, int, int, int, int, __VA_ARGS__,\
- int, int, int, int, int, int, int, int,\
- int, int, int, int, int, int, int) =\
- (void *)checkasm_checked_call;
-#define CLOB (UINT64_C(0xdeadbeefdeadbeef))
+
+/* The upper 32 bits of 32-bit data types are undefined when passed as function
+ * parameters. In practice those bits usually end up being zero which may hide
+ * certain bugs, such as using a register containing undefined bits as a pointer
+ * offset, so we want to intentionally clobber those bits with junk to expose
+ * any issues. The following set of macros automatically calculates a bitmask
+ * specifying which parameters should have their upper halves clobbered. */
#ifdef _WIN32
-#define STACKARGS 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 0, 0, 0
+/* Integer and floating-point parameters share "register slots". */
+#define IGNORED_FP_ARGS 0
#else
-#define STACKARGS 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 0, 0, 0, 0, 0
+/* Up to 8 floating-point parameters are passed in XMM registers, which are
+ * handled orthogonally from integer parameters passed in GPR registers. */
+#define IGNORED_FP_ARGS 8
#endif
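The reason IGNORED_FP_ARGS differs is how the two ABIs assign registers, shown
here for an illustrative declaration (standard ABI behavior, not dav1d-specific):

    void f(int a, float b, int c);

    /* SysV (Linux/macOS): integer and FP arguments are counted separately:
     *   a -> edi, b -> xmm0, c -> esi
     * 'b' consumes no integer slot, so the first 8 FP arguments are skipped
     * when computing slot positions (IGNORED_FP_ARGS == 8).
     *
     * Win64: the first four arguments share positional slots:
     *   a -> ecx, b -> xmm1, c -> r8d
     * 'b' occupies slot 1 even though it travels in XMM1, so every FP
     * argument must advance the slot counter (IGNORED_FP_ARGS == 0). */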
+#ifdef HAVE_C11_GENERIC
+#define clobber_type(arg) _Generic((void (*)(void*, arg))NULL,\
+ void (*)(void*, int32_t ): clobber_mask |= 1 << mpos++,\
+ void (*)(void*, uint32_t): clobber_mask |= 1 << mpos++,\
+ void (*)(void*, float ): mpos += (fp_args++ >= IGNORED_FP_ARGS),\
+ void (*)(void*, double ): mpos += (fp_args++ >= IGNORED_FP_ARGS),\
+ default: mpos++)
+#define init_clobber_mask(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, ...)\
+ unsigned clobber_mask = 0;\
+ {\
+ int mpos = 0, fp_args = 0;\
+ clobber_type(a); clobber_type(b); clobber_type(c); clobber_type(d);\
+ clobber_type(e); clobber_type(f); clobber_type(g); clobber_type(h);\
+ clobber_type(i); clobber_type(j); clobber_type(k); clobber_type(l);\
+ clobber_type(m); clobber_type(n); clobber_type(o); clobber_type(p);\
+ }
+#else
+/* Skip parameter clobbering on compilers without support for _Generic() */
+#define init_clobber_mask(...) unsigned clobber_mask = 0
+#endif
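To make the mask computation concrete, here is a hypothetical standalone C11
program (not part of checkasm) that replays the macro logic for one example
signature on a SysV target:

    #include <stdint.h>
    #include <stdio.h>

    #define IGNORED_FP_ARGS 8  /* SysV: first 8 FP args travel in XMM0-XMM7 */

    #define clobber_type(arg) _Generic((void (*)(void*, arg))NULL,\
        void (*)(void*, int32_t ): clobber_mask |= 1u << mpos++,\
        void (*)(void*, uint32_t): clobber_mask |= 1u << mpos++,\
        void (*)(void*, float   ): mpos += (fp_args++ >= IGNORED_FP_ARGS),\
        void (*)(void*, double  ): mpos += (fp_args++ >= IGNORED_FP_ARGS),\
        default: mpos++)

    int main(void) {
        unsigned clobber_mask = 0;
        int mpos = 0, fp_args = 0;

        /* Example signature: void fn(void *dst, int w, float scale, int h) */
        clobber_type(void *); /* 64-bit: occupies slot 0, nothing to clobber */
        clobber_type(int);    /* 32-bit: sets bit 1 of the mask */
        clobber_type(float);  /* passed in XMM0: consumes no integer slot */
        clobber_type(int);    /* 32-bit: sets bit 2 of the mask */

        printf("clobber_mask = 0x%x\n", clobber_mask); /* prints 0x6 */
        return 0;
    }

The assembly side of checked_call can then, for each set bit, fill the upper
half of the corresponding argument with junk before invoking func_new.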
+#define declare_new(ret, ...)\
+ ret (*checked_call)(__VA_ARGS__, int, int, int, int, int, int, int,\
+ int, int, int, int, int, int, int, int, int,\
+ void*, unsigned) =\
+ (void*)checkasm_checked_call;\
+ init_clobber_mask(__VA_ARGS__, void*, void*, void*, void*,\
+ void*, void*, void*, void*, void*, void*,\
+ void*, void*, void*, void*, void*);
#define call_new(...)\
(checkasm_set_signal_handler_state(1),\
checkasm_simd_warmup(),\
- checkasm_stack_clobber(CLOB, CLOB, CLOB, CLOB, CLOB, CLOB, CLOB,\
- CLOB, CLOB, CLOB, CLOB, CLOB, CLOB, CLOB,\
- CLOB, CLOB, CLOB, CLOB, CLOB, CLOB, CLOB),\
- checked_call(func_new, 0, 0, 0, 0, 0, __VA_ARGS__, STACKARGS));\
+ checked_call(__VA_ARGS__, 16, 15, 14, 13, 12, 11, 10, 9, 8,\
+ 7, 6, 5, 4, 3, 2, 1, func_new, clobber_mask));\
checkasm_set_signal_handler_state(0)
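For context, these macros sit underneath checkasm's test wrappers; a typical
test looks roughly like this sketch (the DSP function and buffers are invented,
while check_func/call_ref/call_new/fail are existing checkasm helpers):

    declare_func(void, uint8_t *dst, ptrdiff_t stride, int w, int h);

    if (check_func(dsp->some_fn, "some_fn_w%d", w)) {
        call_ref(ref_buf, stride, w, h); /* C reference implementation */
        call_new(new_buf, stride, w, h); /* asm under test; on x86-64 the
                                          * upper halves of 'w' and 'h' now
                                          * carry junk per clobber_mask */
        if (memcmp(ref_buf, new_buf, (size_t)h * stride)) fail();
    }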
#elif ARCH_X86_32
#define declare_new(ret, ...)\