|
From: <sv...@va...> - 2010-09-03 15:14:50
|
Author: sewardj
Date: 2010-09-03 16:14:41 +0100 (Fri, 03 Sep 2010)
New Revision: 11336
Log:
Make the leak tests a whole lot less flaky on ppc32/64-linux by
zeroing out caller saves registers before the leak check. We should
really do this on all platforms, not just these.
(Maynard Johnson, may...@us...)
Modified:
trunk/memcheck/tests/leak-cases.c
trunk/memcheck/tests/leak-cycle.c
trunk/memcheck/tests/leak.h
Modified: trunk/memcheck/tests/leak-cases.c
===================================================================
--- trunk/memcheck/tests/leak-cases.c 2010-09-03 14:36:50 UTC (rev 11335)
+++ trunk/memcheck/tests/leak-cases.c 2010-09-03 15:14:41 UTC (rev 11336)
@@ -106,6 +106,7 @@
// counting in main() avoids the problem.
f();
+ CLEAR_CALLER_SAVED_REGS;
GET_FINAL_LEAK_COUNTS;
PRINT_LEAK_COUNTS(stderr);
Modified: trunk/memcheck/tests/leak-cycle.c
===================================================================
--- trunk/memcheck/tests/leak-cycle.c 2010-09-03 14:36:50 UTC (rev 11335)
+++ trunk/memcheck/tests/leak-cycle.c 2010-09-03 15:14:41 UTC (rev 11336)
@@ -68,6 +68,8 @@
c1 = c2 = 0;
+ CLEAR_CALLER_SAVED_REGS;
+
GET_FINAL_LEAK_COUNTS;
PRINT_LEAK_COUNTS(stderr);
Modified: trunk/memcheck/tests/leak.h
===================================================================
--- trunk/memcheck/tests/leak.h 2010-09-03 14:36:50 UTC (rev 11335)
+++ trunk/memcheck/tests/leak.h 2010-09-03 15:14:41 UTC (rev 11336)
@@ -41,3 +41,27 @@
S_bytes,S_blocks); \
} while (0)
+/* Upon a call to a function, some architectures store pointers
+ * into registers. Valgrind may consider these registers when determining
+ * whether an address is reachable, so we need to zero out these registers
+ * as needed.
+ */
+#if defined __powerpc__
+#define CLEAR_CALLER_SAVED_REGS \
+ do { \
+ __asm__ __volatile__( "li 3, 0" : : :/*trash*/"r3" ); \
+ __asm__ __volatile__( "li 4, 0" : : :/*trash*/"r4" ); \
+ __asm__ __volatile__( "li 5, 0" : : :/*trash*/"r5" ); \
+ __asm__ __volatile__( "li 6, 0" : : :/*trash*/"r6" ); \
+ __asm__ __volatile__( "li 7, 0" : : :/*trash*/"r7" ); \
+ __asm__ __volatile__( "li 8, 0" : : :/*trash*/"r8" ); \
+ __asm__ __volatile__( "li 9, 0" : : :/*trash*/"r9" ); \
+ __asm__ __volatile__( "li 10, 0" : : :/*trash*/"r10" ); \
+ __asm__ __volatile__( "li 11, 0" : : :/*trash*/"r11" ); \
+ __asm__ __volatile__( "li 12, 0" : : :/*trash*/"r12" ); \
+ } while (0)
+#else
+#define CLEAR_CALLER_SAVED_REGS /*nothing*/
+#endif
+
+
|