valgrind/valgrind-3.18.1-amd64-more-spec-rules.patch
Mark Wielaard 85a0e79c10 valgrind 3.18.1 fixups
Resolves: #2025643
Arch fixups for valgrind 3.18.1
2021-11-24 18:02:11 +01:00

commit 595341b150312d2407bd43304449bf39ec3e1fa8
Author: Julian Seward <jseward@acm.org>
Date: Sat Nov 13 19:59:07 2021 +0100
amd64 front end: add more spec rules:
S after SHRQ
Z after SHLQ
NZ after SHLQ
Z after SHLL
S after SHLL
The lack of at least one of these was observed to cause occasional false
positives in Memcheck.
Plus add commented-out cases so as to complete the set of 12 rules
{Z,NZ,S,NS} after {SHRQ,SHLQ,SHLL}. Those cases are commented out because
so far I have not found any use cases for them.
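
For context, and not part of the upstream patch: these cc_op/cond pairings arise when a compiler lowers a test on a shifted value into a shift instruction followed by a conditional jump (or setcc) on the flags that the shift itself set. The standalone C sketch below is purely illustrative; the instruction sequences named in its comments ("shrq ; je", "shlq ; je", "shll ; je") are an assumption about compiler output, not something taken from the patch.

/* Hypothetical guest code, for illustration only.  A compiler may
   lower each test to a shift followed by a conditional jump on the
   shift's flags, i.e. the cc_op/cond pairs the spec rules handle. */
#include <stdint.h>
#include <stdio.h>

static int top_half_zero(uint64_t x)
{
   return (x >> 32) == 0;    /* e.g. "shrq $32 ; je"  -> Z after SHRQ */
}

static int low_half_zero(uint64_t x)
{
   return (x << 32) == 0;    /* e.g. "shlq $32 ; je"  -> Z after SHLQ (new) */
}

static int low_nibble_zero(uint32_t x)
{
   return (x << 28) == 0;    /* e.g. "shll $28 ; je"  -> Z after SHLL (new) */
}

int main(void)
{
   uint64_t v = 0x1234abcd00000000ULL;
   printf("%d %d %d\n",
          top_half_zero(v), low_half_zero(v), low_nibble_zero(0xabcdef00u));
   return 0;
}
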
diff --git a/VEX/priv/guest_amd64_helpers.c b/VEX/priv/guest_amd64_helpers.c
index 9d61e7a0f..ba71c1b62 100644
--- a/VEX/priv/guest_amd64_helpers.c
+++ b/VEX/priv/guest_amd64_helpers.c
@@ -1823,16 +1823,26 @@ IRExpr* guest_amd64_spechelper ( const HChar* function_name,
/*---------------- SHRQ ----------------*/

if (isU64(cc_op, AMD64G_CC_OP_SHRQ) && isU64(cond, AMD64CondZ)) {
- /* SHRQ, then Z --> test dep1 == 0 */
+ /* SHRQ, then Z --> test result[63:0] == 0 */
return unop(Iop_1Uto64,
binop(Iop_CmpEQ64, cc_dep1, mkU64(0)));
}
if (isU64(cc_op, AMD64G_CC_OP_SHRQ) && isU64(cond, AMD64CondNZ)) {
- /* SHRQ, then NZ --> test dep1 != 0 */
+ /* SHRQ, then NZ --> test result[63:0] != 0 */
return unop(Iop_1Uto64,
binop(Iop_CmpNE64, cc_dep1, mkU64(0)));
}

+ if (isU64(cc_op, AMD64G_CC_OP_SHRQ) && isU64(cond, AMD64CondS)) {
+ /* SHRQ, then S --> (ULong)result[63] (result is in dep1) */
+ return binop(Iop_Shr64, cc_dep1, mkU8(63));
+ }
+ // No known test case for this, hence disabled:
+ //if (isU64(cc_op, AMD64G_CC_OP_SHRQ) && isU64(cond, AMD64CondNS)) {
+ // /* SHRQ, then NS --> (ULong) ~ result[63] */
+ // vassert(0);
+ //}
+
/*---------------- SHRL ----------------*/

if (isU64(cc_op, AMD64G_CC_OP_SHRL) && isU64(cond, AMD64CondZ)) {
@@ -1881,6 +1891,52 @@ IRExpr* guest_amd64_spechelper ( const HChar* function_name,
// mkU32(0)));
//}

+ /*---------------- SHLQ ----------------*/
+
+ if (isU64(cc_op, AMD64G_CC_OP_SHLQ) && isU64(cond, AMD64CondZ)) {
+ /* SHLQ, then Z --> test dep1 == 0 */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpEQ64, cc_dep1, mkU64(0)));
+ }
+ if (isU64(cc_op, AMD64G_CC_OP_SHLQ) && isU64(cond, AMD64CondNZ)) {
+ /* SHLQ, then NZ --> test dep1 != 0 */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpNE64, cc_dep1, mkU64(0)));
+ }
+
+ //if (isU64(cc_op, AMD64G_CC_OP_SHLQ) && isU64(cond, AMD64CondS)) {
+ // /* SHLQ, then S --> (ULong)result[63] */
+ // vassert(0);
+ //}
+ //if (isU64(cc_op, AMD64G_CC_OP_SHLQ) && isU64(cond, AMD64CondNS)) {
+ // /* SHLQ, then NS --> (ULong) ~ result[63] */
+ // vassert(0);
+ //}
+
+ /*---------------- SHLL ----------------*/
+
+ if (isU64(cc_op, AMD64G_CC_OP_SHLL) && isU64(cond, AMD64CondZ)) {
+ /* SHLL, then Z --> test result[31:0] == 0 */
+ return unop(Iop_1Uto64,
+ binop(Iop_CmpEQ32, unop(Iop_64to32, cc_dep1),
+ mkU32(0)));
+ }
+ //if (isU64(cc_op, AMD64G_CC_OP_SHLL) && isU64(cond, AMD64CondNZ)) {
+ // /* SHLL, then NZ --> test dep1 != 0 */
+ // vassert(0);
+ //}
+
+ if (isU64(cc_op, AMD64G_CC_OP_SHLL) && isU64(cond, AMD64CondS)) {
+ /* SHLL, then S --> (ULong)result[31] */
+ return binop(Iop_And64,
+ binop(Iop_Shr64, cc_dep1, mkU8(31)),
+ mkU64(1));
+ }
+ //if (isU64(cc_op, AMD64G_CC_OP_SHLL) && isU64(cond, AMD64CondNS)) {
+ // /* SHLL, then NS --> (ULong) ~ result[31] */
+ // vassert(0);
+ //}
+
/*---------------- COPY ----------------*/
/* This can happen, as a result of amd64 FP compares: "comisd ... ;
jbe" for example. */
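
As a cross-check, the standalone sketch below restates each rule enabled above in plain C. The helper names are made up for illustration and are not part of VEX; 'result' plays the role of cc_dep1, which per the comments in the patch holds the shift result.

/* Sketch only: plain-C restatement of the enabled spec rules above.
   Names are hypothetical; 'result' corresponds to cc_dep1. */
#include <assert.h>

typedef unsigned long long ULong;
typedef unsigned int       UInt;

static ULong shrq_then_S (ULong result) { return result >> 63; }        /* Shr64(dep1, 63)           */
static ULong shlq_then_Z (ULong result) { return result == 0; }         /* 1Uto64(CmpEQ64(dep1, 0))  */
static ULong shlq_then_NZ(ULong result) { return result != 0; }         /* 1Uto64(CmpNE64(dep1, 0))  */
static ULong shll_then_Z (ULong result) { return (UInt)result == 0; }   /* CmpEQ32(64to32(dep1), 0)  */
static ULong shll_then_S (ULong result) { return (result >> 31) & 1; }  /* And64(Shr64(dep1, 31), 1) */

int main(void)
{
   assert(shrq_then_S(0x8000000000000000ULL) == 1);
   assert(shlq_then_Z(0) == 1 && shlq_then_NZ(0) == 0);
   assert(shll_then_Z(0xdead00000000ULL) == 1);   /* low 32 bits zero */
   assert(shll_then_S(0x80000000ULL) == 1);       /* bit 31 set       */
   return 0;
}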