 gdb/testsuite/ChangeLog                      | 5 +++++
 gdb/testsuite/gdb.reverse/insn-reverse-x86.c | 8 ++++++++
 2 files changed, 13 insertions(+), 0 deletions(-)
diff --git a/gdb/testsuite/ChangeLog b/gdb/testsuite/ChangeLog
index 36953af..e7408a0 100644
--- a/gdb/testsuite/ChangeLog
+++ b/gdb/testsuite/ChangeLog
@@ -1,5 +1,10 @@
2020-12-04 Tom de Vries <tdevries@suse.de>
+ * gdb.reverse/insn-reverse-x86.c: Guard x86_64 assembly with #ifdef
+ __x86_64__.
+
+2020-12-04 Tom de Vries <tdevries@suse.de>
+
* gdb.reverse/insn-reverse.c (test_nr): New var.
(usage, parse_args): New function.
(main): Call parse_args. Only run test for test_nr.
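
The change itself is mechanical: every rdrand/rdseed variant that touches r8-r15 (or shuffles values through %rdi/%rax) only assembles in 64-bit mode, so those statements are bracketed with #ifdef __x86_64__ while the 16/32-bit register variants stay unconditional. Below is a minimal standalone sketch of the same pattern; it mirrors the test file's hard-coded-register idiom, but the variable type, function name, and the added main are assumptions for illustration, not copied from the actual insn-reverse-x86.c.

    #include <stdint.h>

    /* Sketch of the guard pattern applied in this patch (assumed names
       and types, not the real test file).  Executing it needs a CPU
       with RDRAND; the point is that it compiles both as a 64-bit
       object and with -m32.  */

    static void
    rdrand_sketch (void)
    {
      uintptr_t number = 0;

      /* %eax exists on both i386 and x86_64, so no guard is needed.  */
      __asm__ volatile ("rdrand %%eax;" : "=r" (number));

    #ifdef __x86_64__
      /* r8-r15 and their 16/32-bit sub-registers exist only in 64-bit
         mode; a 32-bit build compiles these statements out.  */
      __asm__ volatile ("rdrand %%r8w;" : "=r" (number));
      __asm__ volatile ("rdrand %%r15;" : "=r" (number));
    #endif
    }

    int
    main (void)
    {
      rdrand_sketch ();
      return 0;
    }
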
diff --git a/gdb/testsuite/gdb.reverse/insn-reverse-x86.c b/gdb/testsuite/gdb.reverse/insn-reverse-x86.c
index 22ba97a..4392cb6 100644
--- a/gdb/testsuite/gdb.reverse/insn-reverse-x86.c
+++ b/gdb/testsuite/gdb.reverse/insn-reverse-x86.c
@@ -85,6 +85,7 @@ rdrand (void)
__asm__ volatile ("rdrand %%sp;" : "=r" (number));
__asm__ volatile ("mov %%ax, %%sp;" : "=r" (number));
+#ifdef __x86_64__
__asm__ volatile ("rdrand %%r8w;" : "=r" (number));
__asm__ volatile ("rdrand %%r9w;" : "=r" (number));
__asm__ volatile ("rdrand %%r10w;" : "=r" (number));
@@ -93,6 +94,7 @@ rdrand (void)
__asm__ volatile ("rdrand %%r13w;" : "=r" (number));
__asm__ volatile ("rdrand %%r14w;" : "=r" (number));
__asm__ volatile ("rdrand %%r15w;" : "=r" (number));
+#endif
/* 32-bit random numbers. */
__asm__ volatile ("rdrand %%eax;" : "=r" (number));
@@ -100,6 +102,7 @@ rdrand (void)
__asm__ volatile ("rdrand %%ecx;" : "=r" (number));
__asm__ volatile ("rdrand %%edx;" : "=r" (number));
+#ifdef __x86_64__
__asm__ volatile ("mov %%rdi, %%rax;" : "=r" (number));
__asm__ volatile ("rdrand %%edi;" : "=r" (number));
__asm__ volatile ("mov %%rax, %%rdi;" : "=r" (number));
@@ -155,6 +158,7 @@ rdrand (void)
__asm__ volatile ("rdrand %%r13;" : "=r" (number));
__asm__ volatile ("rdrand %%r14;" : "=r" (number));
__asm__ volatile ("rdrand %%r15;" : "=r" (number));
+#endif
}
/* Test rdseed support for various output registers. */
@@ -190,6 +194,7 @@ rdseed (void)
__asm__ volatile ("rdseed %%sp;" : "=r" (seed));
__asm__ volatile ("mov %%ax, %%sp;" : "=r" (seed));
+#ifdef __x86_64__
__asm__ volatile ("rdseed %%r8w;" : "=r" (seed));
__asm__ volatile ("rdseed %%r9w;" : "=r" (seed));
__asm__ volatile ("rdseed %%r10w;" : "=r" (seed));
@@ -198,6 +203,7 @@ rdseed (void)
__asm__ volatile ("rdseed %%r13w;" : "=r" (seed));
__asm__ volatile ("rdseed %%r14w;" : "=r" (seed));
__asm__ volatile ("rdseed %%r15w;" : "=r" (seed));
+#endif
/* 32-bit random seeds. */
__asm__ volatile ("rdseed %%eax;" : "=r" (seed));
@@ -205,6 +211,7 @@ rdseed (void)
__asm__ volatile ("rdseed %%ecx;" : "=r" (seed));
__asm__ volatile ("rdseed %%edx;" : "=r" (seed));
+#ifdef __x86_64__
__asm__ volatile ("mov %%rdi, %%rax;" : "=r" (seed));
__asm__ volatile ("rdseed %%edi;" : "=r" (seed));
__asm__ volatile ("mov %%rax, %%rdi;" : "=r" (seed));
@@ -260,6 +267,7 @@ rdseed (void)
__asm__ volatile ("rdseed %%r13;" : "=r" (seed));
__asm__ volatile ("rdseed %%r14;" : "=r" (seed));
__asm__ volatile ("rdseed %%r15;" : "=r" (seed));
+#endif
}
/* Initialize arch-specific bits. */
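
Because the guard is a pure compile-time check on the __x86_64__ predefine, its effect can be observed without GDB at all: building the same translation unit once normally and once with -m32 keeps or drops the r8-r15 paths accordingly. The stand-in program below is hypothetical (file name, messages, and compile commands are only illustrative), just to make that visible.

    /* Hypothetical stand-alone check (not part of the patch): build with
       "gcc check.c" and "gcc -m32 check.c" and run each binary to see
       which branch that build selected.  */
    #include <stdio.h>

    int
    main (void)
    {
    #ifdef __x86_64__
      puts ("64-bit build: r8-r15 rdrand/rdseed statements compiled in");
    #else
      puts ("32-bit build: r8-r15 rdrand/rdseed statements compiled out");
    #endif
      return 0;
    }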