path: root/gdb/testsuite/gdb.reverse/insn-reverse-x86.c
author     Tom de Vries <tdevries@suse.de>    2020-12-04 13:36:47 +0100
committer  Tom de Vries <tdevries@suse.de>    2020-12-04 13:36:47 +0100
commit     a1499830feea0dc134e8b08d7cc5b6a52e370294 (patch)
tree       caaf3dd7059a43ac12797a9b7a3b4ccdf884f5b4 /gdb/testsuite/gdb.reverse/insn-reverse-x86.c
parent     9c027c2f6c5cdb3db0b8c72c06b691c5ba502279 (diff)
[gdb/testsuite] Fix gdb.reverse/insn-reverse-x86.c for -m32
When running test-case gdb.reverse/insn-reverse.exp with target board
unix/-m32, we get:
...
spawn -ignore SIGHUP gcc -fno-stack-protector -fdiagnostics-color=never \
  -c -g -m32 -o insn-reverse0.o insn-reverse.c^M
insn-reverse-x86.c: Assembler messages:^M
insn-reverse-x86.c:88: Error: bad register name `%r8w'^M
...

Fix this by guarding x86_64 assembly in insn-reverse-x86.c with
#ifdef __x86_64__.

Tested on x86_64-linux, with native and unix/-m32.

gdb/testsuite/ChangeLog:

2020-12-04  Tom de Vries  <tdevries@suse.de>

	* gdb.reverse/insn-reverse-x86.c: Guard x86_64 assembly with
	#ifdef __x86_64__.
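Background on the failure: with -m32, the assembler targets IA-32 and only
accepts the 32-bit register set, so any reference to an x86_64-only
register (%r8-%r15, their %r8w-style sub-registers, or 64-bit names such
as %rdi) is rejected with "bad register name". Since gcc predefines
__x86_64__ only when targeting 64-bit code, an #ifdef __x86_64__ guard
makes those statements disappear from -m32 builds entirely. A minimal
sketch of the pattern, using the test's own `number' variable:

  unsigned int number = 0;

  /* Accepted by both -m32 and -m64 builds.  */
  __asm__ volatile ("rdrand %%ax;" : "=r" (number));

#ifdef __x86_64__
  /* %r8w exists only when targeting x86_64; unguarded, a -m32 build
     fails at assembly time with: bad register name `%r8w'.  */
  __asm__ volatile ("rdrand %%r8w;" : "=r" (number));
#endif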
Diffstat (limited to 'gdb/testsuite/gdb.reverse/insn-reverse-x86.c')
-rw-r--r--  gdb/testsuite/gdb.reverse/insn-reverse-x86.c | 8 ++++++++
1 file changed, 8 insertions(+), 0 deletions(-)
diff --git a/gdb/testsuite/gdb.reverse/insn-reverse-x86.c b/gdb/testsuite/gdb.reverse/insn-reverse-x86.c
index 22ba97a..4392cb6 100644
--- a/gdb/testsuite/gdb.reverse/insn-reverse-x86.c
+++ b/gdb/testsuite/gdb.reverse/insn-reverse-x86.c
@@ -85,6 +85,7 @@ rdrand (void)
__asm__ volatile ("rdrand %%sp;" : "=r" (number));
__asm__ volatile ("mov %%ax, %%sp;" : "=r" (number));
+#ifdef __x86_64__
__asm__ volatile ("rdrand %%r8w;" : "=r" (number));
__asm__ volatile ("rdrand %%r9w;" : "=r" (number));
__asm__ volatile ("rdrand %%r10w;" : "=r" (number));
@@ -93,6 +94,7 @@ rdrand (void)
__asm__ volatile ("rdrand %%r13w;" : "=r" (number));
__asm__ volatile ("rdrand %%r14w;" : "=r" (number));
__asm__ volatile ("rdrand %%r15w;" : "=r" (number));
+#endif
/* 32-bit random numbers. */
__asm__ volatile ("rdrand %%eax;" : "=r" (number));
@@ -100,6 +102,7 @@ rdrand (void)
__asm__ volatile ("rdrand %%ecx;" : "=r" (number));
__asm__ volatile ("rdrand %%edx;" : "=r" (number));
+#ifdef __x86_64__
__asm__ volatile ("mov %%rdi, %%rax;" : "=r" (number));
__asm__ volatile ("rdrand %%edi;" : "=r" (number));
__asm__ volatile ("mov %%rax, %%rdi;" : "=r" (number));
@@ -155,6 +158,7 @@ rdrand (void)
__asm__ volatile ("rdrand %%r13;" : "=r" (number));
__asm__ volatile ("rdrand %%r14;" : "=r" (number));
__asm__ volatile ("rdrand %%r15;" : "=r" (number));
+#endif
}
/* Test rdseed support for various output registers. */
@@ -190,6 +194,7 @@ rdseed (void)
__asm__ volatile ("rdseed %%sp;" : "=r" (seed));
__asm__ volatile ("mov %%ax, %%sp;" : "=r" (seed));
+#ifdef __x86_64__
__asm__ volatile ("rdseed %%r8w;" : "=r" (seed));
__asm__ volatile ("rdseed %%r9w;" : "=r" (seed));
__asm__ volatile ("rdseed %%r10w;" : "=r" (seed));
@@ -198,6 +203,7 @@ rdseed (void)
__asm__ volatile ("rdseed %%r13w;" : "=r" (seed));
__asm__ volatile ("rdseed %%r14w;" : "=r" (seed));
__asm__ volatile ("rdseed %%r15w;" : "=r" (seed));
+#endif
/* 32-bit random seeds. */
__asm__ volatile ("rdseed %%eax;" : "=r" (seed));
@@ -205,6 +211,7 @@ rdseed (void)
__asm__ volatile ("rdseed %%ecx;" : "=r" (seed));
__asm__ volatile ("rdseed %%edx;" : "=r" (seed));
+#ifdef __x86_64__
__asm__ volatile ("mov %%rdi, %%rax;" : "=r" (seed));
__asm__ volatile ("rdseed %%edi;" : "=r" (seed));
__asm__ volatile ("mov %%rax, %%rdi;" : "=r" (seed));
@@ -260,6 +267,7 @@ rdseed (void)
__asm__ volatile ("rdseed %%r13;" : "=r" (seed));
__asm__ volatile ("rdseed %%r14;" : "=r" (seed));
__asm__ volatile ("rdseed %%r15;" : "=r" (seed));
+#endif
}
/* Initialize arch-specific bits. */
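To double-check the guard outside the testsuite, a stripped-down
translation unit can be compiled both ways. This is a hypothetical
sketch (the file name guard-check.c and the build commands are
illustrative, not part of the patch; actually running it assumes a CPU
with RDRAND support, which the testsuite checks for separately):

/* guard-check.c -- should compile under both modes:
     gcc -c -m64 guard-check.c
     gcc -c -m32 guard-check.c
   Without the #ifdef guard, the -m32 build fails at assembly time.  */

unsigned int
rdrand_sample (void)
{
  unsigned int number = 0;

  /* 32-bit rdrand into a compiler-chosen register: valid in both
     modes.  The CF success flag is ignored, as in the test itself.  */
  __asm__ volatile ("rdrand %0" : "=r" (number));

#ifdef __x86_64__
  unsigned long long wide = 0;

  /* %r15 exists only in 64-bit mode; the guard keeps -m32 builds from
     ever seeing it.  Declaring the "r15" clobber tells the compiler
     not to allocate that register for the output operand.  */
  __asm__ volatile ("rdrand %%r15; mov %%r15, %0"
		    : "=r" (wide) : : "r15");
  number ^= (unsigned int) wide;
#endif

  return number;
}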