llvm/test/CodeGen/X86/fixup-bw-copy.ll
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -verify-machineinstrs -fixup-byte-word-insts=1 -mtriple=x86_64-- < %s | FileCheck --check-prefix=X64 %s
; RUN: llc -verify-machineinstrs -fixup-byte-word-insts=0 -mtriple=x86_64-- < %s | FileCheck --check-prefix=X64 %s
; RUN: llc -verify-machineinstrs -fixup-byte-word-insts=1 -mtriple=i386-- < %s | FileCheck --check-prefixes=X86,X86-BWON %s
; RUN: llc -verify-machineinstrs -fixup-byte-word-insts=0 -mtriple=i386-- < %s | FileCheck --check-prefixes=X86,X86-BWOFF %s
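;
; Note (added): -fixup-byte-word-insts toggles the X86 byte/word instruction
; fixup pass, which widens 8/16-bit moves to 32-bit zero-extending forms where
; that is safe, to avoid partial-register dependencies. The 64-bit checks are
; identical with the pass on or off because the copy is already a 32-bit movl.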

target datalayout = "e-m:o-p:32:32-f64:32:64-f80:128-n8:16:32-S128"

define i8 @test_movb(i8 %a0) nounwind {
; X64-LABEL: test_movb:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
;
; X86-BWON-LABEL: test_movb:
; X86-BWON:       # %bb.0:
; X86-BWON-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-BWON-NEXT:    retl
;
; X86-BWOFF-LABEL: test_movb:
; X86-BWOFF:       # %bb.0:
; X86-BWOFF-NEXT:    movb {{[0-9]+}}(%esp), %al
; X86-BWOFF-NEXT:    retl
  ret i8 %a0
}

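; Under optsize the byte load stays as movb on 32-bit targets even with the
; fixup enabled (note the shared X86 prefix below), presumably because the
; widened movzbl form has a longer encoding.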
define i8 @test_movb_Os(i8 %a0) nounwind optsize {
; X64-LABEL: test_movb_Os:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
;
; X86-LABEL: test_movb_Os:
; X86:       # %bb.0:
; X86-NEXT:    movb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
  ret i8 %a0
}

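; Same as above for minsize: the byte load is not widened.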
define i8 @test_movb_Oz(i8 %a0) nounwind minsize {
; X64-LABEL: test_movb_Oz:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
;
; X86-LABEL: test_movb_Oz:
; X86:       # %bb.0:
; X86-NEXT:    movb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
  ret i8 %a0
}

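; The 16-bit case mirrors test_movb: the word load is widened to movzwl only
; when the fixup pass is enabled.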
define i16 @test_movw(i16 %a0) {
; X64-LABEL: test_movw:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
;
; X86-BWON-LABEL: test_movw:
; X86-BWON:       # %bb.0:
; X86-BWON-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-BWON-NEXT:    retl
;
; X86-BWOFF-LABEL: test_movw:
; X86-BWOFF:       # %bb.0:
; X86-BWOFF-NEXT:    movw {{[0-9]+}}(%esp), %ax
; X86-BWOFF-NEXT:    retl
  ret i16 %a0
}

; Verify we don't mess with H-reg copies (only generated in 32-bit mode).
define i8 @test_movb_hreg(i16 %a0) {
; X64-LABEL: test_movb_hreg:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl $8, %eax
; X64-NEXT:    addl %edi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
;
; X86-LABEL: test_movb_hreg:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addb %al, %ah
; X86-NEXT:    movb %ah, %al
; X86-NEXT:    retl
  %tmp0 = trunc i16 %a0 to i8
  %tmp1 = lshr i16 %a0, 8
  %tmp2 = trunc i16 %tmp1 to i8
  %tmp3 = add i8 %tmp0, %tmp2
  ret i8 %tmp3
}