aboutsummaryrefslogtreecommitdiff
path: root/llvm/test/CodeGen/RISCV/rvv/fixed-vectors-vandn.ll
blob: ae7f8ed78aa064a5c454808ac4fb1ab37297e5ab (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+v -verify-machineinstrs < %s | FileCheck %s
; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s
; RUN: llc -mtriple=riscv32 -mattr=+v,+zvkb -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK-ZVKB
; RUN: llc -mtriple=riscv64 -mattr=+v,+zvkb -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK-ZVKB

; Select %b where %a is non-negative, zero elsewhere. With +zvkb this should
; lower to a sign-bit broadcast (vsra.vi by 7) followed by vandn.vv, instead
; of the generic mask-compare + vmerge sequence used by the base V extension.
define <8 x i8> @not_signbit_mask_v8i8(<8 x i8> %a, <8 x i8> %b) {
; CHECK-LABEL: not_signbit_mask_v8i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
; CHECK-NEXT:    vmsgt.vi v0, v8, -1
; CHECK-NEXT:    vmv.v.i v8, 0
; CHECK-NEXT:    vmerge.vvm v8, v8, v9, v0
; CHECK-NEXT:    ret
;
; CHECK-ZVKB-LABEL: not_signbit_mask_v8i8:
; CHECK-ZVKB:       # %bb.0:
; CHECK-ZVKB-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
; CHECK-ZVKB-NEXT:    vsra.vi v8, v8, 7
; CHECK-ZVKB-NEXT:    vandn.vv v8, v9, v8
; CHECK-ZVKB-NEXT:    ret
  ; True in lanes where the sign bit of %a is clear (a > -1).
  %cond = icmp sgt <8 x i8> %a, splat (i8 -1)
  ; Equivalent to b & ~(a >> 7) per lane.
  %r = select <8 x i1> %cond, <8 x i8> %b, <8 x i8> zeroinitializer
  ret <8 x i8> %r
}

; Same pattern as the v8i8 case at i16 element width: select %b where %a is
; non-negative, zero elsewhere. +zvkb folds this to vsra.vi by 15 + vandn.vv.
define <4 x i16> @not_signbit_mask_v4i16(<4 x i16> %a, <4 x i16> %b) {
; CHECK-LABEL: not_signbit_mask_v4i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-NEXT:    vmsgt.vi v0, v8, -1
; CHECK-NEXT:    vmv.v.i v8, 0
; CHECK-NEXT:    vmerge.vvm v8, v8, v9, v0
; CHECK-NEXT:    ret
;
; CHECK-ZVKB-LABEL: not_signbit_mask_v4i16:
; CHECK-ZVKB:       # %bb.0:
; CHECK-ZVKB-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-ZVKB-NEXT:    vsra.vi v8, v8, 15
; CHECK-ZVKB-NEXT:    vandn.vv v8, v9, v8
; CHECK-ZVKB-NEXT:    ret
  ; True in lanes where the sign bit of %a is clear (a > -1).
  %cond = icmp sgt <4 x i16> %a, splat (i16 -1)
  ; Equivalent to b & ~(a >> 15) per lane.
  %r = select <4 x i1> %cond, <4 x i16> %b, <4 x i16> zeroinitializer
  ret <4 x i16> %r
}

; Same pattern at i32 element width: select %b where %a is non-negative,
; zero elsewhere. +zvkb folds this to vsra.vi by 31 + vandn.vv.
define <2 x i32> @not_signbit_mask_v2i32(<2 x i32> %a, <2 x i32> %b) {
; CHECK-LABEL: not_signbit_mask_v2i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vmsgt.vi v0, v8, -1
; CHECK-NEXT:    vmv.v.i v8, 0
; CHECK-NEXT:    vmerge.vvm v8, v8, v9, v0
; CHECK-NEXT:    ret
;
; CHECK-ZVKB-LABEL: not_signbit_mask_v2i32:
; CHECK-ZVKB:       # %bb.0:
; CHECK-ZVKB-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-ZVKB-NEXT:    vsra.vi v8, v8, 31
; CHECK-ZVKB-NEXT:    vandn.vv v8, v9, v8
; CHECK-ZVKB-NEXT:    ret
  ; True in lanes where the sign bit of %a is clear (a > -1).
  %cond = icmp sgt <2 x i32> %a, splat (i32 -1)
  ; Equivalent to b & ~(a >> 31) per lane.
  %r = select <2 x i1> %cond, <2 x i32> %b, <2 x i32> zeroinitializer
  ret <2 x i32> %r
}

; Same pattern at i64 element width. Here the shift amount (63) does not fit
; in vsra.vi's immediate field, so the +zvkb lowering materializes it in a
; scalar register (li a0, 63) and uses vsra.vx before vandn.vv.
define <2 x i64> @not_signbit_mask_v2i64(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: not_signbit_mask_v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vmsgt.vi v0, v8, -1
; CHECK-NEXT:    vmv.v.i v8, 0
; CHECK-NEXT:    vmerge.vvm v8, v8, v9, v0
; CHECK-NEXT:    ret
;
; CHECK-ZVKB-LABEL: not_signbit_mask_v2i64:
; CHECK-ZVKB:       # %bb.0:
; CHECK-ZVKB-NEXT:    li a0, 63
; CHECK-ZVKB-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-ZVKB-NEXT:    vsra.vx v8, v8, a0
; CHECK-ZVKB-NEXT:    vandn.vv v8, v9, v8
; CHECK-ZVKB-NEXT:    ret
  ; True in lanes where the sign bit of %a is clear (a > -1).
  %cond = icmp sgt <2 x i64> %a, splat (i64 -1)
  ; Equivalent to b & ~(a >> 63) per lane.
  %r = select <2 x i1> %cond, <2 x i64> %b, <2 x i64> zeroinitializer
  ret <2 x i64> %r
}