//=- LoongArchLASXInstrInfo.td - LoongArch LASX instructions -*- tablegen -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file describes the Advanced SIMD extension instructions.
//
//===----------------------------------------------------------------------===//

// Target nodes.
def loongarch_xvpermi: SDNode<"LoongArchISD::XVPERMI", SDT_LoongArchV1RUimm>;
def loongarch_xvmskltz: SDNode<"LoongArchISD::XVMSKLTZ", SDT_LoongArchVMSKCOND>;
def loongarch_xvmskgez: SDNode<"LoongArchISD::XVMSKGEZ", SDT_LoongArchVMSKCOND>;
def loongarch_xvmskeqz: SDNode<"LoongArchISD::XVMSKEQZ", SDT_LoongArchVMSKCOND>;
def loongarch_xvmsknez: SDNode<"LoongArchISD::XVMSKNEZ", SDT_LoongArchVMSKCOND>;

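// PatFrags matching a build_vector whose lanes are all the same node, i.e. a
// vector splat, for each 256-bit LASX element type.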
def lasxsplati8
  : PatFrag<(ops node:$e0),
            (v32i8 (build_vector node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0))>;
def lasxsplati16
  : PatFrag<(ops node:$e0),
            (v16i16 (build_vector node:$e0, node:$e0, node:$e0, node:$e0,
                                  node:$e0, node:$e0, node:$e0, node:$e0,
                                  node:$e0, node:$e0, node:$e0, node:$e0,
                                  node:$e0, node:$e0, node:$e0, node:$e0))>;
def lasxsplati32
  : PatFrag<(ops node:$e0),
            (v8i32 (build_vector node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0))>;
def lasxsplati64
  : PatFrag<(ops node:$e0),
            (v4i64 (build_vector node:$e0, node:$e0, node:$e0, node:$e0))>;
def lasxsplatf32
  : PatFrag<(ops node:$e0),
            (v8f32 (build_vector node:$e0, node:$e0, node:$e0, node:$e0,
                                 node:$e0, node:$e0, node:$e0, node:$e0))>;
def lasxsplatf64
  : PatFrag<(ops node:$e0),
            (v4f64 (build_vector node:$e0, node:$e0, node:$e0, node:$e0))>;

//===----------------------------------------------------------------------===//
// Instruction class templates
//===----------------------------------------------------------------------===//

class LASX1RI13_XI<bits<32> op, Operand ImmOpnd = simm13>
    : Fmt1RI13_XI<op, (outs LASX256:$xd), (ins ImmOpnd:$imm13), "$xd, $imm13">;

class LASX2R_XX<bits<32> op>
    : Fmt2R_XX<op, (outs LASX256:$xd), (ins LASX256:$xj), "$xd, $xj">;

class LASX2R_XR<bits<32> op>
    : Fmt2R_XR<op, (outs LASX256:$xd), (ins GPR:$rj), "$xd, $rj">;

class LASX2R_CX<bits<32> op>
    : Fmt2R_CX<op, (outs CFR:$cd), (ins LASX256:$xj), "$cd, $xj">;

class LASX2RI1_XXI<bits<32> op, Operand ImmOpnd = uimm1>
    : Fmt2RI1_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm1),
                  "$xd, $xj, $imm1">;

class LASX2RI2_XXI<bits<32> op, Operand ImmOpnd = uimm2>
    : Fmt2RI2_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm2),
                  "$xd, $xj, $imm2">;

class LASX2RI2_RXI<bits<32> op, Operand ImmOpnd = uimm2>
    : Fmt2RI2_RXI<op, (outs GPR:$rd), (ins LASX256:$xj, ImmOpnd:$imm2),
                  "$rd, $xj, $imm2">;

class LASX2RI3_XXI<bits<32> op, Operand ImmOpnd = uimm3>
    : Fmt2RI3_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm3),
                  "$xd, $xj, $imm3">;

class LASX2RI3_RXI<bits<32> op, Operand ImmOpnd = uimm3>
    : Fmt2RI3_RXI<op, (outs GPR:$rd), (ins LASX256:$xj, ImmOpnd:$imm3),
                  "$rd, $xj, $imm3">;

class LASX2RI4_XXI<bits<32> op, Operand ImmOpnd = uimm4>
    : Fmt2RI4_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm4),
                  "$xd, $xj, $imm4">;

class LASX2RI4_XRI<bits<32> op, Operand ImmOpnd = uimm4>
    : Fmt2RI4_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm4),
                  "$xd, $rj, $imm4">;

class LASX2RI4_RXI<bits<32> op, Operand ImmOpnd = uimm4>
    : Fmt2RI4_RXI<op, (outs GPR:$rd), (ins LASX256:$xj, ImmOpnd:$imm4),
                  "$rd, $xj, $imm4">;

class LASX2RI5_XXI<bits<32> op, Operand ImmOpnd = uimm5>
    : Fmt2RI5_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm5),
                  "$xd, $xj, $imm5">;

class LASX2RI6_XXI<bits<32> op, Operand ImmOpnd = uimm6>
    : Fmt2RI6_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm6),
                  "$xd, $xj, $imm6">;

class LASX2RI8_XXI<bits<32> op, Operand ImmOpnd = uimm8>
    : Fmt2RI8_XXI<op, (outs LASX256:$xd), (ins LASX256:$xj, ImmOpnd:$imm8),
                  "$xd, $xj, $imm8">;

class LASX2RI8I2_XRII<bits<32> op, Operand ImmOpnd = simm8,
                     Operand IdxOpnd = uimm2>
    : Fmt2RI8I2_XRII<op, (outs),
                     (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm8, IdxOpnd:$imm2),
                     "$xd, $rj, $imm8, $imm2">;
class LASX2RI8I3_XRII<bits<32> op, Operand ImmOpnd = simm8,
                     Operand IdxOpnd = uimm3>
    : Fmt2RI8I3_XRII<op, (outs),
                     (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm8, IdxOpnd:$imm3),
                     "$xd, $rj, $imm8, $imm3">;
class LASX2RI8I4_XRII<bits<32> op, Operand ImmOpnd = simm8,
                     Operand IdxOpnd = uimm4>
    : Fmt2RI8I4_XRII<op, (outs),
                     (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm8, IdxOpnd:$imm4),
                     "$xd, $rj, $imm8, $imm4">;
class LASX2RI8I5_XRII<bits<32> op, Operand ImmOpnd = simm8,
                     Operand IdxOpnd = uimm5>
    : Fmt2RI8I5_XRII<op, (outs),
                     (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm8, IdxOpnd:$imm5),
                     "$xd, $rj, $imm8, $imm5">;

class LASX3R_XXX<bits<32> op>
    : Fmt3R_XXX<op, (outs LASX256:$xd), (ins LASX256:$xj, LASX256:$xk),
                "$xd, $xj, $xk">;

class LASX3R_XXR<bits<32> op>
    : Fmt3R_XXR<op, (outs LASX256:$xd), (ins LASX256:$xj, GPR:$rk),
                "$xd, $xj, $rk">;

class LASX4R_XXXX<bits<32> op>
    : Fmt4R_XXXX<op, (outs LASX256:$xd),
                 (ins LASX256:$xj, LASX256:$xk, LASX256:$xa),
                 "$xd, $xj, $xk, $xa">;

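// Templates for instructions that both read and update $xd: the extra
// LASX256:$xd input operand is tied to the result via "$xd = $dst".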
let Constraints = "$xd = $dst" in {

class LASX2RI2_XXXI<bits<32> op, Operand ImmOpnd = uimm2>
    : Fmt2RI2_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm2),
                  "$xd, $xj, $imm2">;
class LASX2RI3_XXXI<bits<32> op, Operand ImmOpnd = uimm3>
    : Fmt2RI3_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm3),
                  "$xd, $xj, $imm3">;

class LASX2RI2_XXRI<bits<32> op, Operand ImmOpnd = uimm2>
    : Fmt2RI2_XRI<op, (outs LASX256:$dst), (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm2),
                  "$xd, $rj, $imm2">;
class LASX2RI3_XXRI<bits<32> op, Operand ImmOpnd = uimm3>
    : Fmt2RI3_XRI<op, (outs LASX256:$dst), (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm3),
                  "$xd, $rj, $imm3">;

class LASX2RI4_XXXI<bits<32> op, Operand ImmOpnd = uimm4>
    : Fmt2RI4_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm4),
                  "$xd, $xj, $imm4">;
class LASX2RI5_XXXI<bits<32> op, Operand ImmOpnd = uimm5>
    : Fmt2RI5_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm5),
                  "$xd, $xj, $imm5">;
class LASX2RI6_XXXI<bits<32> op, Operand ImmOpnd = uimm6>
    : Fmt2RI6_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm6),
                  "$xd, $xj, $imm6">;
class LASX2RI7_XXXI<bits<32> op, Operand ImmOpnd = uimm7>
    : Fmt2RI7_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm7),
                  "$xd, $xj, $imm7">;

class LASX2RI8_XXXI<bits<32> op, Operand ImmOpnd = uimm8>
    : Fmt2RI8_XXI<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, ImmOpnd:$imm8),
                  "$xd, $xj, $imm8">;

class LASX3R_XXXX<bits<32> op>
    : Fmt3R_XXX<op, (outs LASX256:$dst), (ins LASX256:$xd, LASX256:$xj, LASX256:$xk),
                "$xd, $xj, $xk">;

} // Constraints = "$xd = $dst"

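// Memory access templates. The shorter offset operands are element-scaled
// (simm9 << 3, simm10 << 2, simm11 << 1), matching the xvldrepl.{d/w/h}
// instructions defined below.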
class LASX2RI9_Load<bits<32> op, Operand ImmOpnd = simm9_lsl3>
    : Fmt2RI9_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm9),
                  "$xd, $rj, $imm9">;
class LASX2RI10_Load<bits<32> op, Operand ImmOpnd = simm10_lsl2>
    : Fmt2RI10_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm10),
                  "$xd, $rj, $imm10">;
class LASX2RI11_Load<bits<32> op, Operand ImmOpnd = simm11_lsl1>
    : Fmt2RI11_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm11),
                  "$xd, $rj, $imm11">;
class LASX2RI12_Load<bits<32> op, Operand ImmOpnd = simm12_addlike>
    : Fmt2RI12_XRI<op, (outs LASX256:$xd), (ins GPR:$rj, ImmOpnd:$imm12),
                  "$xd, $rj, $imm12">;
class LASX2RI12_Store<bits<32> op, Operand ImmOpnd = simm12_addlike>
    : Fmt2RI12_XRI<op, (outs), (ins LASX256:$xd, GPR:$rj, ImmOpnd:$imm12),
                  "$xd, $rj, $imm12">;

class LASX3R_Load<bits<32> op>
    : Fmt3R_XRR<op, (outs LASX256:$xd), (ins GPR:$rj, GPR:$rk),
                "$xd, $rj, $rk">;
class LASX3R_Store<bits<32> op>
    : Fmt3R_XRR<op, (outs), (ins LASX256:$xd, GPR:$rj, GPR:$rk),
                "$xd, $rj, $rk">;

//===----------------------------------------------------------------------===//
// Instructions
//===----------------------------------------------------------------------===//

let hasSideEffects = 0, Predicates = [HasExtLASX] in {

let mayLoad = 0, mayStore = 0 in {
def XVADD_B : LASX3R_XXX<0x740a0000>;
def XVADD_H : LASX3R_XXX<0x740a8000>;
def XVADD_W : LASX3R_XXX<0x740b0000>;
def XVADD_D : LASX3R_XXX<0x740b8000>;
def XVADD_Q : LASX3R_XXX<0x752d0000>;

def XVSUB_B : LASX3R_XXX<0x740c0000>;
def XVSUB_H : LASX3R_XXX<0x740c8000>;
def XVSUB_W : LASX3R_XXX<0x740d0000>;
def XVSUB_D : LASX3R_XXX<0x740d8000>;
def XVSUB_Q : LASX3R_XXX<0x752d8000>;

def XVADDI_BU : LASX2RI5_XXI<0x768a0000>;
def XVADDI_HU : LASX2RI5_XXI<0x768a8000>;
def XVADDI_WU : LASX2RI5_XXI<0x768b0000>;
def XVADDI_DU : LASX2RI5_XXI<0x768b8000>;

def XVSUBI_BU : LASX2RI5_XXI<0x768c0000>;
def XVSUBI_HU : LASX2RI5_XXI<0x768c8000>;
def XVSUBI_WU : LASX2RI5_XXI<0x768d0000>;
def XVSUBI_DU : LASX2RI5_XXI<0x768d8000>;

def XVNEG_B : LASX2R_XX<0x769c3000>;
def XVNEG_H : LASX2R_XX<0x769c3400>;
def XVNEG_W : LASX2R_XX<0x769c3800>;
def XVNEG_D : LASX2R_XX<0x769c3c00>;

def XVSADD_B : LASX3R_XXX<0x74460000>;
def XVSADD_H : LASX3R_XXX<0x74468000>;
def XVSADD_W : LASX3R_XXX<0x74470000>;
def XVSADD_D : LASX3R_XXX<0x74478000>;
def XVSADD_BU : LASX3R_XXX<0x744a0000>;
def XVSADD_HU : LASX3R_XXX<0x744a8000>;
def XVSADD_WU : LASX3R_XXX<0x744b0000>;
def XVSADD_DU : LASX3R_XXX<0x744b8000>;

def XVSSUB_B : LASX3R_XXX<0x74480000>;
def XVSSUB_H : LASX3R_XXX<0x74488000>;
def XVSSUB_W : LASX3R_XXX<0x74490000>;
def XVSSUB_D : LASX3R_XXX<0x74498000>;
def XVSSUB_BU : LASX3R_XXX<0x744c0000>;
def XVSSUB_HU : LASX3R_XXX<0x744c8000>;
def XVSSUB_WU : LASX3R_XXX<0x744d0000>;
def XVSSUB_DU : LASX3R_XXX<0x744d8000>;

def XVHADDW_H_B : LASX3R_XXX<0x74540000>;
def XVHADDW_W_H : LASX3R_XXX<0x74548000>;
def XVHADDW_D_W : LASX3R_XXX<0x74550000>;
def XVHADDW_Q_D : LASX3R_XXX<0x74558000>;
def XVHADDW_HU_BU : LASX3R_XXX<0x74580000>;
def XVHADDW_WU_HU : LASX3R_XXX<0x74588000>;
def XVHADDW_DU_WU : LASX3R_XXX<0x74590000>;
def XVHADDW_QU_DU : LASX3R_XXX<0x74598000>;

def XVHSUBW_H_B : LASX3R_XXX<0x74560000>;
def XVHSUBW_W_H : LASX3R_XXX<0x74568000>;
def XVHSUBW_D_W : LASX3R_XXX<0x74570000>;
def XVHSUBW_Q_D : LASX3R_XXX<0x74578000>;
def XVHSUBW_HU_BU : LASX3R_XXX<0x745a0000>;
def XVHSUBW_WU_HU : LASX3R_XXX<0x745a8000>;
def XVHSUBW_DU_WU : LASX3R_XXX<0x745b0000>;
def XVHSUBW_QU_DU : LASX3R_XXX<0x745b8000>;

def XVADDWEV_H_B : LASX3R_XXX<0x741e0000>;
def XVADDWEV_W_H : LASX3R_XXX<0x741e8000>;
def XVADDWEV_D_W : LASX3R_XXX<0x741f0000>;
def XVADDWEV_Q_D : LASX3R_XXX<0x741f8000>;
def XVADDWOD_H_B : LASX3R_XXX<0x74220000>;
def XVADDWOD_W_H : LASX3R_XXX<0x74228000>;
def XVADDWOD_D_W : LASX3R_XXX<0x74230000>;
def XVADDWOD_Q_D : LASX3R_XXX<0x74238000>;

def XVSUBWEV_H_B : LASX3R_XXX<0x74200000>;
def XVSUBWEV_W_H : LASX3R_XXX<0x74208000>;
def XVSUBWEV_D_W : LASX3R_XXX<0x74210000>;
def XVSUBWEV_Q_D : LASX3R_XXX<0x74218000>;
def XVSUBWOD_H_B : LASX3R_XXX<0x74240000>;
def XVSUBWOD_W_H : LASX3R_XXX<0x74248000>;
def XVSUBWOD_D_W : LASX3R_XXX<0x74250000>;
def XVSUBWOD_Q_D : LASX3R_XXX<0x74258000>;

def XVADDWEV_H_BU : LASX3R_XXX<0x742e0000>;
def XVADDWEV_W_HU : LASX3R_XXX<0x742e8000>;
def XVADDWEV_D_WU : LASX3R_XXX<0x742f0000>;
def XVADDWEV_Q_DU : LASX3R_XXX<0x742f8000>;
def XVADDWOD_H_BU : LASX3R_XXX<0x74320000>;
def XVADDWOD_W_HU : LASX3R_XXX<0x74328000>;
def XVADDWOD_D_WU : LASX3R_XXX<0x74330000>;
def XVADDWOD_Q_DU : LASX3R_XXX<0x74338000>;

def XVSUBWEV_H_BU : LASX3R_XXX<0x74300000>;
def XVSUBWEV_W_HU : LASX3R_XXX<0x74308000>;
def XVSUBWEV_D_WU : LASX3R_XXX<0x74310000>;
def XVSUBWEV_Q_DU : LASX3R_XXX<0x74318000>;
def XVSUBWOD_H_BU : LASX3R_XXX<0x74340000>;
def XVSUBWOD_W_HU : LASX3R_XXX<0x74348000>;
def XVSUBWOD_D_WU : LASX3R_XXX<0x74350000>;
def XVSUBWOD_Q_DU : LASX3R_XXX<0x74358000>;

def XVADDWEV_H_BU_B : LASX3R_XXX<0x743e0000>;
def XVADDWEV_W_HU_H : LASX3R_XXX<0x743e8000>;
def XVADDWEV_D_WU_W : LASX3R_XXX<0x743f0000>;
def XVADDWEV_Q_DU_D : LASX3R_XXX<0x743f8000>;
def XVADDWOD_H_BU_B : LASX3R_XXX<0x74400000>;
def XVADDWOD_W_HU_H : LASX3R_XXX<0x74408000>;
def XVADDWOD_D_WU_W : LASX3R_XXX<0x74410000>;
def XVADDWOD_Q_DU_D : LASX3R_XXX<0x74418000>;

def XVAVG_B : LASX3R_XXX<0x74640000>;
def XVAVG_H : LASX3R_XXX<0x74648000>;
def XVAVG_W : LASX3R_XXX<0x74650000>;
def XVAVG_D : LASX3R_XXX<0x74658000>;
def XVAVG_BU : LASX3R_XXX<0x74660000>;
def XVAVG_HU : LASX3R_XXX<0x74668000>;
def XVAVG_WU : LASX3R_XXX<0x74670000>;
def XVAVG_DU : LASX3R_XXX<0x74678000>;
def XVAVGR_B : LASX3R_XXX<0x74680000>;
def XVAVGR_H : LASX3R_XXX<0x74688000>;
def XVAVGR_W : LASX3R_XXX<0x74690000>;
def XVAVGR_D : LASX3R_XXX<0x74698000>;
def XVAVGR_BU : LASX3R_XXX<0x746a0000>;
def XVAVGR_HU : LASX3R_XXX<0x746a8000>;
def XVAVGR_WU : LASX3R_XXX<0x746b0000>;
def XVAVGR_DU : LASX3R_XXX<0x746b8000>;

def XVABSD_B : LASX3R_XXX<0x74600000>;
def XVABSD_H : LASX3R_XXX<0x74608000>;
def XVABSD_W : LASX3R_XXX<0x74610000>;
def XVABSD_D : LASX3R_XXX<0x74618000>;
def XVABSD_BU : LASX3R_XXX<0x74620000>;
def XVABSD_HU : LASX3R_XXX<0x74628000>;
def XVABSD_WU : LASX3R_XXX<0x74630000>;
def XVABSD_DU : LASX3R_XXX<0x74638000>;

def XVADDA_B : LASX3R_XXX<0x745c0000>;
def XVADDA_H : LASX3R_XXX<0x745c8000>;
def XVADDA_W : LASX3R_XXX<0x745d0000>;
def XVADDA_D : LASX3R_XXX<0x745d8000>;

def XVMAX_B : LASX3R_XXX<0x74700000>;
def XVMAX_H : LASX3R_XXX<0x74708000>;
def XVMAX_W : LASX3R_XXX<0x74710000>;
def XVMAX_D : LASX3R_XXX<0x74718000>;
def XVMAXI_B : LASX2RI5_XXI<0x76900000, simm5>;
def XVMAXI_H : LASX2RI5_XXI<0x76908000, simm5>;
def XVMAXI_W : LASX2RI5_XXI<0x76910000, simm5>;
def XVMAXI_D : LASX2RI5_XXI<0x76918000, simm5>;
def XVMAX_BU : LASX3R_XXX<0x74740000>;
def XVMAX_HU : LASX3R_XXX<0x74748000>;
def XVMAX_WU : LASX3R_XXX<0x74750000>;
def XVMAX_DU : LASX3R_XXX<0x74758000>;
def XVMAXI_BU : LASX2RI5_XXI<0x76940000>;
def XVMAXI_HU : LASX2RI5_XXI<0x76948000>;
def XVMAXI_WU : LASX2RI5_XXI<0x76950000>;
def XVMAXI_DU : LASX2RI5_XXI<0x76958000>;

def XVMIN_B : LASX3R_XXX<0x74720000>;
def XVMIN_H : LASX3R_XXX<0x74728000>;
def XVMIN_W : LASX3R_XXX<0x74730000>;
def XVMIN_D : LASX3R_XXX<0x74738000>;
def XVMINI_B : LASX2RI5_XXI<0x76920000, simm5>;
def XVMINI_H : LASX2RI5_XXI<0x76928000, simm5>;
def XVMINI_W : LASX2RI5_XXI<0x76930000, simm5>;
def XVMINI_D : LASX2RI5_XXI<0x76938000, simm5>;
def XVMIN_BU : LASX3R_XXX<0x74760000>;
def XVMIN_HU : LASX3R_XXX<0x74768000>;
def XVMIN_WU : LASX3R_XXX<0x74770000>;
def XVMIN_DU : LASX3R_XXX<0x74778000>;
def XVMINI_BU : LASX2RI5_XXI<0x76960000>;
def XVMINI_HU : LASX2RI5_XXI<0x76968000>;
def XVMINI_WU : LASX2RI5_XXI<0x76970000>;
def XVMINI_DU : LASX2RI5_XXI<0x76978000>;

def XVMUL_B : LASX3R_XXX<0x74840000>;
def XVMUL_H : LASX3R_XXX<0x74848000>;
def XVMUL_W : LASX3R_XXX<0x74850000>;
def XVMUL_D : LASX3R_XXX<0x74858000>;

def XVMUH_B : LASX3R_XXX<0x74860000>;
def XVMUH_H : LASX3R_XXX<0x74868000>;
def XVMUH_W : LASX3R_XXX<0x74870000>;
def XVMUH_D : LASX3R_XXX<0x74878000>;
def XVMUH_BU : LASX3R_XXX<0x74880000>;
def XVMUH_HU : LASX3R_XXX<0x74888000>;
def XVMUH_WU : LASX3R_XXX<0x74890000>;
def XVMUH_DU : LASX3R_XXX<0x74898000>;

def XVMULWEV_H_B : LASX3R_XXX<0x74900000>;
def XVMULWEV_W_H : LASX3R_XXX<0x74908000>;
def XVMULWEV_D_W : LASX3R_XXX<0x74910000>;
def XVMULWEV_Q_D : LASX3R_XXX<0x74918000>;
def XVMULWOD_H_B : LASX3R_XXX<0x74920000>;
def XVMULWOD_W_H : LASX3R_XXX<0x74928000>;
def XVMULWOD_D_W : LASX3R_XXX<0x74930000>;
def XVMULWOD_Q_D : LASX3R_XXX<0x74938000>;
def XVMULWEV_H_BU : LASX3R_XXX<0x74980000>;
def XVMULWEV_W_HU : LASX3R_XXX<0x74988000>;
def XVMULWEV_D_WU : LASX3R_XXX<0x74990000>;
def XVMULWEV_Q_DU : LASX3R_XXX<0x74998000>;
def XVMULWOD_H_BU : LASX3R_XXX<0x749a0000>;
def XVMULWOD_W_HU : LASX3R_XXX<0x749a8000>;
def XVMULWOD_D_WU : LASX3R_XXX<0x749b0000>;
def XVMULWOD_Q_DU : LASX3R_XXX<0x749b8000>;
def XVMULWEV_H_BU_B : LASX3R_XXX<0x74a00000>;
def XVMULWEV_W_HU_H : LASX3R_XXX<0x74a08000>;
def XVMULWEV_D_WU_W : LASX3R_XXX<0x74a10000>;
def XVMULWEV_Q_DU_D : LASX3R_XXX<0x74a18000>;
def XVMULWOD_H_BU_B : LASX3R_XXX<0x74a20000>;
def XVMULWOD_W_HU_H : LASX3R_XXX<0x74a28000>;
def XVMULWOD_D_WU_W : LASX3R_XXX<0x74a30000>;
def XVMULWOD_Q_DU_D : LASX3R_XXX<0x74a38000>;

def XVMADD_B : LASX3R_XXXX<0x74a80000>;
def XVMADD_H : LASX3R_XXXX<0x74a88000>;
def XVMADD_W : LASX3R_XXXX<0x74a90000>;
def XVMADD_D : LASX3R_XXXX<0x74a98000>;

def XVMSUB_B : LASX3R_XXXX<0x74aa0000>;
def XVMSUB_H : LASX3R_XXXX<0x74aa8000>;
def XVMSUB_W : LASX3R_XXXX<0x74ab0000>;
def XVMSUB_D : LASX3R_XXXX<0x74ab8000>;

def XVMADDWEV_H_B : LASX3R_XXXX<0x74ac0000>;
def XVMADDWEV_W_H : LASX3R_XXXX<0x74ac8000>;
def XVMADDWEV_D_W : LASX3R_XXXX<0x74ad0000>;
def XVMADDWEV_Q_D : LASX3R_XXXX<0x74ad8000>;
def XVMADDWOD_H_B : LASX3R_XXXX<0x74ae0000>;
def XVMADDWOD_W_H : LASX3R_XXXX<0x74ae8000>;
def XVMADDWOD_D_W : LASX3R_XXXX<0x74af0000>;
def XVMADDWOD_Q_D : LASX3R_XXXX<0x74af8000>;
def XVMADDWEV_H_BU : LASX3R_XXXX<0x74b40000>;
def XVMADDWEV_W_HU : LASX3R_XXXX<0x74b48000>;
def XVMADDWEV_D_WU : LASX3R_XXXX<0x74b50000>;
def XVMADDWEV_Q_DU : LASX3R_XXXX<0x74b58000>;
def XVMADDWOD_H_BU : LASX3R_XXXX<0x74b60000>;
def XVMADDWOD_W_HU : LASX3R_XXXX<0x74b68000>;
def XVMADDWOD_D_WU : LASX3R_XXXX<0x74b70000>;
def XVMADDWOD_Q_DU : LASX3R_XXXX<0x74b78000>;
def XVMADDWEV_H_BU_B : LASX3R_XXXX<0x74bc0000>;
def XVMADDWEV_W_HU_H : LASX3R_XXXX<0x74bc8000>;
def XVMADDWEV_D_WU_W : LASX3R_XXXX<0x74bd0000>;
def XVMADDWEV_Q_DU_D : LASX3R_XXXX<0x74bd8000>;
def XVMADDWOD_H_BU_B : LASX3R_XXXX<0x74be0000>;
def XVMADDWOD_W_HU_H : LASX3R_XXXX<0x74be8000>;
def XVMADDWOD_D_WU_W : LASX3R_XXXX<0x74bf0000>;
def XVMADDWOD_Q_DU_D : LASX3R_XXXX<0x74bf8000>;

def XVDIV_B : LASX3R_XXX<0x74e00000>;
def XVDIV_H : LASX3R_XXX<0x74e08000>;
def XVDIV_W : LASX3R_XXX<0x74e10000>;
def XVDIV_D : LASX3R_XXX<0x74e18000>;
def XVDIV_BU : LASX3R_XXX<0x74e40000>;
def XVDIV_HU : LASX3R_XXX<0x74e48000>;
def XVDIV_WU : LASX3R_XXX<0x74e50000>;
def XVDIV_DU : LASX3R_XXX<0x74e58000>;

def XVMOD_B : LASX3R_XXX<0x74e20000>;
def XVMOD_H : LASX3R_XXX<0x74e28000>;
def XVMOD_W : LASX3R_XXX<0x74e30000>;
def XVMOD_D : LASX3R_XXX<0x74e38000>;
def XVMOD_BU : LASX3R_XXX<0x74e60000>;
def XVMOD_HU : LASX3R_XXX<0x74e68000>;
def XVMOD_WU : LASX3R_XXX<0x74e70000>;
def XVMOD_DU : LASX3R_XXX<0x74e78000>;

def XVSAT_B : LASX2RI3_XXI<0x77242000>;
def XVSAT_H : LASX2RI4_XXI<0x77244000>;
def XVSAT_W : LASX2RI5_XXI<0x77248000>;
def XVSAT_D : LASX2RI6_XXI<0x77250000>;
def XVSAT_BU : LASX2RI3_XXI<0x77282000>;
def XVSAT_HU : LASX2RI4_XXI<0x77284000>;
def XVSAT_WU : LASX2RI5_XXI<0x77288000>;
def XVSAT_DU : LASX2RI6_XXI<0x77290000>;

def XVEXTH_H_B : LASX2R_XX<0x769ee000>;
def XVEXTH_W_H : LASX2R_XX<0x769ee400>;
def XVEXTH_D_W : LASX2R_XX<0x769ee800>;
def XVEXTH_Q_D : LASX2R_XX<0x769eec00>;
def XVEXTH_HU_BU : LASX2R_XX<0x769ef000>;
def XVEXTH_WU_HU : LASX2R_XX<0x769ef400>;
def XVEXTH_DU_WU : LASX2R_XX<0x769ef800>;
def XVEXTH_QU_DU : LASX2R_XX<0x769efc00>;

def VEXT2XV_H_B : LASX2R_XX<0x769f1000>;
def VEXT2XV_W_B : LASX2R_XX<0x769f1400>;
def VEXT2XV_D_B : LASX2R_XX<0x769f1800>;
def VEXT2XV_W_H : LASX2R_XX<0x769f1c00>;
def VEXT2XV_D_H : LASX2R_XX<0x769f2000>;
def VEXT2XV_D_W : LASX2R_XX<0x769f2400>;
def VEXT2XV_HU_BU : LASX2R_XX<0x769f2800>;
def VEXT2XV_WU_BU : LASX2R_XX<0x769f2c00>;
def VEXT2XV_DU_BU : LASX2R_XX<0x769f3000>;
def VEXT2XV_WU_HU : LASX2R_XX<0x769f3400>;
def VEXT2XV_DU_HU : LASX2R_XX<0x769f3800>;
def VEXT2XV_DU_WU : LASX2R_XX<0x769f3c00>;

def XVHSELI_D : LASX2RI5_XXI<0x769f8000>;

def XVSIGNCOV_B : LASX3R_XXX<0x752e0000>;
def XVSIGNCOV_H : LASX3R_XXX<0x752e8000>;
def XVSIGNCOV_W : LASX3R_XXX<0x752f0000>;
def XVSIGNCOV_D : LASX3R_XXX<0x752f8000>;

def XVMSKLTZ_B : LASX2R_XX<0x769c4000>;
def XVMSKLTZ_H : LASX2R_XX<0x769c4400>;
def XVMSKLTZ_W : LASX2R_XX<0x769c4800>;
def XVMSKLTZ_D : LASX2R_XX<0x769c4c00>;

def XVMSKGEZ_B : LASX2R_XX<0x769c5000>;

def XVMSKNZ_B : LASX2R_XX<0x769c6000>;

def XVLDI : LASX1RI13_XI<0x77e00000>;

def XVAND_V : LASX3R_XXX<0x75260000>;
def XVOR_V : LASX3R_XXX<0x75268000>;
def XVXOR_V : LASX3R_XXX<0x75270000>;
def XVNOR_V : LASX3R_XXX<0x75278000>;
def XVANDN_V : LASX3R_XXX<0x75280000>;
def XVORN_V : LASX3R_XXX<0x75288000>;

def XVANDI_B : LASX2RI8_XXI<0x77d00000>;
def XVORI_B : LASX2RI8_XXI<0x77d40000>;
def XVXORI_B : LASX2RI8_XXI<0x77d80000>;
def XVNORI_B : LASX2RI8_XXI<0x77dc0000>;

def XVSLL_B : LASX3R_XXX<0x74e80000>;
def XVSLL_H : LASX3R_XXX<0x74e88000>;
def XVSLL_W : LASX3R_XXX<0x74e90000>;
def XVSLL_D : LASX3R_XXX<0x74e98000>;
def XVSLLI_B : LASX2RI3_XXI<0x772c2000>;
def XVSLLI_H : LASX2RI4_XXI<0x772c4000>;
def XVSLLI_W : LASX2RI5_XXI<0x772c8000>;
def XVSLLI_D : LASX2RI6_XXI<0x772d0000>;

def XVSRL_B : LASX3R_XXX<0x74ea0000>;
def XVSRL_H : LASX3R_XXX<0x74ea8000>;
def XVSRL_W : LASX3R_XXX<0x74eb0000>;
def XVSRL_D : LASX3R_XXX<0x74eb8000>;
def XVSRLI_B : LASX2RI3_XXI<0x77302000>;
def XVSRLI_H : LASX2RI4_XXI<0x77304000>;
def XVSRLI_W : LASX2RI5_XXI<0x77308000>;
def XVSRLI_D : LASX2RI6_XXI<0x77310000>;

def XVSRA_B : LASX3R_XXX<0x74ec0000>;
def XVSRA_H : LASX3R_XXX<0x74ec8000>;
def XVSRA_W : LASX3R_XXX<0x74ed0000>;
def XVSRA_D : LASX3R_XXX<0x74ed8000>;
def XVSRAI_B : LASX2RI3_XXI<0x77342000>;
def XVSRAI_H : LASX2RI4_XXI<0x77344000>;
def XVSRAI_W : LASX2RI5_XXI<0x77348000>;
def XVSRAI_D : LASX2RI6_XXI<0x77350000>;

def XVROTR_B : LASX3R_XXX<0x74ee0000>;
def XVROTR_H : LASX3R_XXX<0x74ee8000>;
def XVROTR_W : LASX3R_XXX<0x74ef0000>;
def XVROTR_D : LASX3R_XXX<0x74ef8000>;
def XVROTRI_B : LASX2RI3_XXI<0x76a02000>;
def XVROTRI_H : LASX2RI4_XXI<0x76a04000>;
def XVROTRI_W : LASX2RI5_XXI<0x76a08000>;
def XVROTRI_D : LASX2RI6_XXI<0x76a10000>;

def XVSLLWIL_H_B : LASX2RI3_XXI<0x77082000>;
def XVSLLWIL_W_H : LASX2RI4_XXI<0x77084000>;
def XVSLLWIL_D_W : LASX2RI5_XXI<0x77088000>;
def XVEXTL_Q_D : LASX2R_XX<0x77090000>;
def XVSLLWIL_HU_BU : LASX2RI3_XXI<0x770c2000>;
def XVSLLWIL_WU_HU : LASX2RI4_XXI<0x770c4000>;
def XVSLLWIL_DU_WU : LASX2RI5_XXI<0x770c8000>;
def XVEXTL_QU_DU : LASX2R_XX<0x770d0000>;

def XVSRLR_B : LASX3R_XXX<0x74f00000>;
def XVSRLR_H : LASX3R_XXX<0x74f08000>;
def XVSRLR_W : LASX3R_XXX<0x74f10000>;
def XVSRLR_D : LASX3R_XXX<0x74f18000>;
def XVSRLRI_B : LASX2RI3_XXI<0x76a42000>;
def XVSRLRI_H : LASX2RI4_XXI<0x76a44000>;
def XVSRLRI_W : LASX2RI5_XXI<0x76a48000>;
def XVSRLRI_D : LASX2RI6_XXI<0x76a50000>;

def XVSRAR_B : LASX3R_XXX<0x74f20000>;
def XVSRAR_H : LASX3R_XXX<0x74f28000>;
def XVSRAR_W : LASX3R_XXX<0x74f30000>;
def XVSRAR_D : LASX3R_XXX<0x74f38000>;
def XVSRARI_B : LASX2RI3_XXI<0x76a82000>;
def XVSRARI_H : LASX2RI4_XXI<0x76a84000>;
def XVSRARI_W : LASX2RI5_XXI<0x76a88000>;
def XVSRARI_D : LASX2RI6_XXI<0x76a90000>;

def XVSRLN_B_H : LASX3R_XXX<0x74f48000>;
def XVSRLN_H_W : LASX3R_XXX<0x74f50000>;
def XVSRLN_W_D : LASX3R_XXX<0x74f58000>;
def XVSRAN_B_H : LASX3R_XXX<0x74f68000>;
def XVSRAN_H_W : LASX3R_XXX<0x74f70000>;
def XVSRAN_W_D : LASX3R_XXX<0x74f78000>;

def XVSRLNI_B_H : LASX2RI4_XXXI<0x77404000>;
def XVSRLNI_H_W : LASX2RI5_XXXI<0x77408000>;
def XVSRLNI_W_D : LASX2RI6_XXXI<0x77410000>;
def XVSRLNI_D_Q : LASX2RI7_XXXI<0x77420000>;
def XVSRANI_B_H : LASX2RI4_XXXI<0x77584000>;
def XVSRANI_H_W : LASX2RI5_XXXI<0x77588000>;
def XVSRANI_W_D : LASX2RI6_XXXI<0x77590000>;
def XVSRANI_D_Q : LASX2RI7_XXXI<0x775a0000>;

def XVSRLRN_B_H : LASX3R_XXX<0x74f88000>;
def XVSRLRN_H_W : LASX3R_XXX<0x74f90000>;
def XVSRLRN_W_D : LASX3R_XXX<0x74f98000>;
def XVSRARN_B_H : LASX3R_XXX<0x74fa8000>;
def XVSRARN_H_W : LASX3R_XXX<0x74fb0000>;
def XVSRARN_W_D : LASX3R_XXX<0x74fb8000>;

def XVSRLRNI_B_H : LASX2RI4_XXXI<0x77444000>;
def XVSRLRNI_H_W : LASX2RI5_XXXI<0x77448000>;
def XVSRLRNI_W_D : LASX2RI6_XXXI<0x77450000>;
def XVSRLRNI_D_Q : LASX2RI7_XXXI<0x77460000>;
def XVSRARNI_B_H : LASX2RI4_XXXI<0x775c4000>;
def XVSRARNI_H_W : LASX2RI5_XXXI<0x775c8000>;
def XVSRARNI_W_D : LASX2RI6_XXXI<0x775d0000>;
def XVSRARNI_D_Q : LASX2RI7_XXXI<0x775e0000>;

def XVSSRLN_B_H : LASX3R_XXX<0x74fc8000>;
def XVSSRLN_H_W : LASX3R_XXX<0x74fd0000>;
def XVSSRLN_W_D : LASX3R_XXX<0x74fd8000>;
def XVSSRAN_B_H : LASX3R_XXX<0x74fe8000>;
def XVSSRAN_H_W : LASX3R_XXX<0x74ff0000>;
def XVSSRAN_W_D : LASX3R_XXX<0x74ff8000>;
def XVSSRLN_BU_H : LASX3R_XXX<0x75048000>;
def XVSSRLN_HU_W : LASX3R_XXX<0x75050000>;
def XVSSRLN_WU_D : LASX3R_XXX<0x75058000>;
def XVSSRAN_BU_H : LASX3R_XXX<0x75068000>;
def XVSSRAN_HU_W : LASX3R_XXX<0x75070000>;
def XVSSRAN_WU_D : LASX3R_XXX<0x75078000>;

def XVSSRLNI_B_H : LASX2RI4_XXXI<0x77484000>;
def XVSSRLNI_H_W : LASX2RI5_XXXI<0x77488000>;
def XVSSRLNI_W_D : LASX2RI6_XXXI<0x77490000>;
def XVSSRLNI_D_Q : LASX2RI7_XXXI<0x774a0000>;
def XVSSRANI_B_H : LASX2RI4_XXXI<0x77604000>;
def XVSSRANI_H_W : LASX2RI5_XXXI<0x77608000>;
def XVSSRANI_W_D : LASX2RI6_XXXI<0x77610000>;
def XVSSRANI_D_Q : LASX2RI7_XXXI<0x77620000>;
def XVSSRLNI_BU_H : LASX2RI4_XXXI<0x774c4000>;
def XVSSRLNI_HU_W : LASX2RI5_XXXI<0x774c8000>;
def XVSSRLNI_WU_D : LASX2RI6_XXXI<0x774d0000>;
def XVSSRLNI_DU_Q : LASX2RI7_XXXI<0x774e0000>;
def XVSSRANI_BU_H : LASX2RI4_XXXI<0x77644000>;
def XVSSRANI_HU_W : LASX2RI5_XXXI<0x77648000>;
def XVSSRANI_WU_D : LASX2RI6_XXXI<0x77650000>;
def XVSSRANI_DU_Q : LASX2RI7_XXXI<0x77660000>;

def XVSSRLRN_B_H : LASX3R_XXX<0x75008000>;
def XVSSRLRN_H_W : LASX3R_XXX<0x75010000>;
def XVSSRLRN_W_D : LASX3R_XXX<0x75018000>;
def XVSSRARN_B_H : LASX3R_XXX<0x75028000>;
def XVSSRARN_H_W : LASX3R_XXX<0x75030000>;
def XVSSRARN_W_D : LASX3R_XXX<0x75038000>;
def XVSSRLRN_BU_H : LASX3R_XXX<0x75088000>;
def XVSSRLRN_HU_W : LASX3R_XXX<0x75090000>;
def XVSSRLRN_WU_D : LASX3R_XXX<0x75098000>;
def XVSSRARN_BU_H : LASX3R_XXX<0x750a8000>;
def XVSSRARN_HU_W : LASX3R_XXX<0x750b0000>;
def XVSSRARN_WU_D : LASX3R_XXX<0x750b8000>;

def XVSSRLRNI_B_H : LASX2RI4_XXXI<0x77504000>;
def XVSSRLRNI_H_W : LASX2RI5_XXXI<0x77508000>;
def XVSSRLRNI_W_D : LASX2RI6_XXXI<0x77510000>;
def XVSSRLRNI_D_Q : LASX2RI7_XXXI<0x77520000>;
def XVSSRARNI_B_H : LASX2RI4_XXXI<0x77684000>;
def XVSSRARNI_H_W : LASX2RI5_XXXI<0x77688000>;
def XVSSRARNI_W_D : LASX2RI6_XXXI<0x77690000>;
def XVSSRARNI_D_Q : LASX2RI7_XXXI<0x776a0000>;
def XVSSRLRNI_BU_H : LASX2RI4_XXXI<0x77544000>;
def XVSSRLRNI_HU_W : LASX2RI5_XXXI<0x77548000>;
def XVSSRLRNI_WU_D : LASX2RI6_XXXI<0x77550000>;
def XVSSRLRNI_DU_Q : LASX2RI7_XXXI<0x77560000>;
def XVSSRARNI_BU_H : LASX2RI4_XXXI<0x776c4000>;
def XVSSRARNI_HU_W : LASX2RI5_XXXI<0x776c8000>;
def XVSSRARNI_WU_D : LASX2RI6_XXXI<0x776d0000>;
def XVSSRARNI_DU_Q : LASX2RI7_XXXI<0x776e0000>;

def XVCLO_B : LASX2R_XX<0x769c0000>;
def XVCLO_H : LASX2R_XX<0x769c0400>;
def XVCLO_W : LASX2R_XX<0x769c0800>;
def XVCLO_D : LASX2R_XX<0x769c0c00>;
def XVCLZ_B : LASX2R_XX<0x769c1000>;
def XVCLZ_H : LASX2R_XX<0x769c1400>;
def XVCLZ_W : LASX2R_XX<0x769c1800>;
def XVCLZ_D : LASX2R_XX<0x769c1c00>;

def XVPCNT_B : LASX2R_XX<0x769c2000>;
def XVPCNT_H : LASX2R_XX<0x769c2400>;
def XVPCNT_W : LASX2R_XX<0x769c2800>;
def XVPCNT_D : LASX2R_XX<0x769c2c00>;

def XVBITCLR_B : LASX3R_XXX<0x750c0000>;
def XVBITCLR_H : LASX3R_XXX<0x750c8000>;
def XVBITCLR_W : LASX3R_XXX<0x750d0000>;
def XVBITCLR_D : LASX3R_XXX<0x750d8000>;
def XVBITCLRI_B : LASX2RI3_XXI<0x77102000>;
def XVBITCLRI_H : LASX2RI4_XXI<0x77104000>;
def XVBITCLRI_W : LASX2RI5_XXI<0x77108000>;
def XVBITCLRI_D : LASX2RI6_XXI<0x77110000>;

def XVBITSET_B : LASX3R_XXX<0x750e0000>;
def XVBITSET_H : LASX3R_XXX<0x750e8000>;
def XVBITSET_W : LASX3R_XXX<0x750f0000>;
def XVBITSET_D : LASX3R_XXX<0x750f8000>;
def XVBITSETI_B : LASX2RI3_XXI<0x77142000>;
def XVBITSETI_H : LASX2RI4_XXI<0x77144000>;
def XVBITSETI_W : LASX2RI5_XXI<0x77148000>;
def XVBITSETI_D : LASX2RI6_XXI<0x77150000>;

def XVBITREV_B : LASX3R_XXX<0x75100000>;
def XVBITREV_H : LASX3R_XXX<0x75108000>;
def XVBITREV_W : LASX3R_XXX<0x75110000>;
def XVBITREV_D : LASX3R_XXX<0x75118000>;
def XVBITREVI_B : LASX2RI3_XXI<0x77182000>;
def XVBITREVI_H : LASX2RI4_XXI<0x77184000>;
def XVBITREVI_W : LASX2RI5_XXI<0x77188000>;
def XVBITREVI_D : LASX2RI6_XXI<0x77190000>;

def XVFRSTP_B : LASX3R_XXXX<0x752b0000>;
def XVFRSTP_H : LASX3R_XXXX<0x752b8000>;
def XVFRSTPI_B : LASX2RI5_XXXI<0x769a0000>;
def XVFRSTPI_H : LASX2RI5_XXXI<0x769a8000>;

def XVFADD_S : LASX3R_XXX<0x75308000>;
def XVFADD_D : LASX3R_XXX<0x75310000>;
def XVFSUB_S : LASX3R_XXX<0x75328000>;
def XVFSUB_D : LASX3R_XXX<0x75330000>;
def XVFMUL_S : LASX3R_XXX<0x75388000>;
def XVFMUL_D : LASX3R_XXX<0x75390000>;
def XVFDIV_S : LASX3R_XXX<0x753a8000>;
def XVFDIV_D : LASX3R_XXX<0x753b0000>;

def XVFMADD_S : LASX4R_XXXX<0x0a100000>;
def XVFMADD_D : LASX4R_XXXX<0x0a200000>;
def XVFMSUB_S : LASX4R_XXXX<0x0a500000>;
def XVFMSUB_D : LASX4R_XXXX<0x0a600000>;
def XVFNMADD_S : LASX4R_XXXX<0x0a900000>;
def XVFNMADD_D : LASX4R_XXXX<0x0aa00000>;
def XVFNMSUB_S : LASX4R_XXXX<0x0ad00000>;
def XVFNMSUB_D : LASX4R_XXXX<0x0ae00000>;

def XVFMAX_S : LASX3R_XXX<0x753c8000>;
def XVFMAX_D : LASX3R_XXX<0x753d0000>;
def XVFMIN_S : LASX3R_XXX<0x753e8000>;
def XVFMIN_D : LASX3R_XXX<0x753f0000>;

def XVFMAXA_S : LASX3R_XXX<0x75408000>;
def XVFMAXA_D : LASX3R_XXX<0x75410000>;
def XVFMINA_S : LASX3R_XXX<0x75428000>;
def XVFMINA_D : LASX3R_XXX<0x75430000>;

def XVFLOGB_S : LASX2R_XX<0x769cc400>;
def XVFLOGB_D : LASX2R_XX<0x769cc800>;

def XVFCLASS_S : LASX2R_XX<0x769cd400>;
def XVFCLASS_D : LASX2R_XX<0x769cd800>;

def XVFSQRT_S : LASX2R_XX<0x769ce400>;
def XVFSQRT_D : LASX2R_XX<0x769ce800>;
def XVFRECIP_S : LASX2R_XX<0x769cf400>;
def XVFRECIP_D : LASX2R_XX<0x769cf800>;
def XVFRSQRT_S : LASX2R_XX<0x769d0400>;
def XVFRSQRT_D : LASX2R_XX<0x769d0800>;
def XVFRECIPE_S : LASX2R_XX<0x769d1400>;
def XVFRECIPE_D : LASX2R_XX<0x769d1800>;
def XVFRSQRTE_S : LASX2R_XX<0x769d2400>;
def XVFRSQRTE_D : LASX2R_XX<0x769d2800>;

def XVFCVTL_S_H : LASX2R_XX<0x769de800>;
def XVFCVTH_S_H : LASX2R_XX<0x769dec00>;
def XVFCVTL_D_S : LASX2R_XX<0x769df000>;
def XVFCVTH_D_S : LASX2R_XX<0x769df400>;
def XVFCVT_H_S : LASX3R_XXX<0x75460000>;
def XVFCVT_S_D : LASX3R_XXX<0x75468000>;

def XVFRINTRNE_S : LASX2R_XX<0x769d7400>;
def XVFRINTRNE_D : LASX2R_XX<0x769d7800>;
def XVFRINTRZ_S : LASX2R_XX<0x769d6400>;
def XVFRINTRZ_D : LASX2R_XX<0x769d6800>;
def XVFRINTRP_S : LASX2R_XX<0x769d5400>;
def XVFRINTRP_D : LASX2R_XX<0x769d5800>;
def XVFRINTRM_S : LASX2R_XX<0x769d4400>;
def XVFRINTRM_D : LASX2R_XX<0x769d4800>;
def XVFRINT_S : LASX2R_XX<0x769d3400>;
def XVFRINT_D : LASX2R_XX<0x769d3800>;

def XVFTINTRNE_W_S : LASX2R_XX<0x769e5000>;
def XVFTINTRNE_L_D : LASX2R_XX<0x769e5400>;
def XVFTINTRZ_W_S : LASX2R_XX<0x769e4800>;
def XVFTINTRZ_L_D : LASX2R_XX<0x769e4c00>;
def XVFTINTRP_W_S : LASX2R_XX<0x769e4000>;
def XVFTINTRP_L_D : LASX2R_XX<0x769e4400>;
def XVFTINTRM_W_S : LASX2R_XX<0x769e3800>;
def XVFTINTRM_L_D : LASX2R_XX<0x769e3c00>;
def XVFTINT_W_S : LASX2R_XX<0x769e3000>;
def XVFTINT_L_D : LASX2R_XX<0x769e3400>;
def XVFTINTRZ_WU_S : LASX2R_XX<0x769e7000>;
def XVFTINTRZ_LU_D : LASX2R_XX<0x769e7400>;
def XVFTINT_WU_S : LASX2R_XX<0x769e5800>;
def XVFTINT_LU_D : LASX2R_XX<0x769e5c00>;

def XVFTINTRNE_W_D : LASX3R_XXX<0x754b8000>;
def XVFTINTRZ_W_D : LASX3R_XXX<0x754b0000>;
def XVFTINTRP_W_D : LASX3R_XXX<0x754a8000>;
def XVFTINTRM_W_D : LASX3R_XXX<0x754a0000>;
def XVFTINT_W_D : LASX3R_XXX<0x75498000>;

def XVFTINTRNEL_L_S : LASX2R_XX<0x769ea000>;
def XVFTINTRNEH_L_S : LASX2R_XX<0x769ea400>;
def XVFTINTRZL_L_S : LASX2R_XX<0x769e9800>;
def XVFTINTRZH_L_S : LASX2R_XX<0x769e9c00>;
def XVFTINTRPL_L_S : LASX2R_XX<0x769e9000>;
def XVFTINTRPH_L_S : LASX2R_XX<0x769e9400>;
def XVFTINTRML_L_S : LASX2R_XX<0x769e8800>;
def XVFTINTRMH_L_S : LASX2R_XX<0x769e8c00>;
def XVFTINTL_L_S : LASX2R_XX<0x769e8000>;
def XVFTINTH_L_S : LASX2R_XX<0x769e8400>;

def XVFFINT_S_W : LASX2R_XX<0x769e0000>;
def XVFFINT_D_L : LASX2R_XX<0x769e0800>;
def XVFFINT_S_WU : LASX2R_XX<0x769e0400>;
def XVFFINT_D_LU : LASX2R_XX<0x769e0c00>;
def XVFFINTL_D_W : LASX2R_XX<0x769e1000>;
def XVFFINTH_D_W : LASX2R_XX<0x769e1400>;
def XVFFINT_S_L : LASX3R_XXX<0x75480000>;

def XVSEQ_B : LASX3R_XXX<0x74000000>;
def XVSEQ_H : LASX3R_XXX<0x74008000>;
def XVSEQ_W : LASX3R_XXX<0x74010000>;
def XVSEQ_D : LASX3R_XXX<0x74018000>;
def XVSEQI_B : LASX2RI5_XXI<0x76800000, simm5>;
def XVSEQI_H : LASX2RI5_XXI<0x76808000, simm5>;
def XVSEQI_W : LASX2RI5_XXI<0x76810000, simm5>;
def XVSEQI_D : LASX2RI5_XXI<0x76818000, simm5>;

def XVSLE_B : LASX3R_XXX<0x74020000>;
def XVSLE_H : LASX3R_XXX<0x74028000>;
def XVSLE_W : LASX3R_XXX<0x74030000>;
def XVSLE_D : LASX3R_XXX<0x74038000>;
def XVSLEI_B : LASX2RI5_XXI<0x76820000, simm5>;
def XVSLEI_H : LASX2RI5_XXI<0x76828000, simm5>;
def XVSLEI_W : LASX2RI5_XXI<0x76830000, simm5>;
def XVSLEI_D : LASX2RI5_XXI<0x76838000, simm5>;

def XVSLE_BU : LASX3R_XXX<0x74040000>;
def XVSLE_HU : LASX3R_XXX<0x74048000>;
def XVSLE_WU : LASX3R_XXX<0x74050000>;
def XVSLE_DU : LASX3R_XXX<0x74058000>;
def XVSLEI_BU : LASX2RI5_XXI<0x76840000>;
def XVSLEI_HU : LASX2RI5_XXI<0x76848000>;
def XVSLEI_WU : LASX2RI5_XXI<0x76850000>;
def XVSLEI_DU : LASX2RI5_XXI<0x76858000>;

def XVSLT_B : LASX3R_XXX<0x74060000>;
def XVSLT_H : LASX3R_XXX<0x74068000>;
def XVSLT_W : LASX3R_XXX<0x74070000>;
def XVSLT_D : LASX3R_XXX<0x74078000>;
def XVSLTI_B : LASX2RI5_XXI<0x76860000, simm5>;
def XVSLTI_H : LASX2RI5_XXI<0x76868000, simm5>;
def XVSLTI_W : LASX2RI5_XXI<0x76870000, simm5>;
def XVSLTI_D : LASX2RI5_XXI<0x76878000, simm5>;

def XVSLT_BU : LASX3R_XXX<0x74080000>;
def XVSLT_HU : LASX3R_XXX<0x74088000>;
def XVSLT_WU : LASX3R_XXX<0x74090000>;
def XVSLT_DU : LASX3R_XXX<0x74098000>;
def XVSLTI_BU : LASX2RI5_XXI<0x76880000>;
def XVSLTI_HU : LASX2RI5_XXI<0x76888000>;
def XVSLTI_WU : LASX2RI5_XXI<0x76890000>;
def XVSLTI_DU : LASX2RI5_XXI<0x76898000>;

def XVFCMP_CAF_S : LASX3R_XXX<0x0c900000>;
def XVFCMP_SAF_S : LASX3R_XXX<0x0c908000>;
def XVFCMP_CLT_S : LASX3R_XXX<0x0c910000>;
def XVFCMP_SLT_S : LASX3R_XXX<0x0c918000>;
def XVFCMP_CEQ_S : LASX3R_XXX<0x0c920000>;
def XVFCMP_SEQ_S : LASX3R_XXX<0x0c928000>;
def XVFCMP_CLE_S : LASX3R_XXX<0x0c930000>;
def XVFCMP_SLE_S : LASX3R_XXX<0x0c938000>;
def XVFCMP_CUN_S : LASX3R_XXX<0x0c940000>;
def XVFCMP_SUN_S : LASX3R_XXX<0x0c948000>;
def XVFCMP_CULT_S : LASX3R_XXX<0x0c950000>;
def XVFCMP_SULT_S : LASX3R_XXX<0x0c958000>;
def XVFCMP_CUEQ_S : LASX3R_XXX<0x0c960000>;
def XVFCMP_SUEQ_S : LASX3R_XXX<0x0c968000>;
def XVFCMP_CULE_S : LASX3R_XXX<0x0c970000>;
def XVFCMP_SULE_S : LASX3R_XXX<0x0c978000>;
def XVFCMP_CNE_S : LASX3R_XXX<0x0c980000>;
def XVFCMP_SNE_S : LASX3R_XXX<0x0c988000>;
def XVFCMP_COR_S : LASX3R_XXX<0x0c9a0000>;
def XVFCMP_SOR_S : LASX3R_XXX<0x0c9a8000>;
def XVFCMP_CUNE_S : LASX3R_XXX<0x0c9c0000>;
def XVFCMP_SUNE_S : LASX3R_XXX<0x0c9c8000>;

def XVFCMP_CAF_D : LASX3R_XXX<0x0ca00000>;
def XVFCMP_SAF_D : LASX3R_XXX<0x0ca08000>;
def XVFCMP_CLT_D : LASX3R_XXX<0x0ca10000>;
def XVFCMP_SLT_D : LASX3R_XXX<0x0ca18000>;
def XVFCMP_CEQ_D : LASX3R_XXX<0x0ca20000>;
def XVFCMP_SEQ_D : LASX3R_XXX<0x0ca28000>;
def XVFCMP_CLE_D : LASX3R_XXX<0x0ca30000>;
def XVFCMP_SLE_D : LASX3R_XXX<0x0ca38000>;
def XVFCMP_CUN_D : LASX3R_XXX<0x0ca40000>;
def XVFCMP_SUN_D : LASX3R_XXX<0x0ca48000>;
def XVFCMP_CULT_D : LASX3R_XXX<0x0ca50000>;
def XVFCMP_SULT_D : LASX3R_XXX<0x0ca58000>;
def XVFCMP_CUEQ_D : LASX3R_XXX<0x0ca60000>;
def XVFCMP_SUEQ_D : LASX3R_XXX<0x0ca68000>;
def XVFCMP_CULE_D : LASX3R_XXX<0x0ca70000>;
def XVFCMP_SULE_D : LASX3R_XXX<0x0ca78000>;
def XVFCMP_CNE_D : LASX3R_XXX<0x0ca80000>;
def XVFCMP_SNE_D : LASX3R_XXX<0x0ca88000>;
def XVFCMP_COR_D : LASX3R_XXX<0x0caa0000>;
def XVFCMP_SOR_D : LASX3R_XXX<0x0caa8000>;
def XVFCMP_CUNE_D : LASX3R_XXX<0x0cac0000>;
def XVFCMP_SUNE_D : LASX3R_XXX<0x0cac8000>;

def XVBITSEL_V : LASX4R_XXXX<0x0d200000>;

def XVBITSELI_B : LASX2RI8_XXXI<0x77c40000>;

def XVSETEQZ_V : LASX2R_CX<0x769c9800>;
def XVSETNEZ_V : LASX2R_CX<0x769c9c00>;
def XVSETANYEQZ_B : LASX2R_CX<0x769ca000>;
def XVSETANYEQZ_H : LASX2R_CX<0x769ca400>;
def XVSETANYEQZ_W : LASX2R_CX<0x769ca800>;
def XVSETANYEQZ_D : LASX2R_CX<0x769cac00>;
def XVSETALLNEZ_B : LASX2R_CX<0x769cb000>;
def XVSETALLNEZ_H : LASX2R_CX<0x769cb400>;
def XVSETALLNEZ_W : LASX2R_CX<0x769cb800>;
def XVSETALLNEZ_D : LASX2R_CX<0x769cbc00>;

def XVINSGR2VR_W : LASX2RI3_XXRI<0x76ebc000>;
def XVINSGR2VR_D : LASX2RI2_XXRI<0x76ebe000>;
def XVPICKVE2GR_W : LASX2RI3_RXI<0x76efc000>;
def XVPICKVE2GR_D : LASX2RI2_RXI<0x76efe000>;
def XVPICKVE2GR_WU : LASX2RI3_RXI<0x76f3c000>;
def XVPICKVE2GR_DU : LASX2RI2_RXI<0x76f3e000>;

def XVREPLGR2VR_B : LASX2R_XR<0x769f0000>;
def XVREPLGR2VR_H : LASX2R_XR<0x769f0400>;
def XVREPLGR2VR_W : LASX2R_XR<0x769f0800>;
def XVREPLGR2VR_D : LASX2R_XR<0x769f0c00>;

def XVREPLVE_B : LASX3R_XXR<0x75220000>;
def XVREPLVE_H : LASX3R_XXR<0x75228000>;
def XVREPLVE_W : LASX3R_XXR<0x75230000>;
def XVREPLVE_D : LASX3R_XXR<0x75238000>;
def XVREPL128VEI_B : LASX2RI4_XXI<0x76f78000>;
def XVREPL128VEI_H : LASX2RI3_XXI<0x76f7c000>;
def XVREPL128VEI_W : LASX2RI2_XXI<0x76f7e000>;
def XVREPL128VEI_D : LASX2RI1_XXI<0x76f7f000>;

def XVREPLVE0_B : LASX2R_XX<0x77070000>;
def XVREPLVE0_H : LASX2R_XX<0x77078000>;
def XVREPLVE0_W : LASX2R_XX<0x7707c000>;
def XVREPLVE0_D : LASX2R_XX<0x7707e000>;
def XVREPLVE0_Q : LASX2R_XX<0x7707f000>;

def XVINSVE0_W : LASX2RI3_XXXI<0x76ffc000>;
def XVINSVE0_D : LASX2RI2_XXXI<0x76ffe000>;

def XVPICKVE_W : LASX2RI3_XXI<0x7703c000>;
def XVPICKVE_D : LASX2RI2_XXI<0x7703e000>;

def XVBSLL_V : LASX2RI5_XXI<0x768e0000>;
def XVBSRL_V : LASX2RI5_XXI<0x768e8000>;

def XVPACKEV_B : LASX3R_XXX<0x75160000>;
def XVPACKEV_H : LASX3R_XXX<0x75168000>;
def XVPACKEV_W : LASX3R_XXX<0x75170000>;
def XVPACKEV_D : LASX3R_XXX<0x75178000>;
def XVPACKOD_B : LASX3R_XXX<0x75180000>;
def XVPACKOD_H : LASX3R_XXX<0x75188000>;
def XVPACKOD_W : LASX3R_XXX<0x75190000>;
def XVPACKOD_D : LASX3R_XXX<0x75198000>;

def XVPICKEV_B : LASX3R_XXX<0x751e0000>;
def XVPICKEV_H : LASX3R_XXX<0x751e8000>;
def XVPICKEV_W : LASX3R_XXX<0x751f0000>;
def XVPICKEV_D : LASX3R_XXX<0x751f8000>;
def XVPICKOD_B : LASX3R_XXX<0x75200000>;
def XVPICKOD_H : LASX3R_XXX<0x75208000>;
def XVPICKOD_W : LASX3R_XXX<0x75210000>;
def XVPICKOD_D : LASX3R_XXX<0x75218000>;

def XVILVL_B : LASX3R_XXX<0x751a0000>;
def XVILVL_H : LASX3R_XXX<0x751a8000>;
def XVILVL_W : LASX3R_XXX<0x751b0000>;
def XVILVL_D : LASX3R_XXX<0x751b8000>;
def XVILVH_B : LASX3R_XXX<0x751c0000>;
def XVILVH_H : LASX3R_XXX<0x751c8000>;
def XVILVH_W : LASX3R_XXX<0x751d0000>;
def XVILVH_D : LASX3R_XXX<0x751d8000>;

def XVSHUF_B : LASX4R_XXXX<0x0d600000>;

def XVSHUF_H : LASX3R_XXXX<0x757a8000>;
def XVSHUF_W : LASX3R_XXXX<0x757b0000>;
def XVSHUF_D : LASX3R_XXXX<0x757b8000>;

def XVPERM_W : LASX3R_XXX<0x757d0000>;

def XVSHUF4I_B : LASX2RI8_XXI<0x77900000>;
def XVSHUF4I_H : LASX2RI8_XXI<0x77940000>;
def XVSHUF4I_W : LASX2RI8_XXI<0x77980000>;
def XVSHUF4I_D : LASX2RI8_XXXI<0x779c0000>;

def XVPERMI_W : LASX2RI8_XXXI<0x77e40000>;
def XVPERMI_D : LASX2RI8_XXI<0x77e80000>;
def XVPERMI_Q : LASX2RI8_XXXI<0x77ec0000>;

def XVEXTRINS_D : LASX2RI8_XXXI<0x77800000>;
def XVEXTRINS_W : LASX2RI8_XXXI<0x77840000>;
def XVEXTRINS_H : LASX2RI8_XXXI<0x77880000>;
def XVEXTRINS_B : LASX2RI8_XXXI<0x778c0000>;
} // mayLoad = 0, mayStore = 0

let mayLoad = 1, mayStore = 0 in {
def XVLD : LASX2RI12_Load<0x2c800000>;
def XVLDX : LASX3R_Load<0x38480000>;

def XVLDREPL_B : LASX2RI12_Load<0x32800000>;
def XVLDREPL_H : LASX2RI11_Load<0x32400000>;
def XVLDREPL_W : LASX2RI10_Load<0x32200000>;
def XVLDREPL_D : LASX2RI9_Load<0x32100000>;
} // mayLoad = 1, mayStore = 0

let mayLoad = 0, mayStore = 1 in {
def XVST : LASX2RI12_Store<0x2cc00000>;
def XVSTX : LASX3R_Store<0x384c0000>;

def XVSTELM_B : LASX2RI8I5_XRII<0x33800000>;
def XVSTELM_H : LASX2RI8I4_XRII<0x33400000, simm8_lsl1>;
def XVSTELM_W : LASX2RI8I3_XRII<0x33200000, simm8_lsl2>;
def XVSTELM_D : LASX2RI8I2_XRII<0x33100000, simm8_lsl3>;
} // mayLoad = 0, mayStore = 1

} // hasSideEffects = 0, Predicates = [HasExtLASX]

/// Pseudo-instructions

let Predicates = [HasExtLASX] in {

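// xvrepli.{b/h/w/d}: assembler-only pseudos (isAsmParserOnly) taking a 10-bit
// signed splat immediate.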
let hasSideEffects = 0, mayLoad = 0, mayStore = 0, isCodeGenOnly = 0,
    isAsmParserOnly = 1 in {
def PseudoXVREPLI_B : Pseudo<(outs LASX256:$xd), (ins simm10:$imm), [],
                             "xvrepli.b", "$xd, $imm">;
def PseudoXVREPLI_H : Pseudo<(outs LASX256:$xd), (ins simm10:$imm), [],
                             "xvrepli.h", "$xd, $imm">;
def PseudoXVREPLI_W : Pseudo<(outs LASX256:$xd), (ins simm10:$imm), [],
                             "xvrepli.w", "$xd, $imm">;
def PseudoXVREPLI_D : Pseudo<(outs LASX256:$xd), (ins simm10:$imm), [],
                             "xvrepli.d", "$xd, $imm">;
}

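// Pseudos for the vector "all (non)zero" / "any (non)zero" conditions,
// selected from the loongarch_vall_*/loongarch_vany_* target nodes.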
def PseudoXVBNZ_B : VecCond<loongarch_vall_nonzero, v32i8, LASX256>;
def PseudoXVBNZ_H : VecCond<loongarch_vall_nonzero, v16i16, LASX256>;
def PseudoXVBNZ_W : VecCond<loongarch_vall_nonzero, v8i32, LASX256>;
def PseudoXVBNZ_D : VecCond<loongarch_vall_nonzero, v4i64, LASX256>;
def PseudoXVBNZ : VecCond<loongarch_vany_nonzero, v32i8, LASX256>;

def PseudoXVBZ_B : VecCond<loongarch_vall_zero, v32i8, LASX256>;
def PseudoXVBZ_H : VecCond<loongarch_vall_zero, v16i16, LASX256>;
def PseudoXVBZ_W : VecCond<loongarch_vall_zero, v8i32, LASX256>;
def PseudoXVBZ_D : VecCond<loongarch_vall_zero, v4i64, LASX256>;
def PseudoXVBZ : VecCond<loongarch_vany_zero, v32i8, LASX256>;

let usesCustomInserter = 1, Constraints = "$xd = $dst" in {
def PseudoXVINSGR2VR_B
  : Pseudo<(outs LASX256:$dst), (ins LASX256:$xd, GPR:$rj, uimm5:$imm)>;
def PseudoXVINSGR2VR_H
  : Pseudo<(outs LASX256:$dst), (ins LASX256:$xd, GPR:$rj, uimm4:$imm)>;
} //  usesCustomInserter = 1, Constraints = "$xd = $dst"

let usesCustomInserter = 1, hasSideEffects = 0, mayLoad = 0, mayStore = 0 in {
def PseudoXVMSKLTZ_B : Pseudo<(outs GPR:$rd), (ins LASX256:$vj)>;
def PseudoXVMSKLTZ_H : Pseudo<(outs GPR:$rd), (ins LASX256:$vj)>;
def PseudoXVMSKLTZ_W : Pseudo<(outs GPR:$rd), (ins LASX256:$vj)>;
def PseudoXVMSKLTZ_D : Pseudo<(outs GPR:$rd), (ins LASX256:$vj)>;
def PseudoXVMSKGEZ_B : Pseudo<(outs GPR:$rd), (ins LASX256:$vj)>;
def PseudoXVMSKEQZ_B : Pseudo<(outs GPR:$rd), (ins LASX256:$vj)>;
def PseudoXVMSKNEZ_B : Pseudo<(outs GPR:$rd), (ins LASX256:$vj)>;
} // usesCustomInserter = 1, hasSideEffects = 0, mayLoad = 0, mayStore = 0

} // Predicates = [HasExtLASX]

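// Pattern multiclasses: each instantiates one pattern per element type and
// appends the matching suffix (_B/_H/_W/_D, _S/_D for FP, _BU/.../_DU for
// unsigned) to the instruction base name passed in Inst.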
multiclass PatXr<SDPatternOperator OpNode, string Inst> {
  def : Pat<(v32i8 (OpNode (v32i8 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj)>;
  def : Pat<(v16i16 (OpNode (v16i16 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj)>;
  def : Pat<(v8i32 (OpNode (v8i32 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj)>;
  def : Pat<(v4i64 (OpNode (v4i64 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj)>;
}

multiclass PatXrF<SDPatternOperator OpNode, string Inst> {
  def : Pat<(v8f32 (OpNode (v8f32 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_S") LASX256:$xj)>;
  def : Pat<(v4f64 (OpNode (v4f64 LASX256:$xj))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj)>;
}

multiclass PatXrXr<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}

multiclass PatXrXrF<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v8f32 LASX256:$xj), (v8f32 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_S") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4f64 LASX256:$xj), (v4f64 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}

multiclass PatXrXrU<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_BU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_HU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_WU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_DU") LASX256:$xj, LASX256:$xk)>;
}

multiclass PatXrSimm5<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 (SplatPat_simm5 simm5:$imm))),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, simm5:$imm)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 (SplatPat_simm5 simm5:$imm))),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, simm5:$imm)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 (SplatPat_simm5 simm5:$imm))),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, simm5:$imm)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 (SplatPat_simm5 simm5:$imm))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, simm5:$imm)>;
}

multiclass PatXrUimm5<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_BU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_HU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_WU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_DU") LASX256:$xj, uimm5:$imm)>;
}

multiclass PatXrXrXr<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xd), (v32i8 LASX256:$xj),
                    (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_B") LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v16i16 LASX256:$xd), (v16i16 LASX256:$xj),
                    (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_H") LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v8i32 LASX256:$xd), (v8i32 LASX256:$xj),
                    (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_W") LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4i64 LASX256:$xd), (v4i64 LASX256:$xj),
                    (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
}

multiclass PatShiftXrXr<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (and vsplati8_imm_eq_7,
                                              (v32i8 LASX256:$xk))),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (and vsplati16_imm_eq_15,
                                               (v16i16 LASX256:$xk))),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (and vsplati32_imm_eq_31,
                                              (v8i32 LASX256:$xk))),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (and vsplati64_imm_eq_63,
                                              (v4i64 LASX256:$xk))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}

multiclass PatShiftXrSplatUimm<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm3 uimm3:$imm))),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, uimm3:$imm)>;
  def : Pat<(OpNode (v16i16 LASX256:$xj), (v16i16 (SplatPat_uimm4 uimm4:$imm))),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, uimm4:$imm)>;
  def : Pat<(OpNode (v8i32 LASX256:$xj), (v8i32 (SplatPat_uimm5 uimm5:$imm))),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(OpNode (v4i64 LASX256:$xj), (v4i64 (SplatPat_uimm6 uimm6:$imm))),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, uimm6:$imm)>;
}

multiclass PatShiftXrUimm<SDPatternOperator OpNode, string Inst> {
  def : Pat<(OpNode (v32i8 LASX256:$vj), uimm3:$imm),
            (!cast<LAInst>(Inst#"_B") LASX256:$vj, uimm3:$imm)>;
  def : Pat<(OpNode (v16i16 LASX256:$vj), uimm4:$imm),
            (!cast<LAInst>(Inst#"_H") LASX256:$vj, uimm4:$imm)>;
  def : Pat<(OpNode (v8i32 LASX256:$vj), uimm5:$imm),
            (!cast<LAInst>(Inst#"_W") LASX256:$vj, uimm5:$imm)>;
  def : Pat<(OpNode (v4i64 LASX256:$vj), uimm6:$imm),
            (!cast<LAInst>(Inst#"_D") LASX256:$vj, uimm6:$imm)>;
}

multiclass PatCCXrSimm5<CondCode CC, string Inst> {
  def : Pat<(v32i8 (setcc (v32i8 LASX256:$xj),
                          (v32i8 (SplatPat_simm5 simm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, simm5:$imm)>;
  def : Pat<(v16i16 (setcc (v16i16 LASX256:$xj),
                           (v16i16 (SplatPat_simm5 simm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, simm5:$imm)>;
  def : Pat<(v8i32 (setcc (v8i32 LASX256:$xj),
                          (v8i32 (SplatPat_simm5 simm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, simm5:$imm)>;
  def : Pat<(v4i64 (setcc (v4i64 LASX256:$xj),
                          (v4i64 (SplatPat_simm5 simm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, simm5:$imm)>;
}

multiclass PatCCXrUimm5<CondCode CC, string Inst> {
  def : Pat<(v32i8 (setcc (v32i8 LASX256:$xj),
                          (v32i8 (SplatPat_uimm5 uimm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_BU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(v16i16 (setcc (v16i16 LASX256:$xj),
                           (v16i16 (SplatPat_uimm5 uimm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_HU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(v8i32 (setcc (v8i32 LASX256:$xj),
                          (v8i32 (SplatPat_uimm5 uimm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_WU") LASX256:$xj, uimm5:$imm)>;
  def : Pat<(v4i64 (setcc (v4i64 LASX256:$xj),
                          (v4i64 (SplatPat_uimm5 uimm5:$imm)), CC)),
            (!cast<LAInst>(Inst#"_DU") LASX256:$xj, uimm5:$imm)>;
}

multiclass PatCCXrXr<CondCode CC, string Inst> {
  def : Pat<(v32i8 (setcc (v32i8 LASX256:$xj), (v32i8 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_B") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v16i16 (setcc (v16i16 LASX256:$xj), (v16i16 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_H") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v8i32 (setcc (v8i32 LASX256:$xj), (v8i32 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_W") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v4i64 (setcc (v4i64 LASX256:$xj), (v4i64 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}

multiclass PatCCXrXrU<CondCode CC, string Inst> {
  def : Pat<(v32i8 (setcc (v32i8 LASX256:$xj), (v32i8 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_BU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v16i16 (setcc (v16i16 LASX256:$xj), (v16i16 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_HU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v8i32 (setcc (v8i32 LASX256:$xj), (v8i32 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_WU") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v4i64 (setcc (v4i64 LASX256:$xj), (v4i64 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_DU") LASX256:$xj, LASX256:$xk)>;
}

multiclass PatCCXrXrF<CondCode CC, string Inst> {
  def : Pat<(v8i32 (setcc (v8f32 LASX256:$xj), (v8f32 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_S") LASX256:$xj, LASX256:$xk)>;
  def : Pat<(v4i64 (setcc (v4f64 LASX256:$xj), (v4f64 LASX256:$xk), CC)),
            (!cast<LAInst>(Inst#"_D") LASX256:$xj, LASX256:$xk)>;
}

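// For XVEXTRINS, the destination element index is encoded in the high nibble
// of the immediate and the source element index in the low nibble, hence
// Imm = (imm2 << 4) | imm1 below. Matching the same index pair in both
// 128-bit halves lets a single XVEXTRINS cover both insertions.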
multiclass PairInsertExtractPatV8<ValueType vecty, ValueType elemty> {
  foreach imm1 = 0...3 in {
    foreach imm2 = 0...3 in {
      defvar Imm = !or(!shl(imm2, 4), imm1);
      def : Pat<(vector_insert (vector_insert vecty:$xd,
                    (elemty (vector_extract vecty:$xj, imm1)), imm2),
                    (elemty (vector_extract vecty:$xj, !add(imm1, 4))),
                    !add(imm2, 4)),
                (XVEXTRINS_W $xd, $xj, Imm)>;
    }
  }
}

multiclass PairInsertExtractPatV4<ValueType vecty, ValueType elemty> {
  foreach imm1 = 0...1 in {
    foreach imm2 = 0...1 in {
      defvar Imm = !or(!shl(imm2, 4), imm1);
      def : Pat<(vector_insert (vector_insert vecty:$xd,
                    (elemty (vector_extract vecty:$xj, imm1)), imm2),
                    (elemty (vector_extract vecty:$xj, !add(imm1, 2))),
                    !add(imm2, 2)),
                (XVEXTRINS_D $xd, $xj, Imm)>;
    }
  }
}

let Predicates = [HasExtLASX] in {

// XVADD_{B/H/W/D}
defm : PatXrXr<add, "XVADD">;
// XVSUB_{B/H/W/D}
defm : PatXrXr<sub, "XVSUB">;

// XVADDI_{B/H/W/D}U
defm : PatXrUimm5<add, "XVADDI">;
// XVSUBI_{B/H/W/D}U
defm : PatXrUimm5<sub, "XVSUBI">;

// XVNEG_{B/H/W/D}
def : Pat<(sub immAllZerosV, (v32i8 LASX256:$xj)), (XVNEG_B LASX256:$xj)>;
def : Pat<(sub immAllZerosV, (v16i16 LASX256:$xj)), (XVNEG_H LASX256:$xj)>;
def : Pat<(sub immAllZerosV, (v8i32 LASX256:$xj)), (XVNEG_W LASX256:$xj)>;
def : Pat<(sub immAllZerosV, (v4i64 LASX256:$xj)), (XVNEG_D LASX256:$xj)>;

// XVMAX[I]_{B/H/W/D}[U]
defm : PatXrXr<smax, "XVMAX">;
defm : PatXrXrU<umax, "XVMAX">;
defm : PatXrSimm5<smax, "XVMAXI">;
defm : PatXrUimm5<umax, "XVMAXI">;

// XVMIN[I]_{B/H/W/D}[U]
defm : PatXrXr<smin, "XVMIN">;
defm : PatXrXrU<umin, "XVMIN">;
defm : PatXrSimm5<smin, "XVMINI">;
defm : PatXrUimm5<umin, "XVMINI">;

// XVMUL_{B/H/W/D}
defm : PatXrXr<mul, "XVMUL">;

// XVMUH_{B/H/W/D}[U]
defm : PatXrXr<mulhs, "XVMUH">;
defm : PatXrXrU<mulhu, "XVMUH">;

// XVMADD_{B/H/W/D}
defm : PatXrXrXr<muladd, "XVMADD">;
// XVMSUB_{B/H/W/D}
defm : PatXrXrXr<mulsub, "XVMSUB">;

// XVDIV_{B/H/W/D}[U]
defm : PatXrXr<sdiv, "XVDIV">;
defm : PatXrXrU<udiv, "XVDIV">;

// XVMOD_{B/H/W/D}[U]
defm : PatXrXr<srem, "XVMOD">;
defm : PatXrXrU<urem, "XVMOD">;

// XVAND_V
foreach vt = [v32i8, v16i16, v8i32, v4i64] in
def : Pat<(and (vt LASX256:$xj), (vt LASX256:$xk)),
          (XVAND_V LASX256:$xj, LASX256:$xk)>;
// XVOR_V
foreach vt = [v32i8, v16i16, v8i32, v4i64] in
def : Pat<(or (vt LASX256:$xj), (vt LASX256:$xk)),
          (XVOR_V LASX256:$xj, LASX256:$xk)>;
// XVXOR_V
foreach vt = [v32i8, v16i16, v8i32, v4i64] in
def : Pat<(xor (vt LASX256:$xj), (vt LASX256:$xk)),
          (XVXOR_V LASX256:$xj, LASX256:$xk)>;
// XVNOR_V
foreach vt = [v32i8, v16i16, v8i32, v4i64] in
def : Pat<(vnot (or (vt LASX256:$xj), (vt LASX256:$xk))),
          (XVNOR_V LASX256:$xj, LASX256:$xk)>;

// XVANDI_B
def : Pat<(and (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm8 uimm8:$imm))),
          (XVANDI_B LASX256:$xj, uimm8:$imm)>;
// XVORI_B
def : Pat<(or (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm8 uimm8:$imm))),
          (XVORI_B LASX256:$xj, uimm8:$imm)>;

// XVXORI_B
def : Pat<(xor (v32i8 LASX256:$xj), (v32i8 (SplatPat_uimm8 uimm8:$imm))),
          (XVXORI_B LASX256:$xj, uimm8:$imm)>;

// XVBSLL_V
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32, v4f64] in
def : Pat<(loongarch_vbsll (vt LASX256:$xj), uimm5:$imm),
          (XVBSLL_V LASX256:$xj, uimm5:$imm)>;

// XVBSRL_V
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32, v4f64] in
def : Pat<(loongarch_vbsrl (vt LASX256:$xj), uimm5:$imm),
          (XVBSRL_V LASX256:$xj, uimm5:$imm)>;

// XVSLL[I]_{B/H/W/D}
defm : PatXrXr<shl, "XVSLL">;
defm : PatShiftXrXr<shl, "XVSLL">;
defm : PatShiftXrSplatUimm<shl, "XVSLLI">;
defm : PatShiftXrUimm<loongarch_vslli, "XVSLLI">;

// XVSRL[I]_{B/H/W/D}
defm : PatXrXr<srl, "XVSRL">;
defm : PatShiftXrXr<srl, "XVSRL">;
defm : PatShiftXrSplatUimm<srl, "XVSRLI">;
defm : PatShiftXrUimm<loongarch_vsrli, "XVSRLI">;

// XVSRA[I]_{B/H/W/D}
defm : PatXrXr<sra, "XVSRA">;
defm : PatShiftXrXr<sra, "XVSRA">;
defm : PatShiftXrSplatUimm<sra, "XVSRAI">;

// XVCLZ_{B/H/W/D}
defm : PatXr<ctlz, "XVCLZ">;

// XVPCNT_{B/H/W/D}
defm : PatXr<ctpop, "XVPCNT">;

// XVBITCLR_{B/H/W/D}
def : Pat<(and v32i8:$xj, (vnot (shl vsplat_imm_eq_1, v32i8:$xk))),
          (v32i8 (XVBITCLR_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(and v16i16:$xj, (vnot (shl vsplat_imm_eq_1, v16i16:$xk))),
          (v16i16 (XVBITCLR_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(and v8i32:$xj, (vnot (shl vsplat_imm_eq_1, v8i32:$xk))),
          (v8i32 (XVBITCLR_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(and v4i64:$xj, (vnot (shl vsplat_imm_eq_1, v4i64:$xk))),
          (v4i64 (XVBITCLR_D v4i64:$xj, v4i64:$xk))>;
def : Pat<(and v32i8:$xj, (vnot (shl vsplat_imm_eq_1,
                                     (vsplati8imm7 v32i8:$xk)))),
          (v32i8 (XVBITCLR_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(and v16i16:$xj, (vnot (shl vsplat_imm_eq_1,
                                     (vsplati16imm15 v16i16:$xk)))),
          (v16i16 (XVBITCLR_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(and v8i32:$xj, (vnot (shl vsplat_imm_eq_1,
                                     (vsplati32imm31 v8i32:$xk)))),
          (v8i32 (XVBITCLR_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(and v4i64:$xj, (vnot (shl vsplat_imm_eq_1,
                                     (vsplati64imm63 v4i64:$xk)))),
          (v4i64 (XVBITCLR_D v4i64:$xj, v4i64:$xk))>;

// XVBITCLRI_{B/H/W/D}
def : Pat<(and (v32i8 LASX256:$xj), (v32i8 (vsplat_uimm_inv_pow2 uimm3:$imm))),
          (XVBITCLRI_B LASX256:$xj, uimm3:$imm)>;
def : Pat<(and (v16i16 LASX256:$xj), (v16i16 (vsplat_uimm_inv_pow2 uimm4:$imm))),
          (XVBITCLRI_H LASX256:$xj, uimm4:$imm)>;
def : Pat<(and (v8i32 LASX256:$xj), (v8i32 (vsplat_uimm_inv_pow2 uimm5:$imm))),
          (XVBITCLRI_W LASX256:$xj, uimm5:$imm)>;
def : Pat<(and (v4i64 LASX256:$xj), (v4i64 (vsplat_uimm_inv_pow2 uimm6:$imm))),
          (XVBITCLRI_D LASX256:$xj, uimm6:$imm)>;

// XVBITSET_{B/H/W/D}
def : Pat<(or v32i8:$xj, (shl vsplat_imm_eq_1, v32i8:$xk)),
          (v32i8 (XVBITSET_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(or v16i16:$xj, (shl vsplat_imm_eq_1, v16i16:$xk)),
          (v16i16 (XVBITSET_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(or v8i32:$xj, (shl vsplat_imm_eq_1, v8i32:$xk)),
          (v8i32 (XVBITSET_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(or v4i64:$xj, (shl vsplat_imm_eq_1, v4i64:$xk)),
          (v4i64 (XVBITSET_D v4i64:$xj, v4i64:$xk))>;
def : Pat<(or v32i8:$xj, (shl vsplat_imm_eq_1, (vsplati8imm7 v32i8:$xk))),
          (v32i8 (XVBITSET_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(or v16i16:$xj, (shl vsplat_imm_eq_1, (vsplati16imm15 v16i16:$xk))),
          (v16i16 (XVBITSET_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(or v8i32:$xj, (shl vsplat_imm_eq_1, (vsplati32imm31 v8i32:$xk))),
          (v8i32 (XVBITSET_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(or v4i64:$xj, (shl vsplat_imm_eq_1, (vsplati64imm63 v4i64:$xk))),
          (v4i64 (XVBITSET_D v4i64:$xj, v4i64:$xk))>;

// XVBITSETI_{B/H/W/D}
def : Pat<(or (v32i8 LASX256:$xj), (v32i8 (vsplat_uimm_pow2 uimm3:$imm))),
          (XVBITSETI_B LASX256:$xj, uimm3:$imm)>;
def : Pat<(or (v16i16 LASX256:$xj), (v16i16 (vsplat_uimm_pow2 uimm4:$imm))),
          (XVBITSETI_H LASX256:$xj, uimm4:$imm)>;
def : Pat<(or (v8i32 LASX256:$xj), (v8i32 (vsplat_uimm_pow2 uimm5:$imm))),
          (XVBITSETI_W LASX256:$xj, uimm5:$imm)>;
def : Pat<(or (v4i64 LASX256:$xj), (v4i64 (vsplat_uimm_pow2 uimm6:$imm))),
          (XVBITSETI_D LASX256:$xj, uimm6:$imm)>;

// XVBITREV_{B/H/W/D}
def : Pat<(xor v32i8:$xj, (shl vsplat_imm_eq_1, v32i8:$xk)),
          (v32i8 (XVBITREV_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(xor v16i16:$xj, (shl vsplat_imm_eq_1, v16i16:$xk)),
          (v16i16 (XVBITREV_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(xor v8i32:$xj, (shl vsplat_imm_eq_1, v8i32:$xk)),
          (v8i32 (XVBITREV_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(xor v4i64:$xj, (shl vsplat_imm_eq_1, v4i64:$xk)),
          (v4i64 (XVBITREV_D v4i64:$xj, v4i64:$xk))>;
def : Pat<(xor v32i8:$xj, (shl vsplat_imm_eq_1, (vsplati8imm7 v32i8:$xk))),
          (v32i8 (XVBITREV_B v32i8:$xj, v32i8:$xk))>;
def : Pat<(xor v16i16:$xj, (shl vsplat_imm_eq_1, (vsplati16imm15 v16i16:$xk))),
          (v16i16 (XVBITREV_H v16i16:$xj, v16i16:$xk))>;
def : Pat<(xor v8i32:$xj, (shl vsplat_imm_eq_1, (vsplati32imm31 v8i32:$xk))),
          (v8i32 (XVBITREV_W v8i32:$xj, v8i32:$xk))>;
def : Pat<(xor v4i64:$xj, (shl vsplat_imm_eq_1, (vsplati64imm63 v4i64:$xk))),
          (v4i64 (XVBITREV_D v4i64:$xj, v4i64:$xk))>;

// XVBITREVI_{B/H/W/D}
def : Pat<(xor (v32i8 LASX256:$xj), (v32i8 (vsplat_uimm_pow2 uimm3:$imm))),
          (XVBITREVI_B LASX256:$xj, uimm3:$imm)>;
def : Pat<(xor (v16i16 LASX256:$xj), (v16i16 (vsplat_uimm_pow2 uimm4:$imm))),
          (XVBITREVI_H LASX256:$xj, uimm4:$imm)>;
def : Pat<(xor (v8i32 LASX256:$xj), (v8i32 (vsplat_uimm_pow2 uimm5:$imm))),
          (XVBITREVI_W LASX256:$xj, uimm5:$imm)>;
def : Pat<(xor (v4i64 LASX256:$xj), (v4i64 (vsplat_uimm_pow2 uimm6:$imm))),
          (XVBITREVI_D LASX256:$xj, uimm6:$imm)>;

// Vector bswaps
def : Pat<(bswap (v16i16 LASX256:$xj)), (XVSHUF4I_B LASX256:$xj, 0b10110001)>;
def : Pat<(bswap (v8i32 LASX256:$xj)), (XVSHUF4I_B LASX256:$xj, 0b00011011)>;
def : Pat<(bswap (v4i64 LASX256:$xj)),
          (XVSHUF4I_W (XVSHUF4I_B LASX256:$xj, 0b00011011), 0b10110001)>;

// XVFADD_{S/D}
defm : PatXrXrF<fadd, "XVFADD">;

// XVFSUB_{S/D}
defm : PatXrXrF<fsub, "XVFSUB">;

// XVFMUL_{S/D}
defm : PatXrXrF<fmul, "XVFMUL">;

// XVFDIV_{S/D}
defm : PatXrXrF<fdiv, "XVFDIV">;

// XVFMADD_{S/D}
def : Pat<(fma v8f32:$xj, v8f32:$xk, v8f32:$xa),
          (XVFMADD_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fma v4f64:$xj, v4f64:$xk, v4f64:$xa),
          (XVFMADD_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;

// XVFMSUB_{S/D}
def : Pat<(fma v8f32:$xj, v8f32:$xk, (fneg v8f32:$xa)),
          (XVFMSUB_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fma v4f64:$xj, v4f64:$xk, (fneg v4f64:$xa)),
          (XVFMSUB_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;

// XVFNMADD_{S/D}
def : Pat<(fneg (fma v8f32:$xj, v8f32:$xk, v8f32:$xa)),
          (XVFNMADD_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fneg (fma v4f64:$xj, v4f64:$xk, v4f64:$xa)),
          (XVFNMADD_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;
def : Pat<(fma_nsz (fneg v8f32:$xj), v8f32:$xk, (fneg v8f32:$xa)),
          (XVFNMADD_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fma_nsz (fneg v4f64:$xj), v4f64:$xk, (fneg v4f64:$xa)),
          (XVFNMADD_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;

// XVFNMSUB_{S/D}
def : Pat<(fneg (fma v8f32:$xj, v8f32:$xk, (fneg v8f32:$xa))),
          (XVFNMSUB_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fneg (fma v4f64:$xj, v4f64:$xk, (fneg v4f64:$xa))),
          (XVFNMSUB_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;
def : Pat<(fma_nsz (fneg v8f32:$xj), v8f32:$xk, v8f32:$xa),
          (XVFNMSUB_S v8f32:$xj, v8f32:$xk, v8f32:$xa)>;
def : Pat<(fma_nsz (fneg v4f64:$xj), v4f64:$xk, v4f64:$xa),
          (XVFNMSUB_D v4f64:$xj, v4f64:$xk, v4f64:$xa)>;

// XVFSQRT_{S/D}
defm : PatXrF<fsqrt, "XVFSQRT">;

// XVRECIP_{S/D}
def : Pat<(fdiv vsplatf32_fpimm_eq_1, v8f32:$xj),
          (XVFRECIP_S v8f32:$xj)>;
def : Pat<(fdiv vsplatf64_fpimm_eq_1, v4f64:$xj),
          (XVFRECIP_D v4f64:$xj)>;

// XVFRSQRT_{S/D}
def : Pat<(fdiv vsplatf32_fpimm_eq_1, (fsqrt v8f32:$xj)),
          (XVFRSQRT_S v8f32:$xj)>;
def : Pat<(fdiv vsplatf64_fpimm_eq_1, (fsqrt v4f64:$xj)),
          (XVFRSQRT_D v4f64:$xj)>;

// XVSEQ[I]_{B/H/W/D}
defm : PatCCXrSimm5<SETEQ, "XVSEQI">;
defm : PatCCXrXr<SETEQ, "XVSEQ">;

// XVSLE[I]_{B/H/W/D}[U]
defm : PatCCXrSimm5<SETLE, "XVSLEI">;
defm : PatCCXrUimm5<SETULE, "XVSLEI">;
defm : PatCCXrXr<SETLE, "XVSLE">;
defm : PatCCXrXrU<SETULE, "XVSLE">;

// XVSLT[I]_{B/H/W/D}[U]
defm : PatCCXrSimm5<SETLT, "XVSLTI">;
defm : PatCCXrUimm5<SETULT, "XVSLTI">;
defm : PatCCXrXr<SETLT, "XVSLT">;
defm : PatCCXrXrU<SETULT, "XVSLT">;

// XVFCMP.cond.{S/D}
defm : PatCCXrXrF<SETEQ, "XVFCMP_CEQ">;
defm : PatCCXrXrF<SETOEQ, "XVFCMP_CEQ">;
defm : PatCCXrXrF<SETUEQ, "XVFCMP_CUEQ">;

defm : PatCCXrXrF<SETLE, "XVFCMP_CLE">;
defm : PatCCXrXrF<SETOLE, "XVFCMP_CLE">;
defm : PatCCXrXrF<SETULE, "XVFCMP_CULE">;

defm : PatCCXrXrF<SETLT, "XVFCMP_CLT">;
defm : PatCCXrXrF<SETOLT, "XVFCMP_CLT">;
defm : PatCCXrXrF<SETULT, "XVFCMP_CULT">;

defm : PatCCXrXrF<SETNE, "XVFCMP_CNE">;
defm : PatCCXrXrF<SETONE, "XVFCMP_CNE">;
defm : PatCCXrXrF<SETUNE, "XVFCMP_CUNE">;

defm : PatCCXrXrF<SETO, "XVFCMP_COR">;
defm : PatCCXrXrF<SETUO, "XVFCMP_CUN">;

// Insert two elements extracted from one vector into another vector. (The two
// elements must occupy the same positions within the low and high 128-bit
// halves of both the source and the destination vector.)
// 2*XVPICKVE2GR_{W/D} + 2*XVINSGR2VR_{W/D} -> XVEXTRINS_{W/D}
// XVPERMI_D + 2*XVPICKVE2GR_{B/H} + 2*PseudoXVINSGR2VR_{B/H} -> XVEXTRINS_{B/H}
foreach imm1 = 0...15 in {
  foreach imm2 = 0...15 in {
    defvar Imm = !or(!shl(imm2, 4), imm1);
    def : Pat<(vector_insert (vector_insert v32i8:$xd,
                  (GRLenVT (vector_extract v32i8:$xj, imm1)), imm2),
                  (GRLenVT (vector_extract v32i8:$xj, !add(imm1, 16))),
                  !add(imm2, 16)),
              (XVEXTRINS_B $xd, $xj, Imm)>;
  }
}

foreach imm1 = 0...7 in {
  foreach imm2 = 0...7 in {
    defvar Imm = !or(!shl(imm2, 4), imm1);
    def : Pat<(vector_insert (vector_insert v16i16:$xd,
                  (GRLenVT (vector_extract v16i16:$xj, imm1)), imm2),
                  (GRLenVT (vector_extract v16i16:$xj, !add(imm1, 8))),
                  !add(imm2, 8)),
              (XVEXTRINS_H $xd, $xj, Imm)>;
  }
}

defm : PairInsertExtractPatV8<v8i32, GRLenVT>;
defm : PairInsertExtractPatV8<v8f32, f32>;
defm : PairInsertExtractPatV4<v4i64, GRLenVT>;
defm : PairInsertExtractPatV4<v4f64, f64>;

// PseudoXVINSGR2VR_{B/H}
def : Pat<(vector_insert v32i8:$xd, GRLenVT:$rj, uimm5:$imm),
          (PseudoXVINSGR2VR_B v32i8:$xd, GRLenVT:$rj, uimm5:$imm)>;
def : Pat<(vector_insert v16i16:$xd, GRLenVT:$rj, uimm4:$imm),
          (PseudoXVINSGR2VR_H v16i16:$xd, GRLenVT:$rj, uimm4:$imm)>;

// XVINSGR2VR_{W/D}
def : Pat<(vector_insert v8i32:$xd, GRLenVT:$rj, uimm3:$imm),
          (XVINSGR2VR_W v8i32:$xd, GRLenVT:$rj, uimm3:$imm)>;
def : Pat<(vector_insert v4i64:$xd, GRLenVT:$rj, uimm2:$imm),
          (XVINSGR2VR_D v4i64:$xd, GRLenVT:$rj, uimm2:$imm)>;
def : Pat<(vector_insert v8f32:$vd, (loongarch_movgr2fr_w_la64 GPR:$rj), uimm3:$imm),
          (XVINSGR2VR_W $vd, $rj, uimm3:$imm)>;
def : Pat<(vector_insert v4f64:$vd, (f64 (bitconvert i64:$rj)), uimm2:$imm),
          (XVINSGR2VR_D $vd, $rj, uimm2:$imm)>;
def : Pat<(vector_insert v8f32:$xd, (f32 (vector_extract v8f32:$xj, uimm3:$imm1)), uimm3:$imm2),
          (XVINSGR2VR_W $xd, (XVPICKVE2GR_W v8f32:$xj, uimm3:$imm1), uimm3:$imm2)>;
def : Pat<(vector_insert v4f64:$xd, (f64 (vector_extract v4f64:$xj, uimm2:$imm1)), uimm2:$imm2),
          (XVINSGR2VR_D $xd, (XVPICKVE2GR_D v4f64:$xj, uimm2:$imm1), uimm2:$imm2)>;
def : Pat<(vector_insert v8f32:$xd, FPR32:$fj, uimm3:$imm),
          (XVINSGR2VR_W $xd, (COPY_TO_REGCLASS FPR32:$fj, GPR), uimm3:$imm)>;
def : Pat<(vector_insert v4f64:$xd, FPR64:$fj, uimm2:$imm),
          (XVINSGR2VR_D $xd, (COPY_TO_REGCLASS FPR64:$fj, GPR), uimm2:$imm)>;

// scalar_to_vector
def : Pat<(v8f32 (scalar_to_vector FPR32:$fj)),
          (SUBREG_TO_REG (i64 0), FPR32:$fj, sub_32)>;
def : Pat<(v4f64 (scalar_to_vector FPR64:$fj)),
          (SUBREG_TO_REG (i64 0), FPR64:$fj, sub_64)>;

// XVPICKVE2GR_W[U]
def : Pat<(loongarch_vpick_sext_elt v8i32:$xd, uimm3:$imm, i32),
          (XVPICKVE2GR_W v8i32:$xd, uimm3:$imm)>;
def : Pat<(loongarch_vpick_zext_elt v8i32:$xd, uimm3:$imm, i32),
          (XVPICKVE2GR_WU v8i32:$xd, uimm3:$imm)>;

// XVREPLGR2VR_{B/H/W/D}
def : Pat<(lasxsplati8 GPR:$rj), (XVREPLGR2VR_B GPR:$rj)>;
def : Pat<(lasxsplati16 GPR:$rj), (XVREPLGR2VR_H GPR:$rj)>;
def : Pat<(lasxsplati32 GPR:$rj), (XVREPLGR2VR_W GPR:$rj)>;
def : Pat<(lasxsplati64 GPR:$rj), (XVREPLGR2VR_D GPR:$rj)>;

def : Pat<(v32i8 (loongarch_vreplgr2vr GRLenVT:$rj)),
          (v32i8 (XVREPLGR2VR_B GRLenVT:$rj))>;
def : Pat<(v16i16 (loongarch_vreplgr2vr GRLenVT:$rj)),
          (v16i16 (XVREPLGR2VR_H GRLenVT:$rj))>;
def : Pat<(v8i32 (loongarch_vreplgr2vr GRLenVT:$rj)),
          (v8i32 (XVREPLGR2VR_W GRLenVT:$rj))>;
def : Pat<(v4i64 (loongarch_vreplgr2vr GRLenVT:$rj)),
          (v4i64 (XVREPLGR2VR_D GRLenVT:$rj))>;

// XVREPLVE_{B/H/W/D}
def : Pat<(loongarch_vreplve v32i8:$xj, GRLenVT:$rk),
          (XVREPLVE_B v32i8:$xj, GRLenVT:$rk)>;
def : Pat<(loongarch_vreplve v16i16:$xj, GRLenVT:$rk),
          (XVREPLVE_H v16i16:$xj, GRLenVT:$rk)>;
def : Pat<(loongarch_vreplve v8i32:$xj, GRLenVT:$rk),
          (XVREPLVE_W v8i32:$xj, GRLenVT:$rk)>;
def : Pat<(loongarch_vreplve v4i64:$xj, GRLenVT:$rk),
          (XVREPLVE_D v4i64:$xj, GRLenVT:$rk)>;

// XVSHUF_{B/H/W/D}
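// Note that XVSHUF_B takes the control vector as its last operand, while the
// _H/_W/_D forms take it as their first operand.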
def : Pat<(loongarch_vshuf v32i8:$xa, v32i8:$xj, v32i8:$xk),
          (XVSHUF_B v32i8:$xj, v32i8:$xk, v32i8:$xa)>;
def : Pat<(loongarch_vshuf v16i16:$xd, v16i16:$xj, v16i16:$xk),
          (XVSHUF_H v16i16:$xd, v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vshuf v8i32:$xd, v8i32:$xj, v8i32:$xk),
          (XVSHUF_W v8i32:$xd, v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vshuf v4i64:$xd, v4i64:$xj, v4i64:$xk),
          (XVSHUF_D v4i64:$xd, v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vshuf v8i32:$xd, v8f32:$xj, v8f32:$xk),
          (XVSHUF_W v8i32:$xd, v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vshuf v4i64:$xd, v4f64:$xj, v4f64:$xk),
          (XVSHUF_D v4i64:$xd, v4f64:$xj, v4f64:$xk)>;

// XVPICKEV_{B/H/W/D}
def : Pat<(loongarch_vpickev v32i8:$xj, v32i8:$xk),
          (XVPICKEV_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vpickev v16i16:$xj, v16i16:$xk),
          (XVPICKEV_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vpickev v8i32:$xj, v8i32:$xk),
          (XVPICKEV_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vpickev v4i64:$xj, v4i64:$xk),
          (XVPICKEV_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vpickev v8f32:$xj, v8f32:$xk),
          (XVPICKEV_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vpickev v4f64:$xj, v4f64:$xk),
          (XVPICKEV_D v4f64:$xj, v4f64:$xk)>;

// XVPICKOD_{B/H/W/D}
def : Pat<(loongarch_vpickod v32i8:$xj, v32i8:$xk),
          (XVPICKOD_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vpickod v16i16:$xj, v16i16:$xk),
          (XVPICKOD_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vpickod v8i32:$xj, v8i32:$xk),
          (XVPICKOD_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vpickod v4i64:$xj, v4i64:$xk),
          (XVPICKOD_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vpickod v8f32:$xj, v8f32:$xk),
          (XVPICKOD_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vpickod v4f64:$xj, v4f64:$xk),
          (XVPICKOD_D v4f64:$xj, v4f64:$xk)>;

// XVPACKEV_{B/H/W/D}
def : Pat<(loongarch_vpackev v32i8:$xj, v32i8:$xk),
          (XVPACKEV_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vpackev v16i16:$xj, v16i16:$xk),
          (XVPACKEV_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vpackev v8i32:$xj, v8i32:$xk),
          (XVPACKEV_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vpackev v4i64:$xj, v4i64:$xk),
          (XVPACKEV_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vpackev v8f32:$xj, v8f32:$xk),
          (XVPACKEV_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vpackev v4f64:$xj, v4f64:$xk),
          (XVPACKEV_D v4f64:$xj, v4f64:$xk)>;

// XVPACKOD_{B/H/W/D}
def : Pat<(loongarch_vpackod v32i8:$xj, v32i8:$xk),
          (XVPACKOD_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vpackod v16i16:$xj, v16i16:$xk),
          (XVPACKOD_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vpackod v8i32:$xj, v8i32:$xk),
          (XVPACKOD_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vpackod v4i64:$xj, v4i64:$xk),
          (XVPACKOD_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vpackod v8f32:$xj, v8f32:$xk),
          (XVPACKOD_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vpackod v4f64:$xj, v4f64:$xk),
          (XVPACKOD_D v4f64:$xj, v4f64:$xk)>;

// XVILVL_{B/H/W/D}
def : Pat<(loongarch_vilvl v32i8:$xj, v32i8:$xk),
          (XVILVL_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vilvl v16i16:$xj, v16i16:$xk),
          (XVILVL_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vilvl v8i32:$xj, v8i32:$xk),
          (XVILVL_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vilvl v4i64:$xj, v4i64:$xk),
          (XVILVL_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vilvl v8f32:$xj, v8f32:$xk),
          (XVILVL_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vilvl v4f64:$xj, v4f64:$xk),
          (XVILVL_D v4f64:$xj, v4f64:$xk)>;

// XVILVH_{B/H/W/D}
def : Pat<(loongarch_vilvh v32i8:$xj, v32i8:$xk),
          (XVILVH_B v32i8:$xj, v32i8:$xk)>;
def : Pat<(loongarch_vilvh v16i16:$xj, v16i16:$xk),
          (XVILVH_H v16i16:$xj, v16i16:$xk)>;
def : Pat<(loongarch_vilvh v8i32:$xj, v8i32:$xk),
          (XVILVH_W v8i32:$xj, v8i32:$xk)>;
def : Pat<(loongarch_vilvh v4i64:$xj, v4i64:$xk),
          (XVILVH_D v4i64:$xj, v4i64:$xk)>;
def : Pat<(loongarch_vilvh v8f32:$xj, v8f32:$xk),
          (XVILVH_W v8f32:$xj, v8f32:$xk)>;
def : Pat<(loongarch_vilvh v4f64:$xj, v4f64:$xk),
          (XVILVH_D v4f64:$xj, v4f64:$xk)>;

// XVSHUF4I_{B/H/W}
def : Pat<(loongarch_vshuf4i v32i8:$xj, immZExt8:$ui8),
          (XVSHUF4I_B v32i8:$xj, immZExt8:$ui8)>;
def : Pat<(loongarch_vshuf4i v16i16:$xj, immZExt8:$ui8),
          (XVSHUF4I_H v16i16:$xj, immZExt8:$ui8)>;
def : Pat<(loongarch_vshuf4i v8i32:$xj, immZExt8:$ui8),
          (XVSHUF4I_W v8i32:$xj, immZExt8:$ui8)>;
def : Pat<(loongarch_vshuf4i v8f32:$xj, immZExt8:$ui8),
          (XVSHUF4I_W v8f32:$xj, immZExt8:$ui8)>;
def : Pat<(loongarch_vshuf4i_d v4i64:$xj, v4i64:$xk, immZExt8:$ui8),
          (XVSHUF4I_D v4i64:$xj, v4i64:$xk, immZExt8:$ui8)>;
def : Pat<(loongarch_vshuf4i_d v4f64:$xj, v4f64:$xk, immZExt8:$ui8),
          (XVSHUF4I_D v4f64:$xj, v4f64:$xk, immZExt8:$ui8)>;

// XVREPL128VEI_{B/H/W/D}
def : Pat<(loongarch_vreplvei v32i8:$xj, immZExt4:$ui4),
          (XVREPL128VEI_B v32i8:$xj, immZExt4:$ui4)>;
def : Pat<(loongarch_vreplvei v16i16:$xj, immZExt3:$ui3),
          (XVREPL128VEI_H v16i16:$xj, immZExt3:$ui3)>;
def : Pat<(loongarch_vreplvei v8i32:$xj, immZExt2:$ui2),
          (XVREPL128VEI_W v8i32:$xj, immZExt2:$ui2)>;
def : Pat<(loongarch_vreplvei v4i64:$xj, immZExt1:$ui1),
          (XVREPL128VEI_D v4i64:$xj, immZExt1:$ui1)>;
def : Pat<(loongarch_vreplvei v8f32:$xj, immZExt2:$ui2),
          (XVREPL128VEI_W v8f32:$xj, immZExt2:$ui2)>;
def : Pat<(loongarch_vreplvei v4f64:$xj, immZExt1:$ui1),
          (XVREPL128VEI_D v4f64:$xj, immZExt1:$ui1)>;

// XVPERMI_D
def : Pat<(loongarch_xvpermi v4i64:$xj, immZExt8:$ui8),
          (XVPERMI_D v4i64:$xj, immZExt8:$ui8)>;
def : Pat<(loongarch_xvpermi v4f64:$xj, immZExt8:$ui8),
          (XVPERMI_D v4f64:$xj, immZExt8:$ui8)>;

// XVREPLVE0_{W/D}
def : Pat<(lasxsplatf32 FPR32:$fj),
          (XVREPLVE0_W (SUBREG_TO_REG (i64 0), FPR32:$fj, sub_32))>;
def : Pat<(lasxsplatf64 FPR64:$fj),
          (XVREPLVE0_D (SUBREG_TO_REG (i64 0), FPR64:$fj, sub_64))>;

// VSTELM
defm : VstelmPat<truncstorei8, v32i8, XVSTELM_B, simm8, uimm5>;
defm : VstelmPat<truncstorei16, v16i16, XVSTELM_H, simm8_lsl1, uimm4>;
defm : VstelmPat<truncstorei32, v8i32, XVSTELM_W, simm8_lsl2, uimm3>;
defm : VstelmPat<store, v4i64, XVSTELM_D, simm8_lsl3, uimm2>;
defm : VstelmPat<store, v8f32, XVSTELM_W, simm8_lsl2, uimm3, f32>;
defm : VstelmPat<store, v4f64, XVSTELM_D, simm8_lsl3, uimm2, f64>;

// Loads/Stores
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32, v4f64] in {
  defm : LdPat<load, XVLD, vt>;
  def  : RegRegLdPat<load, XVLDX, vt>;
  defm : StPat<store, XVST, LASX256, vt>;
  def  : RegRegStPat<store, XVSTX, LASX256, vt>;
}

// Bitcast a float/double element extracted from a vector to an integer.
def : Pat<(loongarch_movfr2gr_s_la64 (f32 (vector_extract v8f32:$xj, uimm3:$imm))),
          (XVPICKVE2GR_W v8f32:$xj, uimm3:$imm)>;
def : Pat<(i64 (bitconvert (f64 (vector_extract v4f64:$xj, uimm2:$imm)))),
          (XVPICKVE2GR_D v4f64:$xj, uimm2:$imm)>;

// Vector extraction with constant index.
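// Byte and halfword elements cannot be read directly from the high 128-bit
// half, so XVPERMI_D with immediate 14 first moves that half into the low
// 128 bits; the element is then extracted from the LSX subregister using the
// in-half index (imm & 15 or imm & 7).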
foreach imm = 16...31 in {
  defvar Imm = !and(imm, 15);
  def : Pat<(i64 (vector_extract v32i8:$xj, imm)),
            (VPICKVE2GR_B (EXTRACT_SUBREG (XVPERMI_D v32i8:$xj, 14), sub_128),
                Imm)>;
}
foreach imm = 8...15 in {
  defvar Imm = !and(imm, 7);
  def : Pat<(i64 (vector_extract v16i16:$xj, imm)),
            (VPICKVE2GR_H (EXTRACT_SUBREG (XVPERMI_D v16i16:$xj, 14), sub_128),
                Imm)>;
}
def : Pat<(i64 (vector_extract v32i8:$xj, uimm4:$imm)),
          (VPICKVE2GR_B (EXTRACT_SUBREG v32i8:$xj, sub_128), uimm4:$imm)>;
def : Pat<(i64 (vector_extract v16i16:$xj, uimm3:$imm)),
          (VPICKVE2GR_H (EXTRACT_SUBREG v16i16:$xj, sub_128), uimm3:$imm)>;
def : Pat<(i64 (vector_extract v8i32:$xj, uimm3:$imm)),
          (XVPICKVE2GR_W v8i32:$xj, uimm3:$imm)>;
def : Pat<(i64 (vector_extract v4i64:$xj, uimm2:$imm)),
          (XVPICKVE2GR_D v4i64:$xj, uimm2:$imm)>;
def : Pat<(f32 (vector_extract v8f32:$xj, uimm3:$imm)),
          (MOVGR2FR_W (XVPICKVE2GR_W v8f32:$xj, uimm3:$imm))>;
def : Pat<(f64 (vector_extract v4f64:$xj, uimm2:$imm)),
          (MOVGR2FR_D (XVPICKVE2GR_D v4f64:$xj, uimm2:$imm))>;

// vselect
def : Pat<(v32i8 (vselect LASX256:$xd, (v32i8 (SplatPat_uimm8 uimm8:$imm)),
                          LASX256:$xj)),
          (XVBITSELI_B LASX256:$xd, LASX256:$xj, uimm8:$imm)>;
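// The vselect condition is used as a per-bit mask: XVBITSEL_V takes bits from
// its second operand where the mask (last operand) is set and from its first
// operand otherwise, so the false value is passed first and the true value
// second.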
foreach vt = [v32i8, v16i16, v8i32, v4i64, v8f32, v4f64] in
  def  : Pat<(vt (vselect LASX256:$xa, LASX256:$xk, LASX256:$xj)),
             (XVBITSEL_V LASX256:$xj, LASX256:$xk, LASX256:$xa)>;

// fneg
def : Pat<(fneg (v8f32 LASX256:$xj)), (XVBITREVI_W LASX256:$xj, 31)>;
def : Pat<(fneg (v4f64 LASX256:$xj)), (XVBITREVI_D LASX256:$xj, 63)>;

// XVFFINT_{S_W/D_L}
def : Pat<(v8f32 (sint_to_fp v8i32:$vj)), (XVFFINT_S_W v8i32:$vj)>;
def : Pat<(v4f64 (sint_to_fp v4i64:$vj)), (XVFFINT_D_L v4i64:$vj)>;
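// Mixed-width conversions: v4i32 -> v4f64 sign-extends the words to v4i64
// (VEXT2XV_D_W) before converting, while v4i64 -> v4f32 converts to v4f64 and
// then narrows with XVFCVT_S_D, leaving the v4f32 result in the low 128 bits.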
def : Pat<(v4f64 (sint_to_fp v4i32:$vj)),
          (XVFFINT_D_L (VEXT2XV_D_W (SUBREG_TO_REG (i64 0), v4i32:$vj,
                                                   sub_128)))>;
def : Pat<(v4f32 (sint_to_fp v4i64:$vj)),
          (EXTRACT_SUBREG (XVFCVT_S_D (XVPERMI_D (XVFFINT_D_L v4i64:$vj), 238),
                                      (XVFFINT_D_L v4i64:$vj)),
                          sub_128)>;

// XVFFINT_{S_WU/D_LU}
def : Pat<(v8f32 (uint_to_fp v8i32:$vj)), (XVFFINT_S_WU v8i32:$vj)>;
def : Pat<(v4f64 (uint_to_fp v4i64:$vj)), (XVFFINT_D_LU v4i64:$vj)>;
def : Pat<(v4f64 (uint_to_fp v4i32:$vj)),
          (XVFFINT_D_LU (VEXT2XV_DU_WU (SUBREG_TO_REG (i64 0), v4i32:$vj,
                                                      sub_128)))>;
def : Pat<(v4f32 (uint_to_fp v4i64:$vj)),
          (EXTRACT_SUBREG (XVFCVT_S_D (XVPERMI_D (XVFFINT_D_LU v4i64:$vj), 238),
                                       (XVFFINT_D_LU v4i64:$vj)),
                          sub_128)>;

// XVFTINTRZ_{W_S/L_D}
def : Pat<(v8i32 (fp_to_sint v8f32:$vj)), (XVFTINTRZ_W_S v8f32:$vj)>;
def : Pat<(v4i64 (fp_to_sint v4f64:$vj)), (XVFTINTRZ_L_D v4f64:$vj)>;
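// The mixed-width cases are handled similarly: v4f32 -> v4i64 widens the
// 128-bit VFTINTRZ_W_S result with VEXT2XV_D_W, while v4f64 -> v4i32 converts
// to v4i64 and packs the even words into the low 128 bits with XVPICKEV_W.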
def : Pat<(v4i64 (fp_to_sint v4f32:$vj)),
          (VEXT2XV_D_W (SUBREG_TO_REG (i64 0), (VFTINTRZ_W_S v4f32:$vj),
                                       sub_128))>;
def : Pat<(v4i32 (fp_to_sint v4f64:$vj)),
          (EXTRACT_SUBREG (XVPICKEV_W (XVPERMI_D (XVFTINTRZ_L_D v4f64:$vj), 238),
                                      (XVFTINTRZ_L_D v4f64:$vj)),
                          sub_128)>;

// XVFTINTRZ_{W_SU/L_DU}
def : Pat<(v8i32 (fp_to_uint v8f32:$vj)), (XVFTINTRZ_WU_S v8f32:$vj)>;
def : Pat<(v4i64 (fp_to_uint v4f64:$vj)), (XVFTINTRZ_LU_D v4f64:$vj)>;
def : Pat<(v4i64 (fp_to_uint v4f32:$vj)),
          (VEXT2XV_DU_WU (SUBREG_TO_REG (i64 0), (VFTINTRZ_WU_S v4f32:$vj),
                                         sub_128))>;
def : Pat<(v4i32 (fp_to_uint v4f64:$vj)),
          (EXTRACT_SUBREG (XVPICKEV_W (XVPERMI_D (XVFTINTRZ_LU_D v4f64:$vj), 238),
                                      (XVFTINTRZ_LU_D v4f64:$vj)),
                          sub_128)>;

// XVABSD_{B/H/W/D}[U]
defm : PatXrXr<abds, "XVABSD">;
defm : PatXrXrU<abdu, "XVABSD">;

// Vector mask set by condition
def : Pat<(loongarch_xvmskltz (v32i8 LASX256:$vj)), (PseudoXVMSKLTZ_B LASX256:$vj)>;
def : Pat<(loongarch_xvmskltz (v16i16 LASX256:$vj)), (PseudoXVMSKLTZ_H LASX256:$vj)>;
def : Pat<(loongarch_xvmskltz (v8i32 LASX256:$vj)), (PseudoXVMSKLTZ_W LASX256:$vj)>;
def : Pat<(loongarch_xvmskltz (v4i64 LASX256:$vj)), (PseudoXVMSKLTZ_D LASX256:$vj)>;
def : Pat<(loongarch_xvmskgez (v32i8 LASX256:$vj)), (PseudoXVMSKGEZ_B LASX256:$vj)>;
def : Pat<(loongarch_xvmskeqz (v32i8 LASX256:$vj)), (PseudoXVMSKEQZ_B LASX256:$vj)>;
def : Pat<(loongarch_xvmsknez (v32i8 LASX256:$vj)), (PseudoXVMSKNEZ_B LASX256:$vj)>;

// Subvector tricks
// Patterns for insert_subvector/extract_subvector
multiclass subvector_subreg_lowering<RegisterClass subRC, ValueType subVT,
                                     RegisterClass RC, ValueType VT,
                                     int hiIdx, SubRegIndex subIdx> {
  // A 128-bit subvector extracted from the low half of a 256-bit vector is
  // just a subregister copy and needs no instruction. Likewise, inserting a
  // 128-bit subvector into the low half of an undef 256-bit vector is a
  // subregister copy that needs no instruction.
  def : Pat<(subVT (extract_subvector (VT RC:$src), (iPTR 0))),
            (subVT (EXTRACT_SUBREG RC:$src, subIdx))>;
  def : Pat<(VT (insert_subvector undef_or_freeze_undef, subRC:$src, (iPTR 0))),
            (VT (INSERT_SUBREG (IMPLICIT_DEF), subRC:$src, subIdx))>;

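  // The high 128-bit half is reached via XVPERMI_Q, which assembles the
  // result from the 128-bit lanes of its two source operands: immediate 1
  // moves the high half of $src into the low lane for extraction, while
  // immediates 48 and 2 splice the new subvector into the low or high half of
  // $vd and keep the other half unchanged.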
  def : Pat<(subVT (extract_subvector (VT RC:$src), (iPTR hiIdx))),
            (subVT (EXTRACT_SUBREG (XVPERMI_Q (IMPLICIT_DEF), RC:$src, 1), subIdx))>;
  def : Pat<(VT (insert_subvector RC:$vd, subRC:$vj, (iPTR 0))),
            (VT (XVPERMI_Q RC:$vd, (INSERT_SUBREG (IMPLICIT_DEF), subRC:$vj, subIdx), 48))>;
  def : Pat<(VT (insert_subvector RC:$vd, subRC:$vj, (iPTR hiIdx))),
            (VT (XVPERMI_Q RC:$vd, (INSERT_SUBREG (IMPLICIT_DEF), subRC:$vj, subIdx), 2))>;
}

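// hiIdx is the element index at which the high 128-bit half starts, i.e. the
// number of elements of subVT that fit in 128 bits.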
defm : subvector_subreg_lowering<LSX128, v4i32, LASX256, v8i32,  4,  sub_128>;
defm : subvector_subreg_lowering<LSX128, v4f32, LASX256, v8f32,  4,  sub_128>;
defm : subvector_subreg_lowering<LSX128, v2i64, LASX256, v4i64,  2,  sub_128>;
defm : subvector_subreg_lowering<LSX128, v2f64, LASX256, v4f64,  2,  sub_128>;
defm : subvector_subreg_lowering<LSX128, v8i16, LASX256, v16i16, 8,  sub_128>;
defm : subvector_subreg_lowering<LSX128, v16i8, LASX256, v32i8,  16, sub_128>;

} // Predicates = [HasExtLASX]

/// Intrinsic pattern

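// Derive the intrinsic that corresponds to an LASX instruction by lowercasing
// its mnemonic, e.g. "XVSADD_B" -> int_loongarch_lasx_xvsadd_b.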
class deriveLASXIntrinsic<string Inst> {
  Intrinsic ret = !cast<Intrinsic>(!tolower("int_loongarch_lasx_"#Inst));
}

let Predicates = [HasExtLASX] in {

// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xj, vty:$xk),
//     (LAInst vty:$xj, vty:$xk)>;
foreach Inst = ["XVSADD_B", "XVSADD_BU", "XVSSUB_B", "XVSSUB_BU",
                "XVHADDW_H_B", "XVHADDW_HU_BU", "XVHSUBW_H_B", "XVHSUBW_HU_BU",
                "XVADDWEV_H_B", "XVADDWOD_H_B", "XVSUBWEV_H_B", "XVSUBWOD_H_B",
                "XVADDWEV_H_BU", "XVADDWOD_H_BU", "XVSUBWEV_H_BU", "XVSUBWOD_H_BU",
                "XVADDWEV_H_BU_B", "XVADDWOD_H_BU_B",
                "XVAVG_B", "XVAVG_BU", "XVAVGR_B", "XVAVGR_BU",
                "XVABSD_B", "XVABSD_BU", "XVADDA_B", "XVMUH_B", "XVMUH_BU",
                "XVMULWEV_H_B", "XVMULWOD_H_B", "XVMULWEV_H_BU", "XVMULWOD_H_BU",
                "XVMULWEV_H_BU_B", "XVMULWOD_H_BU_B", "XVSIGNCOV_B",
                "XVANDN_V", "XVORN_V", "XVROTR_B", "XVSRLR_B", "XVSRAR_B",
                "XVSEQ_B", "XVSLE_B", "XVSLE_BU", "XVSLT_B", "XVSLT_BU",
                "XVPACKEV_B", "XVPACKOD_B", "XVPICKEV_B", "XVPICKOD_B",
                "XVILVL_B", "XVILVH_B"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVSADD_H", "XVSADD_HU", "XVSSUB_H", "XVSSUB_HU",
                "XVHADDW_W_H", "XVHADDW_WU_HU", "XVHSUBW_W_H", "XVHSUBW_WU_HU",
                "XVADDWEV_W_H", "XVADDWOD_W_H", "XVSUBWEV_W_H", "XVSUBWOD_W_H",
                "XVADDWEV_W_HU", "XVADDWOD_W_HU", "XVSUBWEV_W_HU", "XVSUBWOD_W_HU",
                "XVADDWEV_W_HU_H", "XVADDWOD_W_HU_H",
                "XVAVG_H", "XVAVG_HU", "XVAVGR_H", "XVAVGR_HU",
                "XVABSD_H", "XVABSD_HU", "XVADDA_H", "XVMUH_H", "XVMUH_HU",
                "XVMULWEV_W_H", "XVMULWOD_W_H", "XVMULWEV_W_HU", "XVMULWOD_W_HU",
                "XVMULWEV_W_HU_H", "XVMULWOD_W_HU_H", "XVSIGNCOV_H", "XVROTR_H",
                "XVSRLR_H", "XVSRAR_H", "XVSRLN_B_H", "XVSRAN_B_H", "XVSRLRN_B_H",
                "XVSRARN_B_H", "XVSSRLN_B_H", "XVSSRAN_B_H", "XVSSRLN_BU_H",
                "XVSSRAN_BU_H", "XVSSRLRN_B_H", "XVSSRARN_B_H", "XVSSRLRN_BU_H",
                "XVSSRARN_BU_H",
                "XVSEQ_H", "XVSLE_H", "XVSLE_HU", "XVSLT_H", "XVSLT_HU",
                "XVPACKEV_H", "XVPACKOD_H", "XVPICKEV_H", "XVPICKOD_H",
                "XVILVL_H", "XVILVH_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVSADD_W", "XVSADD_WU", "XVSSUB_W", "XVSSUB_WU",
                "XVHADDW_D_W", "XVHADDW_DU_WU", "XVHSUBW_D_W", "XVHSUBW_DU_WU",
                "XVADDWEV_D_W", "XVADDWOD_D_W", "XVSUBWEV_D_W", "XVSUBWOD_D_W",
                "XVADDWEV_D_WU", "XVADDWOD_D_WU", "XVSUBWEV_D_WU", "XVSUBWOD_D_WU",
                "XVADDWEV_D_WU_W", "XVADDWOD_D_WU_W",
                "XVAVG_W", "XVAVG_WU", "XVAVGR_W", "XVAVGR_WU",
                "XVABSD_W", "XVABSD_WU", "XVADDA_W", "XVMUH_W", "XVMUH_WU",
                "XVMULWEV_D_W", "XVMULWOD_D_W", "XVMULWEV_D_WU", "XVMULWOD_D_WU",
                "XVMULWEV_D_WU_W", "XVMULWOD_D_WU_W", "XVSIGNCOV_W", "XVROTR_W",
                "XVSRLR_W", "XVSRAR_W", "XVSRLN_H_W", "XVSRAN_H_W", "XVSRLRN_H_W",
                "XVSRARN_H_W", "XVSSRLN_H_W", "XVSSRAN_H_W", "XVSSRLN_HU_W",
                "XVSSRAN_HU_W", "XVSSRLRN_H_W", "XVSSRARN_H_W", "XVSSRLRN_HU_W",
                "XVSSRARN_HU_W",
                "XVSEQ_W", "XVSLE_W", "XVSLE_WU", "XVSLT_W", "XVSLT_WU",
                "XVPACKEV_W", "XVPACKOD_W", "XVPICKEV_W", "XVPICKOD_W",
                "XVILVL_W", "XVILVH_W", "XVPERM_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v8i32 LASX256:$xj), (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVADD_Q", "XVSUB_Q",
                "XVSADD_D", "XVSADD_DU", "XVSSUB_D", "XVSSUB_DU",
                "XVHADDW_Q_D", "XVHADDW_QU_DU", "XVHSUBW_Q_D", "XVHSUBW_QU_DU",
                "XVADDWEV_Q_D", "XVADDWOD_Q_D", "XVSUBWEV_Q_D", "XVSUBWOD_Q_D",
                "XVADDWEV_Q_DU", "XVADDWOD_Q_DU", "XVSUBWEV_Q_DU", "XVSUBWOD_Q_DU",
                "XVADDWEV_Q_DU_D", "XVADDWOD_Q_DU_D",
                "XVAVG_D", "XVAVG_DU", "XVAVGR_D", "XVAVGR_DU",
                "XVABSD_D", "XVABSD_DU", "XVADDA_D", "XVMUH_D", "XVMUH_DU",
                "XVMULWEV_Q_D", "XVMULWOD_Q_D", "XVMULWEV_Q_DU", "XVMULWOD_Q_DU",
                "XVMULWEV_Q_DU_D", "XVMULWOD_Q_DU_D", "XVSIGNCOV_D", "XVROTR_D",
                "XVSRLR_D", "XVSRAR_D", "XVSRLN_W_D", "XVSRAN_W_D", "XVSRLRN_W_D",
                "XVSRARN_W_D", "XVSSRLN_W_D", "XVSSRAN_W_D", "XVSSRLN_WU_D",
                "XVSSRAN_WU_D", "XVSSRLRN_W_D", "XVSSRARN_W_D", "XVSSRLRN_WU_D",
                "XVSSRARN_WU_D", "XVFFINT_S_L",
                "XVSEQ_D", "XVSLE_D", "XVSLE_DU", "XVSLT_D", "XVSLT_DU",
                "XVPACKEV_D", "XVPACKOD_D", "XVPICKEV_D", "XVPICKOD_D",
                "XVILVL_D", "XVILVH_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v4i64 LASX256:$xj), (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;

// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xd, vty:$xj, vty:$xk),
//     (LAInst vty:$xd, vty:$xj, vty:$xk)>;
foreach Inst = ["XVMADDWEV_H_B", "XVMADDWOD_H_B", "XVMADDWEV_H_BU",
                "XVMADDWOD_H_BU", "XVMADDWEV_H_BU_B", "XVMADDWOD_H_BU_B"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v16i16 LASX256:$xd), (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVMADDWEV_W_H", "XVMADDWOD_W_H", "XVMADDWEV_W_HU",
                "XVMADDWOD_W_HU", "XVMADDWEV_W_HU_H", "XVMADDWOD_W_HU_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v8i32 LASX256:$xd), (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVMADDWEV_D_W", "XVMADDWOD_D_W", "XVMADDWEV_D_WU",
                "XVMADDWOD_D_WU", "XVMADDWEV_D_WU_W", "XVMADDWOD_D_WU_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v4i64 LASX256:$xd), (v8i32 LASX256:$xj), (v8i32 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVMADDWEV_Q_D", "XVMADDWOD_Q_D", "XVMADDWEV_Q_DU",
                "XVMADDWOD_Q_DU", "XVMADDWEV_Q_DU_D", "XVMADDWOD_Q_DU_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v4i64 LASX256:$xd), (v4i64 LASX256:$xj), (v4i64 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;

// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xj),
//     (LAInst vty:$xj)>;
foreach Inst = ["XVEXTH_H_B", "XVEXTH_HU_BU",
                "XVMSKLTZ_B", "XVMSKGEZ_B", "XVMSKNZ_B",
                "XVCLO_B", "VEXT2XV_H_B", "VEXT2XV_HU_BU",
                "VEXT2XV_W_B", "VEXT2XV_WU_BU", "VEXT2XV_D_B",
                "VEXT2XV_DU_BU", "XVREPLVE0_B", "XVREPLVE0_Q"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v32i8 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVEXTH_W_H", "XVEXTH_WU_HU", "XVMSKLTZ_H",
                "XVCLO_H", "XVFCVTL_S_H", "XVFCVTH_S_H",
                "VEXT2XV_W_H", "VEXT2XV_WU_HU", "VEXT2XV_D_H",
                "VEXT2XV_DU_HU", "XVREPLVE0_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v16i16 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVEXTH_D_W", "XVEXTH_DU_WU", "XVMSKLTZ_W",
                "XVCLO_W", "XVFFINT_S_W", "XVFFINT_S_WU",
                "XVFFINTL_D_W", "XVFFINTH_D_W",
                "VEXT2XV_D_W", "VEXT2XV_DU_WU", "XVREPLVE0_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v8i32 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVEXTH_Q_D", "XVEXTH_QU_DU", "XVMSKLTZ_D",
                "XVEXTL_Q_D", "XVEXTL_QU_DU",
                "XVCLO_D", "XVFFINT_D_L", "XVFFINT_D_LU",
                "XVREPLVE0_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v4i64 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;

// Pat<(Intrinsic timm:$imm)
//     (LAInst timm:$imm)>;
def : Pat<(int_loongarch_lasx_xvldi timm:$imm),
          (XVLDI (to_valid_timm timm:$imm))>;
foreach Inst = ["XVREPLI_B", "XVREPLI_H", "XVREPLI_W", "XVREPLI_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret timm:$imm),
            (!cast<LAInst>("Pseudo"#Inst) (to_valid_timm timm:$imm))>;

// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xj, timm:$imm)
//     (LAInst vty:$xj, timm:$imm)>;
foreach Inst = ["XVSAT_B", "XVSAT_BU", "XVNORI_B", "XVROTRI_B", "XVSLLWIL_H_B",
                "XVSLLWIL_HU_BU", "XVSRLRI_B", "XVSRARI_B",
                "XVSEQI_B", "XVSLEI_B", "XVSLEI_BU", "XVSLTI_B", "XVSLTI_BU",
                "XVREPL128VEI_B", "XVBSLL_V", "XVBSRL_V", "XVSHUF4I_B"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v32i8 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xj, (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSAT_H", "XVSAT_HU", "XVROTRI_H", "XVSLLWIL_W_H",
                "XVSLLWIL_WU_HU", "XVSRLRI_H", "XVSRARI_H",
                "XVSEQI_H", "XVSLEI_H", "XVSLEI_HU", "XVSLTI_H", "XVSLTI_HU",
                "XVREPL128VEI_H", "XVSHUF4I_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v16i16 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xj, (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSAT_W", "XVSAT_WU", "XVROTRI_W", "XVSLLWIL_D_W",
                "XVSLLWIL_DU_WU", "XVSRLRI_W", "XVSRARI_W",
                "XVSEQI_W", "XVSLEI_W", "XVSLEI_WU", "XVSLTI_W", "XVSLTI_WU",
                "XVREPL128VEI_W", "XVSHUF4I_W", "XVPICKVE_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v8i32 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xj, (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSAT_D", "XVSAT_DU", "XVROTRI_D", "XVSRLRI_D", "XVSRARI_D",
                "XVSEQI_D", "XVSLEI_D", "XVSLEI_DU", "XVSLTI_D", "XVSLTI_DU",
                "XVPICKVE2GR_D", "XVPICKVE2GR_DU",
                "XVREPL128VEI_D", "XVPERMI_D", "XVPICKVE_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v4i64 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xj, (to_valid_timm timm:$imm))>;

// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xd, vty:$xj, timm:$imm)
//     (LAInst vty:$xd, vty:$xj, timm:$imm)>;
foreach Inst = ["XVSRLNI_B_H", "XVSRANI_B_H", "XVSRLRNI_B_H", "XVSRARNI_B_H",
                "XVSSRLNI_B_H", "XVSSRANI_B_H", "XVSSRLNI_BU_H", "XVSSRANI_BU_H",
                "XVSSRLRNI_B_H", "XVSSRARNI_B_H", "XVSSRLRNI_BU_H", "XVSSRARNI_BU_H",
                "XVFRSTPI_B", "XVBITSELI_B", "XVEXTRINS_B", "XVPERMI_Q"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v32i8 LASX256:$xd), (v32i8 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj,
               (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSRLNI_H_W", "XVSRANI_H_W", "XVSRLRNI_H_W", "XVSRARNI_H_W",
                "XVSSRLNI_H_W", "XVSSRANI_H_W", "XVSSRLNI_HU_W", "XVSSRANI_HU_W",
                "XVSSRLRNI_H_W", "XVSSRARNI_H_W", "XVSSRLRNI_HU_W", "XVSSRARNI_HU_W",
                "XVFRSTPI_H", "XVEXTRINS_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v16i16 LASX256:$xd), (v16i16 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj,
               (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSRLNI_W_D", "XVSRANI_W_D", "XVSRLRNI_W_D", "XVSRARNI_W_D",
                "XVSSRLNI_W_D", "XVSSRANI_W_D", "XVSSRLNI_WU_D", "XVSSRANI_WU_D",
                "XVSSRLRNI_W_D", "XVSSRARNI_W_D", "XVSSRLRNI_WU_D", "XVSSRARNI_WU_D",
                "XVPERMI_W", "XVEXTRINS_W", "XVINSVE0_W"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v8i32 LASX256:$xd), (v8i32 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj,
               (to_valid_timm timm:$imm))>;
foreach Inst = ["XVSRLNI_D_Q", "XVSRANI_D_Q", "XVSRLRNI_D_Q", "XVSRARNI_D_Q",
                "XVSSRLNI_D_Q", "XVSSRANI_D_Q", "XVSSRLNI_DU_Q", "XVSSRANI_DU_Q",
                "XVSSRLRNI_D_Q", "XVSSRARNI_D_Q", "XVSSRLRNI_DU_Q", "XVSSRARNI_DU_Q",
                "XVSHUF4I_D", "XVEXTRINS_D", "XVINSVE0_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v4i64 LASX256:$xd), (v4i64 LASX256:$xj), timm:$imm),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj,
               (to_valid_timm timm:$imm))>;

// vty: v32i8/v16i16/v8i32/v4i64
// Pat<(Intrinsic vty:$xd, vty:$xj, vty:$xk),
//     (LAInst vty:$xd, vty:$xj, vty:$xk)>;
foreach Inst = ["XVFRSTP_B", "XVBITSEL_V", "XVSHUF_B"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v32i8 LASX256:$xd), (v32i8 LASX256:$xj), (v32i8 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVFRSTP_H", "XVSHUF_H"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v16i16 LASX256:$xd), (v16i16 LASX256:$xj), (v16i16 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
def : Pat<(int_loongarch_lasx_xvshuf_w (v8i32 LASX256:$xd), (v8i32 LASX256:$xj),
                                     (v8i32 LASX256:$xk)),
          (XVSHUF_W LASX256:$xd, LASX256:$xj, LASX256:$xk)>;
def : Pat<(int_loongarch_lasx_xvshuf_d (v4i64 LASX256:$xd), (v4i64 LASX256:$xj),
                                     (v4i64 LASX256:$xk)),
          (XVSHUF_D LASX256:$xd, LASX256:$xj, LASX256:$xk)>;

// vty: v8f32/v4f64
// Pat<(Intrinsic vty:$xj, vty:$xk, vty:$xa),
//     (LAInst vty:$xj, vty:$xk, vty:$xa)>;
foreach Inst = ["XVFMSUB_S", "XVFNMADD_S", "XVFNMSUB_S"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v8f32 LASX256:$xj), (v8f32 LASX256:$xk), (v8f32 LASX256:$xa)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk, LASX256:$xa)>;
foreach Inst = ["XVFMSUB_D", "XVFNMADD_D", "XVFNMSUB_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v4f64 LASX256:$xj), (v4f64 LASX256:$xk), (v4f64 LASX256:$xa)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk, LASX256:$xa)>;

// vty: v8f32/v4f64
// Pat<(Intrinsic vty:$xj, vty:$xk),
//     (LAInst vty:$xj, vty:$xk)>;
foreach Inst = ["XVFMAX_S", "XVFMIN_S", "XVFMAXA_S", "XVFMINA_S", "XVFCVT_H_S",
                "XVFCMP_CAF_S", "XVFCMP_CUN_S", "XVFCMP_CEQ_S", "XVFCMP_CUEQ_S",
                "XVFCMP_CLT_S", "XVFCMP_CULT_S", "XVFCMP_CLE_S", "XVFCMP_CULE_S",
                "XVFCMP_CNE_S", "XVFCMP_COR_S", "XVFCMP_CUNE_S",
                "XVFCMP_SAF_S", "XVFCMP_SUN_S", "XVFCMP_SEQ_S", "XVFCMP_SUEQ_S",
                "XVFCMP_SLT_S", "XVFCMP_SULT_S", "XVFCMP_SLE_S", "XVFCMP_SULE_S",
                "XVFCMP_SNE_S", "XVFCMP_SOR_S", "XVFCMP_SUNE_S"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v8f32 LASX256:$xj), (v8f32 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;
foreach Inst = ["XVFMAX_D", "XVFMIN_D", "XVFMAXA_D", "XVFMINA_D", "XVFCVT_S_D",
                "XVFTINTRNE_W_D", "XVFTINTRZ_W_D", "XVFTINTRP_W_D", "XVFTINTRM_W_D",
                "XVFTINT_W_D",
                "XVFCMP_CAF_D", "XVFCMP_CUN_D", "XVFCMP_CEQ_D", "XVFCMP_CUEQ_D",
                "XVFCMP_CLT_D", "XVFCMP_CULT_D", "XVFCMP_CLE_D", "XVFCMP_CULE_D",
                "XVFCMP_CNE_D", "XVFCMP_COR_D", "XVFCMP_CUNE_D",
                "XVFCMP_SAF_D", "XVFCMP_SUN_D", "XVFCMP_SEQ_D", "XVFCMP_SUEQ_D",
                "XVFCMP_SLT_D", "XVFCMP_SULT_D", "XVFCMP_SLE_D", "XVFCMP_SULE_D",
                "XVFCMP_SNE_D", "XVFCMP_SOR_D", "XVFCMP_SUNE_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret
               (v4f64 LASX256:$xj), (v4f64 LASX256:$xk)),
            (!cast<LAInst>(Inst) LASX256:$xj, LASX256:$xk)>;

// vty: v8f32/v4f64
// Pat<(Intrinsic vty:$xj),
//     (LAInst vty:$xj)>;
foreach Inst = ["XVFLOGB_S", "XVFCLASS_S", "XVFSQRT_S", "XVFRECIP_S", "XVFRSQRT_S",
                "XVFRINT_S", "XVFCVTL_D_S", "XVFCVTH_D_S",
                "XVFRINTRNE_S", "XVFRINTRZ_S", "XVFRINTRP_S", "XVFRINTRM_S",
                "XVFTINTRNE_W_S", "XVFTINTRZ_W_S", "XVFTINTRP_W_S", "XVFTINTRM_W_S",
                "XVFTINT_W_S", "XVFTINTRZ_WU_S", "XVFTINT_WU_S",
                "XVFTINTRNEL_L_S", "XVFTINTRNEH_L_S", "XVFTINTRZL_L_S",
                "XVFTINTRZH_L_S", "XVFTINTRPL_L_S", "XVFTINTRPH_L_S",
                "XVFTINTRML_L_S", "XVFTINTRMH_L_S", "XVFTINTL_L_S",
                "XVFTINTH_L_S"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v8f32 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVFLOGB_D", "XVFCLASS_D", "XVFSQRT_D", "XVFRECIP_D", "XVFRSQRT_D",
                "XVFRINT_D",
                "XVFRINTRNE_D", "XVFRINTRZ_D", "XVFRINTRP_D", "XVFRINTRM_D",
                "XVFTINTRNE_L_D", "XVFTINTRZ_L_D", "XVFTINTRP_L_D", "XVFTINTRM_L_D",
                "XVFTINT_L_D", "XVFTINTRZ_LU_D", "XVFTINT_LU_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v4f64 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;

// 256-bit vector FP approximate reciprocal operations
let Predicates = [HasFrecipe] in {
foreach Inst = ["XVFRECIPE_S", "XVFRSQRTE_S"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v8f32 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;
foreach Inst = ["XVFRECIPE_D", "XVFRSQRTE_D"] in
  def : Pat<(deriveLASXIntrinsic<Inst>.ret (v4f64 LASX256:$xj)),
            (!cast<LAInst>(Inst) LASX256:$xj)>;

def : Pat<(loongarch_vfrecipe v8f32:$src),
          (XVFRECIPE_S v8f32:$src)>;
def : Pat<(loongarch_vfrecipe v4f64:$src),
          (XVFRECIPE_D v4f64:$src)>;
def : Pat<(loongarch_vfrsqrte v8f32:$src),
          (XVFRSQRTE_S v8f32:$src)>;
def : Pat<(loongarch_vfrsqrte v4f64:$src),
          (XVFRSQRTE_D v4f64:$src)>;
}

def : Pat<(int_loongarch_lasx_xvpickve_w_f v8f32:$xj, timm:$imm),
          (XVPICKVE_W v8f32:$xj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvpickve_d_f v4f64:$xj, timm:$imm),
          (XVPICKVE_D v4f64:$xj, (to_valid_timm timm:$imm))>;

// load
def : Pat<(int_loongarch_lasx_xvld GPR:$rj, timm:$imm),
          (XVLD GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvldx GPR:$rj, GPR:$rk),
          (XVLDX GPR:$rj, GPR:$rk)>;

// xvldrepl
def : Pat<(int_loongarch_lasx_xvldrepl_b GPR:$rj, timm:$imm),
          (XVLDREPL_B GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvldrepl_h GPR:$rj, timm:$imm),
          (XVLDREPL_H GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvldrepl_w GPR:$rj, timm:$imm),
          (XVLDREPL_W GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvldrepl_d GPR:$rj, timm:$imm),
          (XVLDREPL_D GPR:$rj, (to_valid_timm timm:$imm))>;

defm : VldreplPat<v32i8, XVLDREPL_B, simm12_addlike>;
defm : VldreplPat<v16i16, XVLDREPL_H, simm11_lsl1>;
defm : VldreplPat<v8i32, XVLDREPL_W, simm10_lsl2>;
defm : VldreplPat<v4i64, XVLDREPL_D, simm9_lsl3>;
defm : VldreplPat<v8f32, XVLDREPL_W, simm10_lsl2>;
defm : VldreplPat<v4f64, XVLDREPL_D, simm9_lsl3>;

// store
def : Pat<(int_loongarch_lasx_xvst LASX256:$xd, GPR:$rj, timm:$imm),
          (XVST LASX256:$xd, GPR:$rj, (to_valid_timm timm:$imm))>;
def : Pat<(int_loongarch_lasx_xvstx LASX256:$xd, GPR:$rj, GPR:$rk),
          (XVSTX LASX256:$xd, GPR:$rj, GPR:$rk)>;

def : Pat<(int_loongarch_lasx_xvstelm_b v32i8:$xd, GPR:$rj, timm:$imm, timm:$idx),
          (XVSTELM_B v32i8:$xd, GPR:$rj, (to_valid_timm timm:$imm),
                    (to_valid_timm timm:$idx))>;
def : Pat<(int_loongarch_lasx_xvstelm_h v16i16:$xd, GPR:$rj, timm:$imm, timm:$idx),
          (XVSTELM_H v16i16:$xd, GPR:$rj, (to_valid_timm timm:$imm),
                    (to_valid_timm timm:$idx))>;
def : Pat<(int_loongarch_lasx_xvstelm_w v8i32:$xd, GPR:$rj, timm:$imm, timm:$idx),
          (XVSTELM_W v8i32:$xd, GPR:$rj, (to_valid_timm timm:$imm),
                    (to_valid_timm timm:$idx))>;
def : Pat<(int_loongarch_lasx_xvstelm_d v4i64:$xd, GPR:$rj, timm:$imm, timm:$idx),
          (XVSTELM_D v4i64:$xd, GPR:$rj, (to_valid_timm timm:$imm),
                    (to_valid_timm timm:$idx))>;

} // Predicates = [HasExtLASX]