aboutsummaryrefslogtreecommitdiff
path: root/lldb/source/Plugins/ScriptInterpreter/Python/PythonDataObjects.cpp
AgeCommit message (Expand)AuthorFilesLines
2025-08-08[lldb] Support the Python stable C API in PythonString::AsUTF8 (#152599)Jonas Devlieghere1-5/+23
2025-08-06[lldb] Eliminate (_)Py_IsFinalizing (NFC) (#152226)Jonas Devlieghere1-16/+3
2025-08-05[lldb] Drop PY_MINOR_VERSION >= 3 check (NFC)Jonas Devlieghere1-6/+1
2025-08-05[lldb] Use Python Bytes instead of Buffer for Binary I/O (NFC) (#152031)Jonas Devlieghere1-40/+6
2025-08-05[lldb] Reimplement PythonObject::Dump using the limited API (#152055)Jonas Devlieghere1-17/+25
2025-08-04[lldb] Eliminate PyGILState_Check (NFC) (#152006)Jonas Devlieghere1-1/+0
2025-08-04[lldb] Use fully qualified name instead of namespace (NFC)Jonas Devlieghere1-8/+3
2025-08-01[lldb] Reimplment PyRun_SimpleString using the Python stable C API (#151777)Jonas Devlieghere1-0/+16
2025-08-01[lldb] Reimplment PyRun_String using the Python stable C API (#151761)Jonas Devlieghere1-11/+30
2025-08-01[lldb] Replace Python APIs with their stable equivalent (#151618)Jonas Devlieghere1-3/+3
2025-01-29[lldb] Make Python >= 3.8 required for LLDB 21 (#124735)David Spickett1-58/+1
2024-10-31[lldb] Use PY_VERSION_HEX to simplify conditional compilation (NFC) (#114346)Jonas Devlieghere1-6/+6
2024-09-23[lldb] Change the implementation of Status to store an llvm::Error (NFC) (#10...Adrian Prantl1-10/+19
2024-09-23Revert "[lldb] Change the implementation of Status to store an llvm::Error (N...Adrian Prantl1-19/+10
2024-09-23[lldb] Change the implementation of Status to store an llvm::Error (NFC) (#10...Adrian Prantl1-10/+19
2024-09-20Revert "[lldb] Change the implementation of Status to store an llvm::Error (N...Adrian Prantl1-19/+10
2024-09-20[lldb] Change the implementation of Status to store an llvm::Error (NFC) (#10...Adrian Prantl1-10/+19
2024-09-20Revert "[lldb] Change the implementation of Status to store an llvm::Error (N...David Spickett1-19/+10
2024-09-19[lldb] Change the implementation of Status to store an llvm::Error (NFC) (#10...Adrian Prantl1-10/+19
2024-09-18Revert "[lldb] Change the implementation of Status to store an llvm::Error (N...Adrian Prantl1-20/+11
2024-09-18[lldb] Change the implementation of Status to store an llvm::Error (NFC) (#10...Adrian Prantl1-11/+20
2024-09-05[lldb] Make conversions from llvm::Error explicit with Status::FromEr… (#10...Adrian Prantl1-11/+11
2024-08-27[lldb] Turn lldb_private::Status into a value type. (#106163)Adrian Prantl1-3/+6
2024-04-22[lldb] Replace condition that always evaluates to false (#89685)Troy Butler1-1/+1
2023-11-07[lldb] Check for abstract methods implementation in Scripted Plugin Objects (...Med Ismail Bennani1-0/+14
2023-11-07Revert "[lldb] Check for abstract methods implementation in Scripted Plugin O...Med Ismail Bennani1-14/+0
2023-11-07[lldb] Check for abstract methods implementation in Scripted Plugin Objects (...Med Ismail Bennani1-0/+14
2023-10-30[lldb] Adapt code to Python 3.13 (#70445)Tulio Magno Quites Machado Filho1-1/+3
2023-09-01[lldb] Fix duplicate word typos; NFCFangrui Song1-1/+1
2023-05-22[lldb] Add support for negative integer to {SB,}StructuredDataMed Ismail Bennani1-14/+37
2022-12-14Don't include StringSwitch (NFC)Kazu Hirata1-1/+0
2022-08-27[lldb] Use nullptr instead of NULL (NFC)Kazu Hirata1-2/+2
2022-04-27Remove Python 2 support from the ScriptInterpreter pluginJonas Devlieghere1-157/+2
2022-02-03[lldb] Rename Logging.h to LLDBLog.h and clean up includesPavel Labath1-0/+1
2022-02-02[lldb] Convert "LLDB" log channel to the new APIPavel Labath1-1/+1
2022-01-24[lldb] Make PythonDataObjects work with Python 2Jonas Devlieghere1-1/+5
2022-01-19[lldb] Fix D114722 for python<=3.6Pavel Labath1-0/+22
2022-01-18[lldb/python] Use PythonObject in LLDBSwigPython functionsPavel Labath1-1/+2
2022-01-17[LLDB] Fix Python GIL-not-held issuesRalf Grosse-Kunstleve1-0/+1
2021-11-10[lldb] make it easier to find LLDB's pythonLawrence D'Anna1-14/+0
2021-08-09[lldb] [gdb-remote] Add eOpenOptionReadWrite for future gdb compatMichał Górny1-5/+10
2021-05-26[lldb][NFC] Use C++ versions of the deprecated C standard library headersRaphael Isemann1-1/+1
2020-08-05[lldb] Use PyUnicode_GetLength instead of PyUnicode_GetSizeTatyana Krasnukha1-0/+4
2020-05-08Re-land "get rid of PythonInteger::GetInteger()"Lawrence D'Anna1-21/+68
2020-04-23Revert "get rid of PythonInteger::GetInteger()"Muhammad Omair Javaid1-19/+21
2020-04-21get rid of PythonInteger::GetInteger()Lawrence D'Anna1-21/+19
2020-04-07[lldb] NFC: Fix trivial typo in comments, documents, and messagesKazuaki Ishizaki1-1/+1
2020-01-29Fix implicit conversion in the lldb Python pluginBenjamin Kramer1-1/+1
2020-01-24[lldb][NFC] Fix all formatting errors in .cpp file headersRaphael Isemann1-1/+1
2019-12-13[lldb/CMake] Rename LLDB_DISABLE_PYTHON to LLDB_ENABLE_PYTHONJonas Devlieghere1-1/+1
LSX_BUILTIN (vrotr_d, LARCH_V2DI_FTYPE_V2DI_V2DI), LSX_BUILTIN (vadd_q, LARCH_V2DI_FTYPE_V2DI_V2DI), LSX_BUILTIN (vsub_q, LARCH_V2DI_FTYPE_V2DI_V2DI), LSX_BUILTIN (vldrepl_b, LARCH_V16QI_FTYPE_CVPOINTER_SI), LSX_BUILTIN (vldrepl_h, LARCH_V8HI_FTYPE_CVPOINTER_SI), LSX_BUILTIN (vldrepl_w, LARCH_V4SI_FTYPE_CVPOINTER_SI), LSX_BUILTIN (vldrepl_d, LARCH_V2DI_FTYPE_CVPOINTER_SI), LSX_BUILTIN (vmskgez_b, LARCH_V16QI_FTYPE_V16QI), LSX_BUILTIN (vmsknz_b, LARCH_V16QI_FTYPE_V16QI), LSX_BUILTIN (vexth_h_b, LARCH_V8HI_FTYPE_V16QI), LSX_BUILTIN (vexth_w_h, LARCH_V4SI_FTYPE_V8HI), LSX_BUILTIN (vexth_d_w, LARCH_V2DI_FTYPE_V4SI), LSX_BUILTIN (vexth_q_d, LARCH_V2DI_FTYPE_V2DI), LSX_BUILTIN (vexth_hu_bu, LARCH_UV8HI_FTYPE_UV16QI), LSX_BUILTIN (vexth_wu_hu, LARCH_UV4SI_FTYPE_UV8HI), LSX_BUILTIN (vexth_du_wu, LARCH_UV2DI_FTYPE_UV4SI), LSX_BUILTIN (vexth_qu_du, LARCH_UV2DI_FTYPE_UV2DI), LSX_BUILTIN (vrotri_b, LARCH_V16QI_FTYPE_V16QI_UQI), LSX_BUILTIN (vrotri_h, LARCH_V8HI_FTYPE_V8HI_UQI), LSX_BUILTIN (vrotri_w, LARCH_V4SI_FTYPE_V4SI_UQI), LSX_BUILTIN (vrotri_d, LARCH_V2DI_FTYPE_V2DI_UQI), LSX_BUILTIN (vextl_q_d, LARCH_V2DI_FTYPE_V2DI), LSX_BUILTIN (vsrlni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI), LSX_BUILTIN (vsrlni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI), LSX_BUILTIN (vsrlni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vsrlni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI), LSX_BUILTIN (vsrlrni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI), LSX_BUILTIN (vsrlrni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI), LSX_BUILTIN (vsrlrni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vsrlrni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI), LSX_BUILTIN (vssrlni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI), LSX_BUILTIN (vssrlni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI), LSX_BUILTIN (vssrlni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vssrlni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI), LSX_BUILTIN (vssrlni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI), LSX_BUILTIN (vssrlni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI), LSX_BUILTIN (vssrlni_wu_d, 
LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI), LSX_BUILTIN (vssrlni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI), LSX_BUILTIN (vssrlrni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI), LSX_BUILTIN (vssrlrni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI), LSX_BUILTIN (vssrlrni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vssrlrni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI), LSX_BUILTIN (vssrlrni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI), LSX_BUILTIN (vssrlrni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI), LSX_BUILTIN (vssrlrni_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI), LSX_BUILTIN (vssrlrni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI), LSX_BUILTIN (vsrani_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI), LSX_BUILTIN (vsrani_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI), LSX_BUILTIN (vsrani_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vsrani_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI), LSX_BUILTIN (vsrarni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI), LSX_BUILTIN (vsrarni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI), LSX_BUILTIN (vsrarni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vsrarni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI), LSX_BUILTIN (vssrani_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI), LSX_BUILTIN (vssrani_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI), LSX_BUILTIN (vssrani_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vssrani_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI), LSX_BUILTIN (vssrani_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI), LSX_BUILTIN (vssrani_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI), LSX_BUILTIN (vssrani_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI), LSX_BUILTIN (vssrani_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI), LSX_BUILTIN (vssrarni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI), LSX_BUILTIN (vssrarni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI), LSX_BUILTIN (vssrarni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vssrarni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI), LSX_BUILTIN (vssrarni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI), LSX_BUILTIN (vssrarni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI), LSX_BUILTIN (vssrarni_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI), 
LSX_BUILTIN (vssrarni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI), LSX_BUILTIN (vpermi_w, LARCH_V4SI_FTYPE_V4SI_V4SI_USI), LSX_BUILTIN (vld, LARCH_V16QI_FTYPE_CVPOINTER_SI), LSX_NO_TARGET_BUILTIN (vst, LARCH_VOID_FTYPE_V16QI_CVPOINTER_SI), LSX_BUILTIN (vssrlrn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI), LSX_BUILTIN (vssrlrn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI), LSX_BUILTIN (vssrlrn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI), LSX_BUILTIN (vssrln_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI), LSX_BUILTIN (vssrln_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI), LSX_BUILTIN (vssrln_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI), LSX_BUILTIN (vorn_v, LARCH_V16QI_FTYPE_V16QI_V16QI), LSX_BUILTIN (vldi, LARCH_V2DI_FTYPE_HI), LSX_BUILTIN (vshuf_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI), LSX_BUILTIN (vldx, LARCH_V16QI_FTYPE_CVPOINTER_DI), LSX_NO_TARGET_BUILTIN (vstx, LARCH_VOID_FTYPE_V16QI_CVPOINTER_DI), LSX_BUILTIN (vextl_qu_du, LARCH_UV2DI_FTYPE_UV2DI), /* Built-in functions for LASX */ LASX_BUILTIN (xvsll_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsll_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsll_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsll_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvslli_b, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvslli_h, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvslli_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvslli_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvsra_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsra_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsra_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsra_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsrai_b, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvsrai_h, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvsrai_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvsrai_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvsrar_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsrar_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsrar_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsrar_d, LARCH_V4DI_FTYPE_V4DI_V4DI), 
LASX_BUILTIN (xvsrari_b, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvsrari_h, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvsrari_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvsrari_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvsrl_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsrl_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsrl_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsrl_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsrli_b, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvsrli_h, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvsrli_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvsrli_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvsrlr_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsrlr_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsrlr_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsrlr_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsrlri_b, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvsrlri_h, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvsrlri_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvsrlri_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvbitclr_b, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvbitclr_h, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvbitclr_w, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvbitclr_d, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvbitclri_b, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvbitclri_h, LARCH_UV16HI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvbitclri_w, LARCH_UV8SI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvbitclri_d, LARCH_UV4DI_FTYPE_UV4DI_UQI), LASX_BUILTIN (xvbitset_b, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvbitset_h, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvbitset_w, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvbitset_d, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvbitseti_b, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvbitseti_h, LARCH_UV16HI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvbitseti_w, LARCH_UV8SI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvbitseti_d, 
LARCH_UV4DI_FTYPE_UV4DI_UQI), LASX_BUILTIN (xvbitrev_b, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvbitrev_h, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvbitrev_w, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvbitrev_d, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvbitrevi_b, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvbitrevi_h, LARCH_UV16HI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvbitrevi_w, LARCH_UV8SI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvbitrevi_d, LARCH_UV4DI_FTYPE_UV4DI_UQI), LASX_BUILTIN (xvadd_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvadd_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvadd_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvadd_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvaddi_bu, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvaddi_hu, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvaddi_wu, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvaddi_du, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvsub_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsub_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsub_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsub_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsubi_bu, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvsubi_hu, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvsubi_wu, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvsubi_du, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvmax_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvmax_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvmax_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvmax_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvmaxi_b, LARCH_V32QI_FTYPE_V32QI_QI), LASX_BUILTIN (xvmaxi_h, LARCH_V16HI_FTYPE_V16HI_QI), LASX_BUILTIN (xvmaxi_w, LARCH_V8SI_FTYPE_V8SI_QI), LASX_BUILTIN (xvmaxi_d, LARCH_V4DI_FTYPE_V4DI_QI), LASX_BUILTIN (xvmax_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvmax_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvmax_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvmax_du, 
LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvmaxi_bu, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvmaxi_hu, LARCH_UV16HI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvmaxi_wu, LARCH_UV8SI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvmaxi_du, LARCH_UV4DI_FTYPE_UV4DI_UQI), LASX_BUILTIN (xvmin_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvmin_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvmin_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvmin_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvmini_b, LARCH_V32QI_FTYPE_V32QI_QI), LASX_BUILTIN (xvmini_h, LARCH_V16HI_FTYPE_V16HI_QI), LASX_BUILTIN (xvmini_w, LARCH_V8SI_FTYPE_V8SI_QI), LASX_BUILTIN (xvmini_d, LARCH_V4DI_FTYPE_V4DI_QI), LASX_BUILTIN (xvmin_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvmin_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvmin_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvmin_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvmini_bu, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvmini_hu, LARCH_UV16HI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvmini_wu, LARCH_UV8SI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvmini_du, LARCH_UV4DI_FTYPE_UV4DI_UQI), LASX_BUILTIN (xvseq_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvseq_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvseq_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvseq_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvseqi_b, LARCH_V32QI_FTYPE_V32QI_QI), LASX_BUILTIN (xvseqi_h, LARCH_V16HI_FTYPE_V16HI_QI), LASX_BUILTIN (xvseqi_w, LARCH_V8SI_FTYPE_V8SI_QI), LASX_BUILTIN (xvseqi_d, LARCH_V4DI_FTYPE_V4DI_QI), LASX_BUILTIN (xvslt_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvslt_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvslt_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvslt_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvslti_b, LARCH_V32QI_FTYPE_V32QI_QI), LASX_BUILTIN (xvslti_h, LARCH_V16HI_FTYPE_V16HI_QI), LASX_BUILTIN (xvslti_w, LARCH_V8SI_FTYPE_V8SI_QI), LASX_BUILTIN (xvslti_d, LARCH_V4DI_FTYPE_V4DI_QI), LASX_BUILTIN (xvslt_bu, 
LARCH_V32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvslt_hu, LARCH_V16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvslt_wu, LARCH_V8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvslt_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvslti_bu, LARCH_V32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvslti_hu, LARCH_V16HI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvslti_wu, LARCH_V8SI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvslti_du, LARCH_V4DI_FTYPE_UV4DI_UQI), LASX_BUILTIN (xvsle_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsle_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsle_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsle_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvslei_b, LARCH_V32QI_FTYPE_V32QI_QI), LASX_BUILTIN (xvslei_h, LARCH_V16HI_FTYPE_V16HI_QI), LASX_BUILTIN (xvslei_w, LARCH_V8SI_FTYPE_V8SI_QI), LASX_BUILTIN (xvslei_d, LARCH_V4DI_FTYPE_V4DI_QI), LASX_BUILTIN (xvsle_bu, LARCH_V32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvsle_hu, LARCH_V16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvsle_wu, LARCH_V8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvsle_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvslei_bu, LARCH_V32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvslei_hu, LARCH_V16HI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvslei_wu, LARCH_V8SI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvslei_du, LARCH_V4DI_FTYPE_UV4DI_UQI), LASX_BUILTIN (xvsat_b, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvsat_h, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvsat_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvsat_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvsat_bu, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvsat_hu, LARCH_UV16HI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvsat_wu, LARCH_UV8SI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvsat_du, LARCH_UV4DI_FTYPE_UV4DI_UQI), LASX_BUILTIN (xvadda_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvadda_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvadda_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvadda_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsadd_b, LARCH_V32QI_FTYPE_V32QI_V32QI), 
LASX_BUILTIN (xvsadd_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsadd_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsadd_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsadd_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvsadd_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvsadd_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvsadd_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvavg_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvavg_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvavg_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvavg_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvavg_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvavg_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvavg_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvavg_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvavgr_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvavgr_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvavgr_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvavgr_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvavgr_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvavgr_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvavgr_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvavgr_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvssub_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvssub_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvssub_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvssub_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvssub_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvssub_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvssub_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvssub_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvabsd_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvabsd_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvabsd_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvabsd_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvabsd_bu, 
LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvabsd_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvabsd_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvabsd_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvmul_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvmul_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvmul_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvmul_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvmadd_b, LARCH_V32QI_FTYPE_V32QI_V32QI_V32QI), LASX_BUILTIN (xvmadd_h, LARCH_V16HI_FTYPE_V16HI_V16HI_V16HI), LASX_BUILTIN (xvmadd_w, LARCH_V8SI_FTYPE_V8SI_V8SI_V8SI), LASX_BUILTIN (xvmadd_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI), LASX_BUILTIN (xvmsub_b, LARCH_V32QI_FTYPE_V32QI_V32QI_V32QI), LASX_BUILTIN (xvmsub_h, LARCH_V16HI_FTYPE_V16HI_V16HI_V16HI), LASX_BUILTIN (xvmsub_w, LARCH_V8SI_FTYPE_V8SI_V8SI_V8SI), LASX_BUILTIN (xvmsub_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI), LASX_BUILTIN (xvdiv_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvdiv_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvdiv_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvdiv_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvdiv_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvdiv_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvdiv_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvdiv_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvhaddw_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvhaddw_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvhaddw_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvhaddw_hu_bu, LARCH_UV16HI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvhaddw_wu_hu, LARCH_UV8SI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvhaddw_du_wu, LARCH_UV4DI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvhsubw_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvhsubw_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvhsubw_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvhsubw_hu_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvhsubw_wu_hu, 
LARCH_V8SI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvhsubw_du_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvmod_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvmod_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvmod_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvmod_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvmod_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvmod_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvmod_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvmod_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvrepl128vei_b, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvrepl128vei_h, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvrepl128vei_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvrepl128vei_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvpickev_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvpickev_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvpickev_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvpickev_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvpickod_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvpickod_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvpickod_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvpickod_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvilvh_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvilvh_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvilvh_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvilvh_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvilvl_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvilvl_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvilvl_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvilvl_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvpackev_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvpackev_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvpackev_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvpackev_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvpackod_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvpackod_h, 
LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvpackod_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvpackod_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvshuf_b, LARCH_V32QI_FTYPE_V32QI_V32QI_V32QI), LASX_BUILTIN (xvshuf_h, LARCH_V16HI_FTYPE_V16HI_V16HI_V16HI), LASX_BUILTIN (xvshuf_w, LARCH_V8SI_FTYPE_V8SI_V8SI_V8SI), LASX_BUILTIN (xvshuf_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI), LASX_BUILTIN (xvand_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvandi_b, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvor_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvori_b, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvnor_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvnori_b, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvxor_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvxori_b, LARCH_UV32QI_FTYPE_UV32QI_UQI), LASX_BUILTIN (xvbitsel_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI_UV32QI), LASX_BUILTIN (xvbitseli_b, LARCH_UV32QI_FTYPE_UV32QI_UV32QI_USI), LASX_BUILTIN (xvshuf4i_b, LARCH_V32QI_FTYPE_V32QI_USI), LASX_BUILTIN (xvshuf4i_h, LARCH_V16HI_FTYPE_V16HI_USI), LASX_BUILTIN (xvshuf4i_w, LARCH_V8SI_FTYPE_V8SI_USI), LASX_BUILTIN (xvreplgr2vr_b, LARCH_V32QI_FTYPE_SI), LASX_BUILTIN (xvreplgr2vr_h, LARCH_V16HI_FTYPE_SI), LASX_BUILTIN (xvreplgr2vr_w, LARCH_V8SI_FTYPE_SI), LASX_BUILTIN (xvreplgr2vr_d, LARCH_V4DI_FTYPE_DI), LASX_BUILTIN (xvpcnt_b, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (xvpcnt_h, LARCH_V16HI_FTYPE_V16HI), LASX_BUILTIN (xvpcnt_w, LARCH_V8SI_FTYPE_V8SI), LASX_BUILTIN (xvpcnt_d, LARCH_V4DI_FTYPE_V4DI), LASX_BUILTIN (xvclo_b, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (xvclo_h, LARCH_V16HI_FTYPE_V16HI), LASX_BUILTIN (xvclo_w, LARCH_V8SI_FTYPE_V8SI), LASX_BUILTIN (xvclo_d, LARCH_V4DI_FTYPE_V4DI), LASX_BUILTIN (xvclz_b, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (xvclz_h, LARCH_V16HI_FTYPE_V16HI), LASX_BUILTIN (xvclz_w, LARCH_V8SI_FTYPE_V8SI), LASX_BUILTIN (xvclz_d, LARCH_V4DI_FTYPE_V4DI), LASX_BUILTIN (xvrepli_b, LARCH_V32QI_FTYPE_HI), LASX_BUILTIN (xvrepli_h, 
LARCH_V16HI_FTYPE_HI), LASX_BUILTIN (xvrepli_w, LARCH_V8SI_FTYPE_HI), LASX_BUILTIN (xvrepli_d, LARCH_V4DI_FTYPE_HI), LASX_BUILTIN (xvfcmp_caf_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_caf_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_cor_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_cor_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_cun_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_cun_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_cune_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_cune_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_cueq_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_cueq_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_ceq_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_ceq_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_cne_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_cne_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_clt_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_clt_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_cult_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_cult_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_cle_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_cle_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_cule_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_cule_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_saf_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_saf_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_sor_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_sor_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_sun_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_sun_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_sune_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_sune_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_sueq_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_sueq_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_seq_s, 
LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_seq_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_sne_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_sne_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_slt_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_slt_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_sult_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_sult_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_sle_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_sle_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcmp_sule_s, LARCH_V8SI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcmp_sule_d, LARCH_V4DI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfadd_s, LARCH_V8SF_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfadd_d, LARCH_V4DF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfsub_s, LARCH_V8SF_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfsub_d, LARCH_V4DF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfmul_s, LARCH_V8SF_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfmul_d, LARCH_V4DF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfdiv_s, LARCH_V8SF_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfdiv_d, LARCH_V4DF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfcvt_h_s, LARCH_V16HI_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfcvt_s_d, LARCH_V8SF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfmin_s, LARCH_V8SF_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfmin_d, LARCH_V4DF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfmina_s, LARCH_V8SF_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfmina_d, LARCH_V4DF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfmax_s, LARCH_V8SF_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfmax_d, LARCH_V4DF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfmaxa_s, LARCH_V8SF_FTYPE_V8SF_V8SF), LASX_BUILTIN (xvfmaxa_d, LARCH_V4DF_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvfclass_s, LARCH_V8SI_FTYPE_V8SF), LASX_BUILTIN (xvfclass_d, LARCH_V4DI_FTYPE_V4DF), LASX_BUILTIN (xvfsqrt_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvfsqrt_d, LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvfrecip_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvfrecip_d, LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvfrint_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvfrint_d, 
LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvfrsqrt_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvfrsqrt_d, LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvflogb_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvflogb_d, LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvfcvth_s_h, LARCH_V8SF_FTYPE_V16HI), LASX_BUILTIN (xvfcvth_d_s, LARCH_V4DF_FTYPE_V8SF), LASX_BUILTIN (xvfcvtl_s_h, LARCH_V8SF_FTYPE_V16HI), LASX_BUILTIN (xvfcvtl_d_s, LARCH_V4DF_FTYPE_V8SF), LASX_BUILTIN (xvftint_w_s, LARCH_V8SI_FTYPE_V8SF), LASX_BUILTIN (xvftint_l_d, LARCH_V4DI_FTYPE_V4DF), LASX_BUILTIN (xvftint_wu_s, LARCH_UV8SI_FTYPE_V8SF), LASX_BUILTIN (xvftint_lu_d, LARCH_UV4DI_FTYPE_V4DF), LASX_BUILTIN (xvftintrz_w_s, LARCH_V8SI_FTYPE_V8SF), LASX_BUILTIN (xvftintrz_l_d, LARCH_V4DI_FTYPE_V4DF), LASX_BUILTIN (xvftintrz_wu_s, LARCH_UV8SI_FTYPE_V8SF), LASX_BUILTIN (xvftintrz_lu_d, LARCH_UV4DI_FTYPE_V4DF), LASX_BUILTIN (xvffint_s_w, LARCH_V8SF_FTYPE_V8SI), LASX_BUILTIN (xvffint_d_l, LARCH_V4DF_FTYPE_V4DI), LASX_BUILTIN (xvffint_s_wu, LARCH_V8SF_FTYPE_UV8SI), LASX_BUILTIN (xvffint_d_lu, LARCH_V4DF_FTYPE_UV4DI), LASX_BUILTIN (xvreplve_b, LARCH_V32QI_FTYPE_V32QI_SI), LASX_BUILTIN (xvreplve_h, LARCH_V16HI_FTYPE_V16HI_SI), LASX_BUILTIN (xvreplve_w, LARCH_V8SI_FTYPE_V8SI_SI), LASX_BUILTIN (xvreplve_d, LARCH_V4DI_FTYPE_V4DI_SI), LASX_BUILTIN (xvpermi_w, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvandn_v, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvneg_b, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (xvneg_h, LARCH_V16HI_FTYPE_V16HI), LASX_BUILTIN (xvneg_w, LARCH_V8SI_FTYPE_V8SI), LASX_BUILTIN (xvneg_d, LARCH_V4DI_FTYPE_V4DI), LASX_BUILTIN (xvmuh_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvmuh_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvmuh_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvmuh_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvmuh_bu, LARCH_UV32QI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvmuh_hu, LARCH_UV16HI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvmuh_wu, LARCH_UV8SI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvmuh_du, 
LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvsllwil_h_b, LARCH_V16HI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvsllwil_w_h, LARCH_V8SI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvsllwil_d_w, LARCH_V4DI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvsllwil_hu_bu, LARCH_UV16HI_FTYPE_UV32QI_UQI), /* FIXME: U? */ LASX_BUILTIN (xvsllwil_wu_hu, LARCH_UV8SI_FTYPE_UV16HI_UQI), LASX_BUILTIN (xvsllwil_du_wu, LARCH_UV4DI_FTYPE_UV8SI_UQI), LASX_BUILTIN (xvsran_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsran_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsran_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvssran_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvssran_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvssran_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvssran_bu_h, LARCH_UV32QI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvssran_hu_w, LARCH_UV16HI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvssran_wu_d, LARCH_UV8SI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvsrarn_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsrarn_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsrarn_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvssrarn_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvssrarn_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvssrarn_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvssrarn_bu_h, LARCH_UV32QI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvssrarn_hu_w, LARCH_UV16HI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvssrarn_wu_d, LARCH_UV8SI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvsrln_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsrln_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsrln_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvssrln_bu_h, LARCH_UV32QI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvssrln_hu_w, LARCH_UV16HI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvssrln_wu_d, LARCH_UV8SI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvsrlrn_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsrlrn_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsrlrn_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI), LASX_BUILTIN 
(xvssrlrn_bu_h, LARCH_UV32QI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvssrlrn_hu_w, LARCH_UV16HI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvssrlrn_wu_d, LARCH_UV8SI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvfrstpi_b, LARCH_V32QI_FTYPE_V32QI_V32QI_UQI), LASX_BUILTIN (xvfrstpi_h, LARCH_V16HI_FTYPE_V16HI_V16HI_UQI), LASX_BUILTIN (xvfrstp_b, LARCH_V32QI_FTYPE_V32QI_V32QI_V32QI), LASX_BUILTIN (xvfrstp_h, LARCH_V16HI_FTYPE_V16HI_V16HI_V16HI), LASX_BUILTIN (xvshuf4i_d, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvbsrl_v, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvbsll_v, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvextrins_b, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvextrins_h, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvextrins_w, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvextrins_d, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvmskltz_b, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (xvmskltz_h, LARCH_V16HI_FTYPE_V16HI), LASX_BUILTIN (xvmskltz_w, LARCH_V8SI_FTYPE_V8SI), LASX_BUILTIN (xvmskltz_d, LARCH_V4DI_FTYPE_V4DI), LASX_BUILTIN (xvsigncov_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsigncov_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsigncov_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsigncov_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvfmadd_s, LARCH_V8SF_FTYPE_V8SF_V8SF_V8SF), LASX_BUILTIN (xvfmadd_d, LARCH_V4DF_FTYPE_V4DF_V4DF_V4DF), LASX_BUILTIN (xvfmsub_s, LARCH_V8SF_FTYPE_V8SF_V8SF_V8SF), LASX_BUILTIN (xvfmsub_d, LARCH_V4DF_FTYPE_V4DF_V4DF_V4DF), LASX_BUILTIN (xvfnmadd_s, LARCH_V8SF_FTYPE_V8SF_V8SF_V8SF), LASX_BUILTIN (xvfnmadd_d, LARCH_V4DF_FTYPE_V4DF_V4DF_V4DF), LASX_BUILTIN (xvfnmsub_s, LARCH_V8SF_FTYPE_V8SF_V8SF_V8SF), LASX_BUILTIN (xvfnmsub_d, LARCH_V4DF_FTYPE_V4DF_V4DF_V4DF), LASX_BUILTIN (xvftintrne_w_s, LARCH_V8SI_FTYPE_V8SF), LASX_BUILTIN (xvftintrne_l_d, LARCH_V4DI_FTYPE_V4DF), LASX_BUILTIN (xvftintrp_w_s, LARCH_V8SI_FTYPE_V8SF), LASX_BUILTIN (xvftintrp_l_d, LARCH_V4DI_FTYPE_V4DF), LASX_BUILTIN (xvftintrm_w_s, 
LARCH_V8SI_FTYPE_V8SF), LASX_BUILTIN (xvftintrm_l_d, LARCH_V4DI_FTYPE_V4DF), LASX_BUILTIN (xvftint_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvffint_s_l, LARCH_V8SF_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvftintrz_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvftintrp_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvftintrm_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvftintrne_w_d, LARCH_V8SI_FTYPE_V4DF_V4DF), LASX_BUILTIN (xvftinth_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvftintl_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvffinth_d_w, LARCH_V4DF_FTYPE_V8SI), LASX_BUILTIN (xvffintl_d_w, LARCH_V4DF_FTYPE_V8SI), LASX_BUILTIN (xvftintrzh_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvftintrzl_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvftintrph_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvftintrpl_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvftintrmh_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvftintrml_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvftintrneh_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvftintrnel_l_s, LARCH_V4DI_FTYPE_V8SF), LASX_BUILTIN (xvfrintrne_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvfrintrne_d, LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvfrintrz_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvfrintrz_d, LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvfrintrp_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvfrintrp_d, LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvfrintrm_s, LARCH_V8SF_FTYPE_V8SF), LASX_BUILTIN (xvfrintrm_d, LARCH_V4DF_FTYPE_V4DF), LASX_BUILTIN (xvld, LARCH_V32QI_FTYPE_CVPOINTER_SI), LASX_NO_TARGET_BUILTIN (xvst, LARCH_VOID_FTYPE_V32QI_CVPOINTER_SI), LASX_NO_TARGET_BUILTIN (xvstelm_b, LARCH_VOID_FTYPE_V32QI_CVPOINTER_SI_UQI), LASX_NO_TARGET_BUILTIN (xvstelm_h, LARCH_VOID_FTYPE_V16HI_CVPOINTER_SI_UQI), LASX_NO_TARGET_BUILTIN (xvstelm_w, LARCH_VOID_FTYPE_V8SI_CVPOINTER_SI_UQI), LASX_NO_TARGET_BUILTIN (xvstelm_d, LARCH_VOID_FTYPE_V4DI_CVPOINTER_SI_UQI), LASX_BUILTIN (xvinsve0_w, LARCH_V8SI_FTYPE_V8SI_V8SI_UQI), LASX_BUILTIN (xvinsve0_d, 
LARCH_V4DI_FTYPE_V4DI_V4DI_UQI), LASX_BUILTIN (xvpickve_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvpickve_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvpickve_w_f, LARCH_V8SF_FTYPE_V8SF_UQI), LASX_BUILTIN (xvpickve_d_f, LARCH_V4DF_FTYPE_V4DF_UQI), LASX_BUILTIN (xvssrlrn_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvssrlrn_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvssrlrn_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvssrln_b_h, LARCH_V32QI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvssrln_h_w, LARCH_V16HI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvssrln_w_d, LARCH_V8SI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvorn_v, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvldi, LARCH_V4DI_FTYPE_HI), LASX_BUILTIN (xvldx, LARCH_V32QI_FTYPE_CVPOINTER_DI), LASX_NO_TARGET_BUILTIN (xvstx, LARCH_VOID_FTYPE_V32QI_CVPOINTER_DI), LASX_BUILTIN (xvextl_qu_du, LARCH_UV4DI_FTYPE_UV4DI), /* LASX */ LASX_BUILTIN (xvinsgr2vr_w, LARCH_V8SI_FTYPE_V8SI_SI_UQI), LASX_BUILTIN (xvinsgr2vr_d, LARCH_V4DI_FTYPE_V4DI_DI_UQI), LASX_BUILTIN (xvreplve0_b, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (xvreplve0_h, LARCH_V16HI_FTYPE_V16HI), LASX_BUILTIN (xvreplve0_w, LARCH_V8SI_FTYPE_V8SI), LASX_BUILTIN (xvreplve0_d, LARCH_V4DI_FTYPE_V4DI), LASX_BUILTIN (xvreplve0_q, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (vext2xv_h_b, LARCH_V16HI_FTYPE_V32QI), LASX_BUILTIN (vext2xv_w_h, LARCH_V8SI_FTYPE_V16HI), LASX_BUILTIN (vext2xv_d_w, LARCH_V4DI_FTYPE_V8SI), LASX_BUILTIN (vext2xv_w_b, LARCH_V8SI_FTYPE_V32QI), LASX_BUILTIN (vext2xv_d_h, LARCH_V4DI_FTYPE_V16HI), LASX_BUILTIN (vext2xv_d_b, LARCH_V4DI_FTYPE_V32QI), LASX_BUILTIN (vext2xv_hu_bu, LARCH_V16HI_FTYPE_V32QI), LASX_BUILTIN (vext2xv_wu_hu, LARCH_V8SI_FTYPE_V16HI), LASX_BUILTIN (vext2xv_du_wu, LARCH_V4DI_FTYPE_V8SI), LASX_BUILTIN (vext2xv_wu_bu, LARCH_V8SI_FTYPE_V32QI), LASX_BUILTIN (vext2xv_du_hu, LARCH_V4DI_FTYPE_V16HI), LASX_BUILTIN (vext2xv_du_bu, LARCH_V4DI_FTYPE_V32QI), LASX_BUILTIN (xvpermi_q, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvpermi_d, 
LARCH_V4DI_FTYPE_V4DI_USI), LASX_BUILTIN (xvperm_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN_TEST_BRANCH (xbz_b, LARCH_SI_FTYPE_UV32QI), LASX_BUILTIN_TEST_BRANCH (xbz_h, LARCH_SI_FTYPE_UV16HI), LASX_BUILTIN_TEST_BRANCH (xbz_w, LARCH_SI_FTYPE_UV8SI), LASX_BUILTIN_TEST_BRANCH (xbz_d, LARCH_SI_FTYPE_UV4DI), LASX_BUILTIN_TEST_BRANCH (xbnz_b, LARCH_SI_FTYPE_UV32QI), LASX_BUILTIN_TEST_BRANCH (xbnz_h, LARCH_SI_FTYPE_UV16HI), LASX_BUILTIN_TEST_BRANCH (xbnz_w, LARCH_SI_FTYPE_UV8SI), LASX_BUILTIN_TEST_BRANCH (xbnz_d, LARCH_SI_FTYPE_UV4DI), LASX_BUILTIN_TEST_BRANCH (xbz_v, LARCH_SI_FTYPE_UV32QI), LASX_BUILTIN_TEST_BRANCH (xbnz_v, LARCH_SI_FTYPE_UV32QI), LASX_BUILTIN (xvldrepl_b, LARCH_V32QI_FTYPE_CVPOINTER_SI), LASX_BUILTIN (xvldrepl_h, LARCH_V16HI_FTYPE_CVPOINTER_SI), LASX_BUILTIN (xvldrepl_w, LARCH_V8SI_FTYPE_CVPOINTER_SI), LASX_BUILTIN (xvldrepl_d, LARCH_V4DI_FTYPE_CVPOINTER_SI), LASX_BUILTIN (xvpickve2gr_w, LARCH_SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvpickve2gr_wu, LARCH_USI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvpickve2gr_d, LARCH_DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvpickve2gr_du, LARCH_UDI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvaddwev_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvaddwev_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvaddwev_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvaddwev_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvaddwev_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvaddwev_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvaddwev_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvaddwev_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvsubwev_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsubwev_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsubwev_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsubwev_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsubwev_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvsubwev_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvsubwev_w_hu, 
LARCH_V8SI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvsubwev_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvmulwev_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvmulwev_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvmulwev_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvmulwev_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvmulwev_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvmulwev_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvmulwev_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvmulwev_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvaddwod_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvaddwod_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvaddwod_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvaddwod_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvaddwod_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvaddwod_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvaddwod_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvaddwod_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvsubwod_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsubwod_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvsubwod_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvsubwod_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvsubwod_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvsubwod_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvsubwod_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvsubwod_h_bu, LARCH_V16HI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvmulwod_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvmulwod_d_w, LARCH_V4DI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvmulwod_w_h, LARCH_V8SI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvmulwod_h_b, LARCH_V16HI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvmulwod_q_du, LARCH_V4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvmulwod_d_wu, LARCH_V4DI_FTYPE_UV8SI_UV8SI), LASX_BUILTIN (xvmulwod_w_hu, LARCH_V8SI_FTYPE_UV16HI_UV16HI), LASX_BUILTIN (xvmulwod_h_bu, 
LARCH_V16HI_FTYPE_UV32QI_UV32QI), LASX_BUILTIN (xvaddwev_d_wu_w, LARCH_V4DI_FTYPE_UV8SI_V8SI), LASX_BUILTIN (xvaddwev_w_hu_h, LARCH_V8SI_FTYPE_UV16HI_V16HI), LASX_BUILTIN (xvaddwev_h_bu_b, LARCH_V16HI_FTYPE_UV32QI_V32QI), LASX_BUILTIN (xvmulwev_d_wu_w, LARCH_V4DI_FTYPE_UV8SI_V8SI), LASX_BUILTIN (xvmulwev_w_hu_h, LARCH_V8SI_FTYPE_UV16HI_V16HI), LASX_BUILTIN (xvmulwev_h_bu_b, LARCH_V16HI_FTYPE_UV32QI_V32QI), LASX_BUILTIN (xvaddwod_d_wu_w, LARCH_V4DI_FTYPE_UV8SI_V8SI), LASX_BUILTIN (xvaddwod_w_hu_h, LARCH_V8SI_FTYPE_UV16HI_V16HI), LASX_BUILTIN (xvaddwod_h_bu_b, LARCH_V16HI_FTYPE_UV32QI_V32QI), LASX_BUILTIN (xvmulwod_d_wu_w, LARCH_V4DI_FTYPE_UV8SI_V8SI), LASX_BUILTIN (xvmulwod_w_hu_h, LARCH_V8SI_FTYPE_UV16HI_V16HI), LASX_BUILTIN (xvmulwod_h_bu_b, LARCH_V16HI_FTYPE_UV32QI_V32QI), LASX_BUILTIN (xvhaddw_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvhaddw_qu_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvhsubw_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvhsubw_qu_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI), LASX_BUILTIN (xvmaddwev_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI), LASX_BUILTIN (xvmaddwev_d_w, LARCH_V4DI_FTYPE_V4DI_V8SI_V8SI), LASX_BUILTIN (xvmaddwev_w_h, LARCH_V8SI_FTYPE_V8SI_V16HI_V16HI), LASX_BUILTIN (xvmaddwev_h_b, LARCH_V16HI_FTYPE_V16HI_V32QI_V32QI), LASX_BUILTIN (xvmaddwev_q_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI_UV4DI), LASX_BUILTIN (xvmaddwev_d_wu, LARCH_UV4DI_FTYPE_UV4DI_UV8SI_UV8SI), LASX_BUILTIN (xvmaddwev_w_hu, LARCH_UV8SI_FTYPE_UV8SI_UV16HI_UV16HI), LASX_BUILTIN (xvmaddwev_h_bu, LARCH_UV16HI_FTYPE_UV16HI_UV32QI_UV32QI), LASX_BUILTIN (xvmaddwod_q_d, LARCH_V4DI_FTYPE_V4DI_V4DI_V4DI), LASX_BUILTIN (xvmaddwod_d_w, LARCH_V4DI_FTYPE_V4DI_V8SI_V8SI), LASX_BUILTIN (xvmaddwod_w_h, LARCH_V8SI_FTYPE_V8SI_V16HI_V16HI), LASX_BUILTIN (xvmaddwod_h_b, LARCH_V16HI_FTYPE_V16HI_V32QI_V32QI), LASX_BUILTIN (xvmaddwod_q_du, LARCH_UV4DI_FTYPE_UV4DI_UV4DI_UV4DI), LASX_BUILTIN (xvmaddwod_d_wu, LARCH_UV4DI_FTYPE_UV4DI_UV8SI_UV8SI), LASX_BUILTIN (xvmaddwod_w_hu, 
LARCH_UV8SI_FTYPE_UV8SI_UV16HI_UV16HI), LASX_BUILTIN (xvmaddwod_h_bu, LARCH_UV16HI_FTYPE_UV16HI_UV32QI_UV32QI), LASX_BUILTIN (xvmaddwev_q_du_d, LARCH_V4DI_FTYPE_V4DI_UV4DI_V4DI), LASX_BUILTIN (xvmaddwev_d_wu_w, LARCH_V4DI_FTYPE_V4DI_UV8SI_V8SI), LASX_BUILTIN (xvmaddwev_w_hu_h, LARCH_V8SI_FTYPE_V8SI_UV16HI_V16HI), LASX_BUILTIN (xvmaddwev_h_bu_b, LARCH_V16HI_FTYPE_V16HI_UV32QI_V32QI), LASX_BUILTIN (xvmaddwod_q_du_d, LARCH_V4DI_FTYPE_V4DI_UV4DI_V4DI), LASX_BUILTIN (xvmaddwod_d_wu_w, LARCH_V4DI_FTYPE_V4DI_UV8SI_V8SI), LASX_BUILTIN (xvmaddwod_w_hu_h, LARCH_V8SI_FTYPE_V8SI_UV16HI_V16HI), LASX_BUILTIN (xvmaddwod_h_bu_b, LARCH_V16HI_FTYPE_V16HI_UV32QI_V32QI), LASX_BUILTIN (xvrotr_b, LARCH_V32QI_FTYPE_V32QI_V32QI), LASX_BUILTIN (xvrotr_h, LARCH_V16HI_FTYPE_V16HI_V16HI), LASX_BUILTIN (xvrotr_w, LARCH_V8SI_FTYPE_V8SI_V8SI), LASX_BUILTIN (xvrotr_d, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvadd_q, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvsub_q, LARCH_V4DI_FTYPE_V4DI_V4DI), LASX_BUILTIN (xvaddwev_q_du_d, LARCH_V4DI_FTYPE_UV4DI_V4DI), LASX_BUILTIN (xvaddwod_q_du_d, LARCH_V4DI_FTYPE_UV4DI_V4DI), LASX_BUILTIN (xvmulwev_q_du_d, LARCH_V4DI_FTYPE_UV4DI_V4DI), LASX_BUILTIN (xvmulwod_q_du_d, LARCH_V4DI_FTYPE_UV4DI_V4DI), LASX_BUILTIN (xvmskgez_b, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (xvmsknz_b, LARCH_V32QI_FTYPE_V32QI), LASX_BUILTIN (xvexth_h_b, LARCH_V16HI_FTYPE_V32QI), LASX_BUILTIN (xvexth_w_h, LARCH_V8SI_FTYPE_V16HI), LASX_BUILTIN (xvexth_d_w, LARCH_V4DI_FTYPE_V8SI), LASX_BUILTIN (xvexth_q_d, LARCH_V4DI_FTYPE_V4DI), LASX_BUILTIN (xvexth_hu_bu, LARCH_UV16HI_FTYPE_UV32QI), LASX_BUILTIN (xvexth_wu_hu, LARCH_UV8SI_FTYPE_UV16HI), LASX_BUILTIN (xvexth_du_wu, LARCH_UV4DI_FTYPE_UV8SI), LASX_BUILTIN (xvexth_qu_du, LARCH_UV4DI_FTYPE_UV4DI), LASX_BUILTIN (xvrotri_b, LARCH_V32QI_FTYPE_V32QI_UQI), LASX_BUILTIN (xvrotri_h, LARCH_V16HI_FTYPE_V16HI_UQI), LASX_BUILTIN (xvrotri_w, LARCH_V8SI_FTYPE_V8SI_UQI), LASX_BUILTIN (xvrotri_d, LARCH_V4DI_FTYPE_V4DI_UQI), LASX_BUILTIN (xvextl_q_d, 
LARCH_V4DI_FTYPE_V4DI), LASX_BUILTIN (xvsrlni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvsrlni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvsrlni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvsrlni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvsrlrni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvsrlrni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvsrlrni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvsrlrni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvssrlni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvssrlni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvssrlni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvssrlni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvssrlni_bu_h, LARCH_UV32QI_FTYPE_UV32QI_V32QI_USI), LASX_BUILTIN (xvssrlni_hu_w, LARCH_UV16HI_FTYPE_UV16HI_V16HI_USI), LASX_BUILTIN (xvssrlni_wu_d, LARCH_UV8SI_FTYPE_UV8SI_V8SI_USI), LASX_BUILTIN (xvssrlni_du_q, LARCH_UV4DI_FTYPE_UV4DI_V4DI_USI), LASX_BUILTIN (xvssrlrni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvssrlrni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvssrlrni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvssrlrni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvssrlrni_bu_h, LARCH_UV32QI_FTYPE_UV32QI_V32QI_USI), LASX_BUILTIN (xvssrlrni_hu_w, LARCH_UV16HI_FTYPE_UV16HI_V16HI_USI), LASX_BUILTIN (xvssrlrni_wu_d, LARCH_UV8SI_FTYPE_UV8SI_V8SI_USI), LASX_BUILTIN (xvssrlrni_du_q, LARCH_UV4DI_FTYPE_UV4DI_V4DI_USI), LASX_BUILTIN (xvsrani_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvsrani_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvsrani_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvsrani_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvsrarni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvsrarni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvsrarni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN 
(xvsrarni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvssrani_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvssrani_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvssrani_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvssrani_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvssrani_bu_h, LARCH_UV32QI_FTYPE_UV32QI_V32QI_USI), LASX_BUILTIN (xvssrani_hu_w, LARCH_UV16HI_FTYPE_UV16HI_V16HI_USI), LASX_BUILTIN (xvssrani_wu_d, LARCH_UV8SI_FTYPE_UV8SI_V8SI_USI), LASX_BUILTIN (xvssrani_du_q, LARCH_UV4DI_FTYPE_UV4DI_V4DI_USI), LASX_BUILTIN (xvssrarni_b_h, LARCH_V32QI_FTYPE_V32QI_V32QI_USI), LASX_BUILTIN (xvssrarni_h_w, LARCH_V16HI_FTYPE_V16HI_V16HI_USI), LASX_BUILTIN (xvssrarni_w_d, LARCH_V8SI_FTYPE_V8SI_V8SI_USI), LASX_BUILTIN (xvssrarni_d_q, LARCH_V4DI_FTYPE_V4DI_V4DI_USI), LASX_BUILTIN (xvssrarni_bu_h, LARCH_UV32QI_FTYPE_UV32QI_V32QI_USI), LASX_BUILTIN (xvssrarni_hu_w, LARCH_UV16HI_FTYPE_UV16HI_V16HI_USI), LASX_BUILTIN (xvssrarni_wu_d, LARCH_UV8SI_FTYPE_UV8SI_V8SI_USI), LASX_BUILTIN (xvssrarni_du_q, LARCH_UV4DI_FTYPE_UV4DI_V4DI_USI) }; /* Index I is the function declaration for loongarch_builtins[I], or null if the function isn't defined on this target. */ static GTY (()) tree loongarch_builtin_decls[ARRAY_SIZE (loongarch_builtins)]; /* Get the index I of the function declaration for loongarch_builtin_decls[I] using the instruction code or return null if not defined for the target. */ static GTY (()) int loongarch_get_builtin_decl_index[NUM_INSN_CODES]; /* MODE is a vector mode whose elements have type TYPE. Return the type of the vector itself. 
*/ static tree loongarch_builtin_vector_type (tree type, machine_mode mode) { static tree types[2 * (int) MAX_MACHINE_MODE]; int mode_index; mode_index = (int) mode; if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)) mode_index += MAX_MACHINE_MODE; if (types[mode_index] == NULL_TREE) types[mode_index] = build_vector_type_for_mode (type, mode); return types[mode_index]; } /* Return a type for 'const volatile void *'. */ static tree loongarch_build_cvpointer_type (void) { static tree cache; if (cache == NULL_TREE) cache = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE)); return cache; } /* Source-level argument types. */ #define LARCH_ATYPE_VOID void_type_node #define LARCH_ATYPE_INT integer_type_node #define LARCH_ATYPE_POINTER ptr_type_node #define LARCH_ATYPE_CVPOINTER loongarch_build_cvpointer_type () #define LARCH_ATYPE_BOOLEAN boolean_type_node /* Standard mode-based argument types. */ #define LARCH_ATYPE_QI intQI_type_node #define LARCH_ATYPE_UQI unsigned_intQI_type_node #define LARCH_ATYPE_HI intHI_type_node #define LARCH_ATYPE_UHI unsigned_intHI_type_node #define LARCH_ATYPE_SI intSI_type_node #define LARCH_ATYPE_USI unsigned_intSI_type_node #define LARCH_ATYPE_DI intDI_type_node #define LARCH_ATYPE_UDI unsigned_intDI_type_node #define LARCH_ATYPE_SF float_type_node #define LARCH_ATYPE_DF double_type_node /* Vector argument types. 
*/
/* Signed-element vector argument types, built lazily through
   loongarch_builtin_vector_type.  The mode name encodes element count
   and element mode (e.g. V4SI = four SImode elements).  */
#define LARCH_ATYPE_V2SF \
  loongarch_builtin_vector_type (float_type_node, V2SFmode)
#define LARCH_ATYPE_V2HI \
  loongarch_builtin_vector_type (intHI_type_node, V2HImode)
#define LARCH_ATYPE_V2SI \
  loongarch_builtin_vector_type (intSI_type_node, V2SImode)
#define LARCH_ATYPE_V4QI \
  loongarch_builtin_vector_type (intQI_type_node, V4QImode)
#define LARCH_ATYPE_V4HI \
  loongarch_builtin_vector_type (intHI_type_node, V4HImode)
#define LARCH_ATYPE_V8QI \
  loongarch_builtin_vector_type (intQI_type_node, V8QImode)
#define LARCH_ATYPE_V2DI \
  loongarch_builtin_vector_type (long_long_integer_type_node, V2DImode)
#define LARCH_ATYPE_V4SI \
  loongarch_builtin_vector_type (intSI_type_node, V4SImode)
#define LARCH_ATYPE_V8HI \
  loongarch_builtin_vector_type (intHI_type_node, V8HImode)
#define LARCH_ATYPE_V16QI \
  loongarch_builtin_vector_type (intQI_type_node, V16QImode)
#define LARCH_ATYPE_V2DF \
  loongarch_builtin_vector_type (double_type_node, V2DFmode)
#define LARCH_ATYPE_V4SF \
  loongarch_builtin_vector_type (float_type_node, V4SFmode)

/* LoongArch ASX.
*/
/* 256-bit (LASX) signed-element vector argument types.  */
#define LARCH_ATYPE_V4DI \
  loongarch_builtin_vector_type (long_long_integer_type_node, V4DImode)
#define LARCH_ATYPE_V8SI \
  loongarch_builtin_vector_type (intSI_type_node, V8SImode)
#define LARCH_ATYPE_V16HI \
  loongarch_builtin_vector_type (intHI_type_node, V16HImode)
#define LARCH_ATYPE_V32QI \
  loongarch_builtin_vector_type (intQI_type_node, V32QImode)
#define LARCH_ATYPE_V4DF \
  loongarch_builtin_vector_type (double_type_node, V4DFmode)
#define LARCH_ATYPE_V8SF \
  loongarch_builtin_vector_type (float_type_node, V8SFmode)

/* Unsigned-element variants of the vector argument types above.  */
#define LARCH_ATYPE_UV2DI \
  loongarch_builtin_vector_type (long_long_unsigned_type_node, V2DImode)
#define LARCH_ATYPE_UV4SI \
  loongarch_builtin_vector_type (unsigned_intSI_type_node, V4SImode)
#define LARCH_ATYPE_UV8HI \
  loongarch_builtin_vector_type (unsigned_intHI_type_node, V8HImode)
#define LARCH_ATYPE_UV16QI \
  loongarch_builtin_vector_type (unsigned_intQI_type_node, V16QImode)
#define LARCH_ATYPE_UV4DI \
  loongarch_builtin_vector_type (long_long_unsigned_type_node, V4DImode)
#define LARCH_ATYPE_UV8SI \
  loongarch_builtin_vector_type (unsigned_intSI_type_node, V8SImode)
#define LARCH_ATYPE_UV16HI \
  loongarch_builtin_vector_type (unsigned_intHI_type_node, V16HImode)
#define LARCH_ATYPE_UV32QI \
  loongarch_builtin_vector_type (unsigned_intQI_type_node, V32QImode)
#define LARCH_ATYPE_UV2SI \
  loongarch_builtin_vector_type (unsigned_intSI_type_node, V2SImode)
#define LARCH_ATYPE_UV4HI \
  loongarch_builtin_vector_type (unsigned_intHI_type_node, V4HImode)
#define LARCH_ATYPE_UV8QI \
  loongarch_builtin_vector_type (unsigned_intQI_type_node, V8QImode)

/* LARCH_FTYPE_ATYPESN takes N LARCH_FTYPES-like type codes and lists their associated LARCH_ATYPEs.
*/
#define LARCH_FTYPE_ATYPES1(A, B) LARCH_ATYPE_##A, LARCH_ATYPE_##B
#define LARCH_FTYPE_ATYPES2(A, B, C) \
  LARCH_ATYPE_##A, LARCH_ATYPE_##B, LARCH_ATYPE_##C
#define LARCH_FTYPE_ATYPES3(A, B, C, D) \
  LARCH_ATYPE_##A, LARCH_ATYPE_##B, LARCH_ATYPE_##C, LARCH_ATYPE_##D
#define LARCH_FTYPE_ATYPES4(A, B, C, D, E) \
  LARCH_ATYPE_##A, LARCH_ATYPE_##B, LARCH_ATYPE_##C, LARCH_ATYPE_##D, \
  LARCH_ATYPE_##E

/* Return the function type associated with function prototype TYPE.
   Types are built on demand and cached, one entry per
   loongarch_function_type value; the per-NUM DEF_LARCH_FTYPE expansion
   below turns each prototype listed in loongarch-ftypes.def into a
   build_function_type_list call over the matching LARCH_ATYPEs.  */

static tree
loongarch_build_function_type (enum loongarch_function_type type)
{
  static tree types[(int) LARCH_MAX_FTYPE_MAX];

  if (types[(int) type] == NULL_TREE)
    switch (type)
      {
#define DEF_LARCH_FTYPE(NUM, ARGS) \
  case LARCH_FTYPE_NAME##NUM ARGS: \
    types[(int) type] \
      = build_function_type_list (LARCH_FTYPE_ATYPES##NUM ARGS, NULL_TREE); \
    break;
#include "config/loongarch/loongarch-ftypes.def"
#undef DEF_LARCH_FTYPE
      default:
	gcc_unreachable ();
      }

  return types[(int) type];
}

/* Implement TARGET_INIT_BUILTINS.  Registers every entry of
   loongarch_builtins as a machine-specific built-in, records its decl in
   loongarch_builtin_decls and its index (keyed by insn code) in
   loongarch_get_builtin_decl_index.  */

void
loongarch_init_builtins (void)
{
  const struct loongarch_builtin_description *d;
  unsigned int i;
  tree type;

  /* Register the type float128_type_node as a built-in type and give it
     an alias "__float128".  */
  (*lang_hooks.types.register_builtin_type) (float128_type_node,
					     "__float128");

  /* Iterate through all of the bdesc arrays, initializing all of the
     builtin functions.  The builtin's function code is its index I in
     loongarch_builtins.  */
  for (i = 0; i < ARRAY_SIZE (loongarch_builtins); i++)
    {
      d = &loongarch_builtins[i];
      type = loongarch_build_function_type (d->function_type);
      loongarch_builtin_decls[i]
	= add_builtin_function (d->name, type, i, BUILT_IN_MD, NULL, NULL);
      loongarch_get_builtin_decl_index[d->icode] = i;
    }
}

/* Implement TARGET_BUILTIN_DECL.  Return the decl registered for
   function code CODE, or error_mark_node if CODE is out of range.  */

tree
loongarch_builtin_decl (unsigned int code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= ARRAY_SIZE (loongarch_builtins))
    return error_mark_node;
  return loongarch_builtin_decls[code];
}

/* Implement TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION.
*/
/* Map the scalar rounding functions (ceil/trunc/rint/round/floor) onto
   the LSX/LASX vfrint* builtins for the requested vector in/out types,
   or return NULL_TREE when no match exists.
   NOTE(review): only ISA_HAS_LSX is checked here even though the
   8-element SF and 4-element DF cases return LASX builtins — presumably
   the vectorizer only requests 256-bit modes when LASX is enabled;
   confirm against the mode-availability logic.  */

tree
loongarch_builtin_vectorized_function (unsigned int fn, tree type_out,
				       tree type_in)
{
  machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE
      || !ISA_HAS_LSX)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

  /* INSN is the name of the associated instruction pattern, without
     the leading CODE_FOR_.  */
#define LARCH_GET_BUILTIN(INSN) \
  loongarch_builtin_decls[loongarch_get_builtin_decl_index[CODE_FOR_##INSN]]

  switch (fn)
    {
    CASE_CFN_CEIL:
      /* Round towards positive infinity.  */
      if (out_mode == DFmode && in_mode == DFmode)
	{
	  if (out_n == 2 && in_n == 2)
	    return LARCH_GET_BUILTIN (lsx_vfrintrp_d);
	  if (out_n == 4 && in_n == 4)
	    return LARCH_GET_BUILTIN (lasx_xvfrintrp_d);
	}
      if (out_mode == SFmode && in_mode == SFmode)
	{
	  if (out_n == 4 && in_n == 4)
	    return LARCH_GET_BUILTIN (lsx_vfrintrp_s);
	  if (out_n == 8 && in_n == 8)
	    return LARCH_GET_BUILTIN (lasx_xvfrintrp_s);
	}
      break;

    CASE_CFN_TRUNC:
      /* Round towards zero.  */
      if (out_mode == DFmode && in_mode == DFmode)
	{
	  if (out_n == 2 && in_n == 2)
	    return LARCH_GET_BUILTIN (lsx_vfrintrz_d);
	  if (out_n == 4 && in_n == 4)
	    return LARCH_GET_BUILTIN (lasx_xvfrintrz_d);
	}
      if (out_mode == SFmode && in_mode == SFmode)
	{
	  if (out_n == 4 && in_n == 4)
	    return LARCH_GET_BUILTIN (lsx_vfrintrz_s);
	  if (out_n == 8 && in_n == 8)
	    return LARCH_GET_BUILTIN (lasx_xvfrintrz_s);
	}
      break;

    CASE_CFN_RINT:
    CASE_CFN_ROUND:
      /* Round according to the current rounding mode.  */
      if (out_mode == DFmode && in_mode == DFmode)
	{
	  if (out_n == 2 && in_n == 2)
	    return LARCH_GET_BUILTIN (lsx_vfrint_d);
	  if (out_n == 4 && in_n == 4)
	    return LARCH_GET_BUILTIN (lasx_xvfrint_d);
	}
      if (out_mode == SFmode && in_mode == SFmode)
	{
	  if (out_n == 4 && in_n == 4)
	    return LARCH_GET_BUILTIN (lsx_vfrint_s);
	  if (out_n == 8 && in_n == 8)
	    return LARCH_GET_BUILTIN (lasx_xvfrint_s);
	}
      break;

    CASE_CFN_FLOOR:
      /* Round towards negative infinity.  */
      if (out_mode == DFmode && in_mode == DFmode)
	{
	  if (out_n == 2 && in_n == 2)
	    return LARCH_GET_BUILTIN (lsx_vfrintrm_d);
	  if (out_n == 4 && in_n == 4)
	    return LARCH_GET_BUILTIN (lasx_xvfrintrm_d);
	}
      if (out_mode == SFmode && in_mode == SFmode)
	{
	  if (out_n == 4 && in_n == 4)
	    return LARCH_GET_BUILTIN (lsx_vfrintrm_s);
	  if (out_n == 8 && in_n == 8)
	    return LARCH_GET_BUILTIN (lasx_xvfrintrm_s);
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Take argument ARGNO from EXP's argument list and convert it into
   an expand operand.  Store the operand in *OP.  */

static void
loongarch_prepare_builtin_arg (struct expand_operand *op, tree exp,
			       unsigned int argno)
{
  tree arg;
  rtx value;

  arg = CALL_EXPR_ARG (exp, argno);
  value = expand_normal (arg);
  create_input_operand (op, value, TYPE_MODE (TREE_TYPE (arg)));
}

/* Return a const_int vector of VAL with mode MODE.  VAL is narrowed to
   the inner (element) mode before being duplicated across all lanes.  */

rtx
loongarch_gen_const_int_vector (machine_mode mode, HOST_WIDE_INT val)
{
  rtx c = gen_int_mode (val, GET_MODE_INNER (mode));
  return gen_const_vec_duplicate (mode, c);
}

/* Expand instruction ICODE as part of a built-in function sequence. Use the first NOPS elements of OPS as the instruction's operands. HAS_TARGET_P is true if operand 0 is a target; it is false if the instruction has no target. Return the target rtx if HAS_TARGET_P, otherwise return const0_rtx.
*/
/* Expand instruction ICODE, which has NOPS operands described by OPS, as
   part of a built-in function sequence.  HAS_TARGET_P is true when OPS[0]
   is the output operand.  Several built-ins need their operands massaged
   before expansion: scalar immediates are broadcast into constant vectors,
   interleave/insert operands are reordered, and out-of-range immediates
   are diagnosed.  Returns the result rtx (or const0_rtx when there is no
   target); on a diagnosed error, returns a dummy register/const0_rtx so
   expansion can continue.  */
static rtx
loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
			       struct expand_operand *ops, bool has_target_p)
{
  machine_mode imode;
  /* RANGELO/RANGEHI describe the valid immediate range for the case being
     handled; ERROR_OPNO is the (1-based, user-visible) argument number to
     blame in the diagnostic, 0 meaning "no error".  */
  int rangelo = 0, rangehi = 0, error_opno = 0;

  switch (icode)
    {
    /* Unsigned-immediate forms: the immediate argument must be in
       [0, 31] and is broadcast into a constant vector.  */
    case CODE_FOR_lsx_vaddi_bu:
    case CODE_FOR_lsx_vaddi_hu:
    case CODE_FOR_lsx_vaddi_wu:
    case CODE_FOR_lsx_vaddi_du:
    case CODE_FOR_lsx_vslti_bu:
    case CODE_FOR_lsx_vslti_hu:
    case CODE_FOR_lsx_vslti_wu:
    case CODE_FOR_lsx_vslti_du:
    case CODE_FOR_lsx_vslei_bu:
    case CODE_FOR_lsx_vslei_hu:
    case CODE_FOR_lsx_vslei_wu:
    case CODE_FOR_lsx_vslei_du:
    case CODE_FOR_lsx_vmaxi_bu:
    case CODE_FOR_lsx_vmaxi_hu:
    case CODE_FOR_lsx_vmaxi_wu:
    case CODE_FOR_lsx_vmaxi_du:
    case CODE_FOR_lsx_vmini_bu:
    case CODE_FOR_lsx_vmini_hu:
    case CODE_FOR_lsx_vmini_wu:
    case CODE_FOR_lsx_vmini_du:
    case CODE_FOR_lsx_vsubi_bu:
    case CODE_FOR_lsx_vsubi_hu:
    case CODE_FOR_lsx_vsubi_wu:
    case CODE_FOR_lsx_vsubi_du:
    case CODE_FOR_lasx_xvaddi_bu:
    case CODE_FOR_lasx_xvaddi_hu:
    case CODE_FOR_lasx_xvaddi_wu:
    case CODE_FOR_lasx_xvaddi_du:
    case CODE_FOR_lasx_xvslti_bu:
    case CODE_FOR_lasx_xvslti_hu:
    case CODE_FOR_lasx_xvslti_wu:
    case CODE_FOR_lasx_xvslti_du:
    case CODE_FOR_lasx_xvslei_bu:
    case CODE_FOR_lasx_xvslei_hu:
    case CODE_FOR_lasx_xvslei_wu:
    case CODE_FOR_lasx_xvslei_du:
    case CODE_FOR_lasx_xvmaxi_bu:
    case CODE_FOR_lasx_xvmaxi_hu:
    case CODE_FOR_lasx_xvmaxi_wu:
    case CODE_FOR_lasx_xvmaxi_du:
    case CODE_FOR_lasx_xvmini_bu:
    case CODE_FOR_lasx_xvmini_hu:
    case CODE_FOR_lasx_xvmini_wu:
    case CODE_FOR_lasx_xvmini_du:
    case CODE_FOR_lasx_xvsubi_bu:
    case CODE_FOR_lasx_xvsubi_hu:
    case CODE_FOR_lasx_xvsubi_wu:
    case CODE_FOR_lasx_xvsubi_du:
      gcc_assert (has_target_p && nops == 3);
      /* We only generate a vector of constants iff the second argument
	 is an immediate.  We also validate the range of the immediate.  */
      if (CONST_INT_P (ops[2].value))
	{
	  rangelo = 0;
	  rangehi = 31;
	  if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
	    {
	      /* Broadcast the scalar immediate to a constant vector of the
		 destination mode.  */
	      ops[2].mode = ops[0].mode;
	      ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
							     INTVAL (ops[2].value));
	    }
	  else
	    error_opno = 2;
	}
      break;

    /* Signed-immediate forms: same broadcast, but the valid range is
       [-16, 15].  */
    case CODE_FOR_lsx_vseqi_b:
    case CODE_FOR_lsx_vseqi_h:
    case CODE_FOR_lsx_vseqi_w:
    case CODE_FOR_lsx_vseqi_d:
    case CODE_FOR_lsx_vslti_b:
    case CODE_FOR_lsx_vslti_h:
    case CODE_FOR_lsx_vslti_w:
    case CODE_FOR_lsx_vslti_d:
    case CODE_FOR_lsx_vslei_b:
    case CODE_FOR_lsx_vslei_h:
    case CODE_FOR_lsx_vslei_w:
    case CODE_FOR_lsx_vslei_d:
    case CODE_FOR_lsx_vmaxi_b:
    case CODE_FOR_lsx_vmaxi_h:
    case CODE_FOR_lsx_vmaxi_w:
    case CODE_FOR_lsx_vmaxi_d:
    case CODE_FOR_lsx_vmini_b:
    case CODE_FOR_lsx_vmini_h:
    case CODE_FOR_lsx_vmini_w:
    case CODE_FOR_lsx_vmini_d:
    case CODE_FOR_lasx_xvseqi_b:
    case CODE_FOR_lasx_xvseqi_h:
    case CODE_FOR_lasx_xvseqi_w:
    case CODE_FOR_lasx_xvseqi_d:
    case CODE_FOR_lasx_xvslti_b:
    case CODE_FOR_lasx_xvslti_h:
    case CODE_FOR_lasx_xvslti_w:
    case CODE_FOR_lasx_xvslti_d:
    case CODE_FOR_lasx_xvslei_b:
    case CODE_FOR_lasx_xvslei_h:
    case CODE_FOR_lasx_xvslei_w:
    case CODE_FOR_lasx_xvslei_d:
    case CODE_FOR_lasx_xvmaxi_b:
    case CODE_FOR_lasx_xvmaxi_h:
    case CODE_FOR_lasx_xvmaxi_w:
    case CODE_FOR_lasx_xvmaxi_d:
    case CODE_FOR_lasx_xvmini_b:
    case CODE_FOR_lasx_xvmini_h:
    case CODE_FOR_lasx_xvmini_w:
    case CODE_FOR_lasx_xvmini_d:
      gcc_assert (has_target_p && nops == 3);
      /* We only generate a vector of constants iff the second argument
	 is an immediate.  We also validate the range of the immediate.  */
      if (CONST_INT_P (ops[2].value))
	{
	  rangelo = -16;
	  rangehi = 15;
	  if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
	    {
	      ops[2].mode = ops[0].mode;
	      ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
							     INTVAL (ops[2].value));
	    }
	  else
	    error_opno = 2;
	}
      break;

    /* Bitwise-logical immediate forms: broadcast the immediate with no
       range check (any 8-bit value is representable).  */
    case CODE_FOR_lsx_vandi_b:
    case CODE_FOR_lsx_vori_b:
    case CODE_FOR_lsx_vnori_b:
    case CODE_FOR_lsx_vxori_b:
    case CODE_FOR_lasx_xvandi_b:
    case CODE_FOR_lasx_xvori_b:
    case CODE_FOR_lasx_xvnori_b:
    case CODE_FOR_lasx_xvxori_b:
      gcc_assert (has_target_p && nops == 3);
      if (!CONST_INT_P (ops[2].value))
	break;
      ops[2].mode = ops[0].mode;
      ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
						     INTVAL (ops[2].value));
      break;

    /* Bit-select immediate: the immediate is the fourth operand here.  */
    case CODE_FOR_lsx_vbitseli_b:
    case CODE_FOR_lasx_xvbitseli_b:
      gcc_assert (has_target_p && nops == 4);
      if (!CONST_INT_P (ops[3].value))
	break;
      ops[3].mode = ops[0].mode;
      ops[3].value = loongarch_gen_const_int_vector (ops[3].mode,
						     INTVAL (ops[3].value));
      break;

    case CODE_FOR_lsx_vreplgr2vr_b:
    case CODE_FOR_lsx_vreplgr2vr_h:
    case CODE_FOR_lsx_vreplgr2vr_w:
    case CODE_FOR_lsx_vreplgr2vr_d:
    case CODE_FOR_lasx_xvreplgr2vr_b:
    case CODE_FOR_lasx_xvreplgr2vr_h:
    case CODE_FOR_lasx_xvreplgr2vr_w:
    case CODE_FOR_lasx_xvreplgr2vr_d:
      /* Map the built-ins to vector fill operations.  We need fix up the
	 mode for the element being inserted.  */
      gcc_assert (has_target_p && nops == 2);
      imode = GET_MODE_INNER (ops[0].mode);
      ops[1].value = lowpart_subreg (imode, ops[1].value, ops[1].mode);
      ops[1].mode = imode;
      break;

    case CODE_FOR_lsx_vilvh_b:
    case CODE_FOR_lsx_vilvh_h:
    case CODE_FOR_lsx_vilvh_w:
    case CODE_FOR_lsx_vilvh_d:
    case CODE_FOR_lsx_vilvl_b:
    case CODE_FOR_lsx_vilvl_h:
    case CODE_FOR_lsx_vilvl_w:
    case CODE_FOR_lsx_vilvl_d:
    case CODE_FOR_lsx_vpackev_b:
    case CODE_FOR_lsx_vpackev_h:
    case CODE_FOR_lsx_vpackev_w:
    case CODE_FOR_lsx_vpackod_b:
    case CODE_FOR_lsx_vpackod_h:
    case CODE_FOR_lsx_vpackod_w:
    case CODE_FOR_lsx_vpickev_b:
    case CODE_FOR_lsx_vpickev_h:
    case CODE_FOR_lsx_vpickev_w:
    case CODE_FOR_lsx_vpickod_b:
    case CODE_FOR_lsx_vpickod_h:
    case CODE_FOR_lsx_vpickod_w:
    case CODE_FOR_lsx_vandn_v:
    case CODE_FOR_lasx_xvilvh_b:
    case CODE_FOR_lasx_xvilvh_h:
    case CODE_FOR_lasx_xvilvh_w:
    case CODE_FOR_lasx_xvilvh_d:
    case CODE_FOR_lasx_xvilvl_b:
    case CODE_FOR_lasx_xvilvl_h:
    case CODE_FOR_lasx_xvilvl_w:
    case CODE_FOR_lasx_xvilvl_d:
    case CODE_FOR_lasx_xvpackev_b:
    case CODE_FOR_lasx_xvpackev_h:
    case CODE_FOR_lasx_xvpackev_w:
    case CODE_FOR_lasx_xvpackod_b:
    case CODE_FOR_lasx_xvpackod_h:
    case CODE_FOR_lasx_xvpackod_w:
    case CODE_FOR_lasx_xvpickev_b:
    case CODE_FOR_lasx_xvpickev_h:
    case CODE_FOR_lasx_xvpickev_w:
    case CODE_FOR_lasx_xvpickod_b:
    case CODE_FOR_lasx_xvpickod_h:
    case CODE_FOR_lasx_xvpickod_w:
    case CODE_FOR_lasx_xvandn_v:
      /* Swap the operands 1 and 2 for interleave operations.  Built-ins
	 follow convention of ISA, which have op1 as higher component and
	 op2 as lower component.  However, the VEC_PERM op in tree and
	 vec_concat in RTL expects first operand to be lower component,
	 because of which this swap is needed for builtins.  */
      gcc_assert (has_target_p && nops == 3);
      std::swap (ops[1], ops[2]);
      break;

    /* Shift-immediate forms: valid shift counts are
       [0, element-width - 1], broadcast into a constant vector.  */
    case CODE_FOR_lsx_vslli_b:
    case CODE_FOR_lsx_vslli_h:
    case CODE_FOR_lsx_vslli_w:
    case CODE_FOR_lsx_vslli_d:
    case CODE_FOR_lsx_vsrai_b:
    case CODE_FOR_lsx_vsrai_h:
    case CODE_FOR_lsx_vsrai_w:
    case CODE_FOR_lsx_vsrai_d:
    case CODE_FOR_lsx_vsrli_b:
    case CODE_FOR_lsx_vsrli_h:
    case CODE_FOR_lsx_vsrli_w:
    case CODE_FOR_lsx_vsrli_d:
    case CODE_FOR_lasx_xvslli_b:
    case CODE_FOR_lasx_xvslli_h:
    case CODE_FOR_lasx_xvslli_w:
    case CODE_FOR_lasx_xvslli_d:
    case CODE_FOR_lasx_xvsrai_b:
    case CODE_FOR_lasx_xvsrai_h:
    case CODE_FOR_lasx_xvsrai_w:
    case CODE_FOR_lasx_xvsrai_d:
    case CODE_FOR_lasx_xvsrli_b:
    case CODE_FOR_lasx_xvsrli_h:
    case CODE_FOR_lasx_xvsrli_w:
    case CODE_FOR_lasx_xvsrli_d:
      gcc_assert (has_target_p && nops == 3);
      if (CONST_INT_P (ops[2].value))
	{
	  rangelo = 0;
	  rangehi = GET_MODE_UNIT_BITSIZE (ops[0].mode) - 1;
	  if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
	    {
	      ops[2].mode = ops[0].mode;
	      ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
							     INTVAL (ops[2].value));
	    }
	  else
	    error_opno = 2;
	}
      break;

    case CODE_FOR_lsx_vinsgr2vr_b:
    case CODE_FOR_lsx_vinsgr2vr_h:
    case CODE_FOR_lsx_vinsgr2vr_w:
    case CODE_FOR_lsx_vinsgr2vr_d:
      /* Map the built-ins to insert operations.  We need to swap operands,
	 fix up the mode for the element being inserted, and generate
	 a bit mask for vec_merge.  */
      gcc_assert (has_target_p && nops == 4);
      std::swap (ops[1], ops[2]);
      imode = GET_MODE_INNER (ops[0].mode);
      ops[1].value = lowpart_subreg (imode, ops[1].value, ops[1].mode);
      ops[1].mode = imode;
      rangelo = 0;
      rangehi = GET_MODE_NUNITS (ops[0].mode) - 1;
      if (CONST_INT_P (ops[3].value)
	  && IN_RANGE (INTVAL (ops[3].value), rangelo, rangehi))
	/* Turn the element index into a one-hot lane mask for
	   vec_merge.  */
	ops[3].value = GEN_INT (1 << INTVAL (ops[3].value));
      else
	error_opno = 2;
      break;

    /* Map the built-ins to element insert operations.  We need to swap
       operands and generate a bit mask.
       NOTE(review): no case labels precede this statement block, so it is
       unreachable -- it follows the "break;" above.  It looks like the
       case labels for an element-insert variant were lost; confirm
       against the upstream sources.  */
      gcc_assert (has_target_p && nops == 4);
      std::swap (ops[1], ops[2]);
      std::swap (ops[1], ops[3]);
      rangelo = 0;
      rangehi = GET_MODE_NUNITS (ops[0].mode) - 1;
      if (CONST_INT_P (ops[3].value)
	  && IN_RANGE (INTVAL (ops[3].value), rangelo, rangehi))
	ops[3].value = GEN_INT (1 << INTVAL (ops[3].value));
      else
	error_opno = 2;
      break;

    case CODE_FOR_lsx_vshuf4i_b:
    case CODE_FOR_lsx_vshuf4i_h:
    case CODE_FOR_lsx_vshuf4i_w:
    case CODE_FOR_lsx_vshuf4i_w_f:
      gcc_assert (has_target_p && nops == 3);
      /* NOTE(review): INTVAL is applied without a CONST_INT_P check here;
	 presumably the insn predicate guarantees an immediate -- confirm.  */
      ops[2].value = loongarch_gen_const_int_vector_shuffle (ops[0].mode,
							     INTVAL (ops[2].value));
      break;

    case CODE_FOR_lasx_xvinsgr2vr_w:
    case CODE_FOR_lasx_xvinsgr2vr_d:
      /* Map the built-ins to insert operations.  We need to swap operands,
	 fix up the mode for the element being inserted, and generate
	 a bit mask for vec_merge.  */
      gcc_assert (has_target_p && nops == 4);
      std::swap (ops[1], ops[2]);
      imode = GET_MODE_INNER (ops[0].mode);
      ops[1].value = lowpart_subreg (imode, ops[1].value, ops[1].mode);
      ops[1].mode = imode;
      rangelo = 0;
      rangehi = GET_MODE_NUNITS (ops[0].mode) - 1;
      if (CONST_INT_P (ops[3].value)
	  && IN_RANGE (INTVAL (ops[3].value), rangelo, rangehi))
	ops[3].value = GEN_INT (1 << INTVAL (ops[3].value));
      else
	error_opno = 2;
      break;

    default:
      break;
    }

  if (error_opno != 0)
    {
      /* Diagnose an out-of-range immediate, then return a dummy value so
	 that the rest of the expansion can proceed.  */
      error ("argument %d to the built-in must be a constant"
	     " in range %d to %d",
	     error_opno, rangelo, rangehi);
      return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
    }
  else if (!maybe_expand_insn (icode, nops, ops))
    {
      error ("invalid argument to built-in function");
      return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
    }
  return has_target_p ? ops[0].value : const0_rtx;
}

/* Expand a LARCH_BUILTIN_DIRECT or LARCH_BUILTIN_DIRECT_NO_TARGET function;
   HAS_TARGET_P says which.  EXP is the CALL_EXPR that calls the function
   and ICODE is the code of the associated .md pattern.  TARGET, if nonnull,
   suggests a good place to put the result.
*/
static rtx
loongarch_expand_builtin_direct (enum insn_code icode, rtx target, tree exp,
				 bool has_target_p)
{
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  int opno, argno;

  /* Map any target to operand 0.  */
  opno = 0;
  if (has_target_p)
    create_output_operand (&ops[opno++], target, TYPE_MODE (TREE_TYPE (exp)));

  /* For the vector reciprocal instructions, we need to construct a
     temporary parameter const1_vector.  The single user argument becomes
     operand 2 and a constant all-ones vector is supplied as operand 1
     (i.e. 1 / x).  */
  switch (icode)
    {
    case CODE_FOR_recipv8sf3:
    case CODE_FOR_recipv4df3:
    case CODE_FOR_recipv4sf3:
    case CODE_FOR_recipv2df3:
      loongarch_prepare_builtin_arg (&ops[2], exp, 0);
      create_input_operand (&ops[1], CONST1_RTX (ops[0].mode),
			    ops[0].mode);
      return loongarch_expand_builtin_insn (icode, 3, ops, has_target_p);
    default:
      break;
    }

  /* Map the arguments to the other operands.  */
  gcc_assert (opno + call_expr_nargs (exp)
	      == insn_data[icode].n_generator_args);
  for (argno = 0; argno < call_expr_nargs (exp); argno++)
    loongarch_prepare_builtin_arg (&ops[opno++], exp, argno);

  return loongarch_expand_builtin_insn (icode, opno, ops, has_target_p);
}

/* Expand an LSX built-in for a compare and branch instruction specified by
   ICODE, set a general-purpose register to 1 if the branch was taken,
   0 otherwise.  */

static rtx
loongarch_expand_builtin_lsx_test_branch (enum insn_code icode, tree exp)
{
  struct expand_operand ops[3];
  rtx_insn *cbranch;
  rtx_code_label *true_label, *done_label;
  rtx cmp_result;

  true_label = gen_label_rtx ();
  done_label = gen_label_rtx ();

  /* Operand 0 is the branch target; operand 1 is the vector being tested;
     operand 2 is a fixed zero to compare against.  */
  create_input_operand (&ops[0], true_label, TYPE_MODE (TREE_TYPE (exp)));
  loongarch_prepare_builtin_arg (&ops[1], exp, 0);
  create_fixed_operand (&ops[2], const0_rtx);

  /* Make sure that the operand 1 is a REG.  */
  if (GET_CODE (ops[1].value) != REG)
    ops[1].value = force_reg (ops[1].mode, ops[1].value);

  if ((cbranch = maybe_gen_insn (icode, 3, ops)) == NULL_RTX)
    error ("failed to expand built-in function");

  cmp_result = gen_reg_rtx (SImode);

  /* First assume that CMP_RESULT is false.  */
  loongarch_emit_move (cmp_result, const0_rtx);

  /* Branch to TRUE_LABEL if CBRANCH is taken and DONE_LABEL otherwise.  */
  emit_jump_insn (cbranch);
  emit_jump_insn (gen_jump (done_label));
  emit_barrier ();

  /* Set CMP_RESULT to true if the branch was taken.  */
  emit_label (true_label);
  loongarch_emit_move (cmp_result, const1_rtx);

  emit_label (done_label);
  return cmp_result;
}

/* Implement TARGET_EXPAND_BUILTIN.  Look up the built-in description for
   the function called by EXP, verify it is enabled for the current target
   options, and dispatch to the matching expander.  */

rtx
loongarch_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl;
  unsigned int fcode;
  const struct loongarch_builtin_description *d;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fcode = DECL_MD_FUNCTION_CODE (fndecl);
  gcc_assert (fcode < ARRAY_SIZE (loongarch_builtins));
  d = &loongarch_builtins[fcode];
  if (!d->avail ())
    {
      /* The built-in exists but is not available under the current ISA /
	 option set; diagnose instead of expanding.  */
      error_at (EXPR_LOCATION (exp),
		"built-in function %qD is not enabled", fndecl);
      return target;
    }

  switch (d->builtin_type)
    {
    case LARCH_BUILTIN_DIRECT:
    case LARCH_BUILTIN_LSX:
    case LARCH_BUILTIN_LASX:
      return loongarch_expand_builtin_direct (d->icode, target, exp, true);

    case LARCH_BUILTIN_DIRECT_NO_TARGET:
      return loongarch_expand_builtin_direct (d->icode, target, exp, false);

    case LARCH_BUILTIN_LSX_TEST_BRANCH:
    case LARCH_BUILTIN_LASX_TEST_BRANCH:
      return loongarch_expand_builtin_lsx_test_branch (d->icode, exp);
    }
  gcc_unreachable ();
}

/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV.
*/
/* Build the GENERIC trees that C11 atomic compound assignment uses to
   save (HOLD), clear (CLEAR) and restore-and-raise (UPDATE) the FP
   environment, using the movfcsr2gr/movgr2fcsr built-ins to read and
   write FCSR0.  */
void
loongarch_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  /* With a soft-float ABI there is no FCSR to manage.  */
  if (!TARGET_HARD_FLOAT_ABI)
    return;
  tree exceptions_var = create_tmp_var_raw (LARCH_ATYPE_USI);
  tree fcsr_orig_var = create_tmp_var_raw (LARCH_ATYPE_USI);
  tree fcsr_mod_var = create_tmp_var_raw (LARCH_ATYPE_USI);
  /* FCSR register number 0, passed to both built-ins.  */
  tree const0 = build_int_cst (LARCH_ATYPE_UQI, 0);
  tree get_fcsr = loongarch_builtin_decls[LARCH_MOVFCSR2GR];
  tree set_fcsr = loongarch_builtin_decls[LARCH_MOVGR2FCSR];
  tree get_fcsr_hold_call = build_call_expr (get_fcsr, 1, const0);
  /* HOLD: fcsr_orig = read FCSR; fcsr_mod = fcsr_orig & 0xffe0ffe0;
     write fcsr_mod back.  The mask presumably clears the exception
     enable and flag fields -- confirm against the LoongArch FCSR
     layout.  */
  tree hold_assign_orig = build4 (TARGET_EXPR, LARCH_ATYPE_USI,
				  fcsr_orig_var, get_fcsr_hold_call,
				  NULL, NULL);
  tree hold_mod_val = build2 (BIT_AND_EXPR, LARCH_ATYPE_USI, fcsr_orig_var,
			      build_int_cst (LARCH_ATYPE_USI, 0xffe0ffe0));
  tree hold_assign_mod = build4 (TARGET_EXPR, LARCH_ATYPE_USI,
				 fcsr_mod_var, hold_mod_val, NULL, NULL);
  tree set_fcsr_hold_call = build_call_expr (set_fcsr, 2, const0,
					     fcsr_mod_var);
  tree hold_all = build2 (COMPOUND_EXPR, LARCH_ATYPE_USI, hold_assign_orig,
			  hold_assign_mod);
  *hold = build2 (COMPOUND_EXPR, void_type_node, hold_all,
		  set_fcsr_hold_call);

  /* CLEAR: re-write the masked value, discarding any flags raised since
     HOLD.  */
  *clear = build_call_expr (set_fcsr, 2, const0, fcsr_mod_var);

  /* UPDATE: capture the current FCSR (with any new exception flags),
     restore the original FCSR saved by HOLD, then raise the captured
     exceptions via __atomic_feraiseexcept.  */
  tree get_fcsr_update_call = build_call_expr (get_fcsr, 1, const0);
  *update = build4 (TARGET_EXPR, LARCH_ATYPE_USI, exceptions_var,
		    get_fcsr_update_call, NULL, NULL);
  tree set_fcsr_update_call = build_call_expr (set_fcsr, 2, const0,
					       fcsr_orig_var);
  *update = build2 (COMPOUND_EXPR, void_type_node, *update,
		    set_fcsr_update_call);
  tree atomic_feraiseexcept
    = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  tree int_exceptions_var = fold_convert (integer_type_node,
					  exceptions_var);
  tree atomic_feraiseexcept_call
    = build_call_expr (atomic_feraiseexcept, 1, int_exceptions_var);
  *update = build2 (COMPOUND_EXPR, void_type_node, *update,
		    atomic_feraiseexcept_call);
}

/* Implement TARGET_BUILTIN_VA_LIST.  va_list is a plain pointer on
   LoongArch.  */

tree
loongarch_build_builtin_va_list (void)
{
  return ptr_type_node;
}