; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: sed 's/iXLen/i16/g' %s | llc -mtriple=i686-unknown -mattr=avx2,f16c | FileCheck %s --check-prefixes=X86-AVX-I16
; RUN: sed 's/iXLen/i16/g' %s | llc -mtriple=i686-unknown -mattr=avx512fp16,avx512vl | FileCheck %s --check-prefixes=X86-FP16-I16
; RUN: sed 's/iXLen/i16/g' %s | llc -mtriple=x86_64-unknown -mattr=avx2,f16c | FileCheck %s --check-prefixes=X64-AVX-I16
; RUN: sed 's/iXLen/i16/g' %s | llc -mtriple=x86_64-unknown -mattr=avx512fp16,avx512vl | FileCheck %s --check-prefixes=X64-FP16-I16
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=i686-unknown -mattr=avx2,f16c | FileCheck %s --check-prefixes=X86-AVX-I32
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=i686-unknown -mattr=avx512fp16,avx512vl | FileCheck %s --check-prefixes=X86-FP16-I32
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=x86_64-unknown -mattr=avx2,f16c | FileCheck %s --check-prefixes=X64-AVX-I32
; RUN: sed 's/iXLen/i32/g' %s | llc -mtriple=x86_64-unknown -mattr=avx512fp16,avx512vl | FileCheck %s --check-prefixes=X64-FP16-I32

define <1 x iXLen> @lrint_v1f16(<1 x half> %x) {
; X86-AVX-I16-LABEL: lrint_v1f16:
; X86-AVX-I16: # %bb.0:
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I16-NEXT: # kill: def $ax killed $ax killed $eax
; X86-AVX-I16-NEXT: retl
;
; X86-FP16-I16-LABEL: lrint_v1f16:
; X86-FP16-I16: # %bb.0:
; X86-FP16-I16-NEXT: vcvttsh2si %xmm0, %eax
; X86-FP16-I16-NEXT: # kill: def $ax killed $ax killed $eax
; X86-FP16-I16-NEXT: retl
;
; X64-AVX-I16-LABEL: lrint_v1f16:
; X64-AVX-I16: # %bb.0:
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I16-NEXT: # kill: def $ax killed $ax killed $eax
; X64-AVX-I16-NEXT: retq
;
; X64-FP16-I16-LABEL: lrint_v1f16:
; X64-FP16-I16: # %bb.0:
; X64-FP16-I16-NEXT: vcvttsh2si %xmm0, %eax
; X64-FP16-I16-NEXT: # kill: def $ax killed $ax killed $eax
; X64-FP16-I16-NEXT: retq
;
; X86-AVX-I32-LABEL: lrint_v1f16:
; X86-AVX-I32: # %bb.0:
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I32-NEXT: retl
;
; X86-FP16-I32-LABEL: lrint_v1f16:
; X86-FP16-I32: # %bb.0:
; X86-FP16-I32-NEXT: vcvttsh2si %xmm0, %eax
; X86-FP16-I32-NEXT: retl
;
; X64-AVX-I32-LABEL: lrint_v1f16:
; X64-AVX-I32: # %bb.0:
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I32-NEXT: retq
;
; X64-FP16-I32-LABEL: lrint_v1f16:
; X64-FP16-I32: # %bb.0:
; X64-FP16-I32-NEXT: vcvttsh2si %xmm0, %eax
; X64-FP16-I32-NEXT: retq
  %a = call <1 x iXLen> @llvm.lrint.v1iXLen.v1f16(<1 x half> %x)
  ret <1 x iXLen> %a
}
declare <1 x iXLen> @llvm.lrint.v1iXLen.v1f16(<1 x half>)

define <2 x iXLen> @lrint_v2f16(<2 x half> %x) {
; X86-AVX-I16-LABEL: lrint_v2f16:
; X86-AVX-I16: # %bb.0:
; X86-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm1
; X86-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvttss2si %xmm1, %ecx
; X86-AVX-I16-NEXT: vmovd %ecx, %xmm1
; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm2 = xmm0[1,1,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm1, %xmm0
; X86-AVX-I16-NEXT: retl
;
; X86-FP16-I16-LABEL: lrint_v2f16:
; X86-FP16-I16: # %bb.0:
; X86-FP16-I16-NEXT: vcvtph2w %xmm0, %xmm0
; X86-FP16-I16-NEXT: retl
;
; X64-AVX-I16-LABEL: lrint_v2f16:
; X64-AVX-I16: # %bb.0:
; X64-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm1
; X64-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvttss2si %xmm1, %ecx
; X64-AVX-I16-NEXT: vmovd %ecx, %xmm1
; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm2 = xmm0[1,1,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm1, %xmm0
; X64-AVX-I16-NEXT: retq
;
; X64-FP16-I16-LABEL: lrint_v2f16:
; X64-FP16-I16: # %bb.0:
; X64-FP16-I16-NEXT: vcvtph2w %xmm0, %xmm0
; X64-FP16-I16-NEXT: retq
;
; X86-AVX-I32-LABEL: lrint_v2f16:
; X86-AVX-I32: # %bb.0:
; X86-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvttss2si %xmm0, %ecx
; X86-AVX-I32-NEXT: vmovd %ecx, %xmm0
; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm0, %xmm0
; X86-AVX-I32-NEXT: retl
;
; X86-FP16-I32-LABEL: lrint_v2f16:
; X86-FP16-I32: # %bb.0:
; X86-FP16-I32-NEXT: vcvtph2dq %xmm0, %xmm0
; X86-FP16-I32-NEXT: retl
;
; X64-AVX-I32-LABEL: lrint_v2f16:
; X64-AVX-I32: # %bb.0:
; X64-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvttss2si %xmm0, %ecx
; X64-AVX-I32-NEXT: vmovd %ecx, %xmm0
; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm0, %xmm0
; X64-AVX-I32-NEXT: retq
;
; X64-FP16-I32-LABEL: lrint_v2f16:
; X64-FP16-I32: # %bb.0:
; X64-FP16-I32-NEXT: vcvtph2dq %xmm0, %xmm0
; X64-FP16-I32-NEXT: retq
  %a = call <2 x iXLen> @llvm.lrint.v2iXLen.v2f16(<2 x half> %x)
  ret <2 x iXLen> %a
}
declare <2 x iXLen> @llvm.lrint.v2iXLen.v2f16(<2 x half>)

define <4 x iXLen> @lrint_v4f16(<4 x half> %x) {
; X86-AVX-I16-LABEL: lrint_v4f16:
; X86-AVX-I16: # %bb.0:
; X86-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm1
; X86-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvttss2si %xmm1, %ecx
; X86-AVX-I16-NEXT: vmovd %ecx, %xmm1
; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm2 = xmm0[1,1,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm1, %xmm0
; X86-AVX-I16-NEXT: retl
;
; X86-FP16-I16-LABEL: lrint_v4f16:
; X86-FP16-I16: # %bb.0:
; X86-FP16-I16-NEXT: vcvtph2w %xmm0, %xmm0
; X86-FP16-I16-NEXT: retl
;
; X64-AVX-I16-LABEL: lrint_v4f16:
; X64-AVX-I16: # %bb.0:
; X64-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm1
; X64-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvttss2si %xmm1, %ecx
; X64-AVX-I16-NEXT: vmovd %ecx, %xmm1
; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm2 = xmm0[1,1,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm1, %xmm0
; X64-AVX-I16-NEXT: retq
;
; X64-FP16-I16-LABEL: lrint_v4f16:
; X64-FP16-I16: # %bb.0:
; X64-FP16-I16-NEXT: vcvtph2w %xmm0, %xmm0
; X64-FP16-I16-NEXT: retq
;
; X86-AVX-I32-LABEL: lrint_v4f16:
; X86-AVX-I32: # %bb.0:
; X86-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %ecx
; X86-AVX-I32-NEXT: vmovd %ecx, %xmm1
; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm2 = xmm0[1,1,3,3]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vpsrlq $48, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm1, %xmm0
; X86-AVX-I32-NEXT: retl
;
; X86-FP16-I32-LABEL: lrint_v4f16:
; X86-FP16-I32: # %bb.0:
; X86-FP16-I32-NEXT: vcvtph2dq %xmm0, %xmm0
; X86-FP16-I32-NEXT: retl
;
; X64-AVX-I32-LABEL: lrint_v4f16:
; X64-AVX-I32: # %bb.0:
; X64-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %ecx
; X64-AVX-I32-NEXT: vmovd %ecx, %xmm1
; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm2 = xmm0[1,1,3,3]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vpsrlq $48, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm1, %xmm0
; X64-AVX-I32-NEXT: retq
;
; X64-FP16-I32-LABEL: lrint_v4f16:
; X64-FP16-I32: # %bb.0:
; X64-FP16-I32-NEXT: vcvtph2dq %xmm0, %xmm0
; X64-FP16-I32-NEXT: retq
  %a = call <4 x iXLen> @llvm.lrint.v4iXLen.v4f16(<4 x half> %x)
  ret <4 x iXLen> %a
}
declare <4 x iXLen> @llvm.lrint.v4iXLen.v4f16(<4 x half>)

define <8 x iXLen> @lrint_v8f16(<8 x half> %x) {
; X86-AVX-I16-LABEL: lrint_v8f16:
; X86-AVX-I16: # %bb.0:
; X86-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm1
; X86-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvttss2si %xmm1, %ecx
; X86-AVX-I16-NEXT: vmovd %ecx, %xmm1
; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm2 = xmm0[1,1,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm1, %xmm0
; X86-AVX-I16-NEXT: retl
;
; X86-FP16-I16-LABEL: lrint_v8f16:
; X86-FP16-I16: # %bb.0:
; X86-FP16-I16-NEXT: vcvtph2w %xmm0, %xmm0
; X86-FP16-I16-NEXT: retl
;
; X64-AVX-I16-LABEL: lrint_v8f16:
; X64-AVX-I16: # %bb.0:
; X64-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm1
; X64-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvttss2si %xmm1, %ecx
; X64-AVX-I16-NEXT: vmovd %ecx, %xmm1
; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm2 = xmm0[1,1,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm1, %xmm0
; X64-AVX-I16-NEXT: retq
;
; X64-FP16-I16-LABEL: lrint_v8f16:
; X64-FP16-I16: # %bb.0:
; X64-FP16-I16-NEXT: vcvtph2w %xmm0, %xmm0
; X64-FP16-I16-NEXT: retq
;
; X86-AVX-I32-LABEL: lrint_v8f16:
; X86-AVX-I32: # %bb.0:
; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm1 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I32-NEXT: vshufpd {{.*#+}} xmm1 = xmm0[1,0]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %ecx
; X86-AVX-I32-NEXT: vmovd %ecx, %xmm1
; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %ecx
; X86-AVX-I32-NEXT: vmovd %ecx, %xmm2
; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm3 = xmm0[1,1,3,3]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vpsrlq $48, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm2, %xmm0
; X86-AVX-I32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; X86-AVX-I32-NEXT: retl
;
; X86-FP16-I32-LABEL: lrint_v8f16:
; X86-FP16-I32: # %bb.0:
; X86-FP16-I32-NEXT: vcvtph2dq %xmm0, %ymm0
; X86-FP16-I32-NEXT: retl
;
; X64-AVX-I32-LABEL: lrint_v8f16:
; X64-AVX-I32: # %bb.0:
; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm1 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I32-NEXT: vshufpd {{.*#+}} xmm1 = xmm0[1,0]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %ecx
; X64-AVX-I32-NEXT: vmovd %ecx, %xmm1
; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %ecx
; X64-AVX-I32-NEXT: vmovd %ecx, %xmm2
; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm3 = xmm0[1,1,3,3]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vpsrlq $48, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm2, %xmm0
; X64-AVX-I32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; X64-AVX-I32-NEXT: retq
;
; X64-FP16-I32-LABEL: lrint_v8f16:
; X64-FP16-I32: # %bb.0:
; X64-FP16-I32-NEXT: vcvtph2dq %xmm0, %ymm0
; X64-FP16-I32-NEXT: retq
  %a = call <8 x iXLen> @llvm.lrint.v8iXLen.v8f16(<8 x half> %x)
  ret <8 x iXLen> %a
}
declare <8 x iXLen> @llvm.lrint.v8iXLen.v8f16(<8 x half>)

define <16 x iXLen> @lrint_v16f16(<16 x half> %x) {
; X86-AVX-I16-LABEL: lrint_v16f16:
; X86-AVX-I16: # %bb.0:
; X86-AVX-I16-NEXT: vextracti128 $1, %ymm0, %xmm1
; X86-AVX-I16-NEXT: vpsrld $16, %xmm1, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %ecx
; X86-AVX-I16-NEXT: vmovd %ecx, %xmm2
; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm3 = xmm1[1,1,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vpsrlq $48, %xmm1, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm3 = xmm1[1,0]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm3 = xmm1[3,3,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I16-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm2, %xmm1
; X86-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %ecx
; X86-AVX-I16-NEXT: vmovd %ecx, %xmm2
; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm3 = xmm0[1,1,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm3 = xmm0[1,0]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm3 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm3 = xmm0[3,3,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm2, %xmm0
; X86-AVX-I16-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; X86-AVX-I16-NEXT: retl
;
; X86-FP16-I16-LABEL: lrint_v16f16:
; X86-FP16-I16: # %bb.0:
; X86-FP16-I16-NEXT: vcvtph2w %ymm0, %ymm0
; X86-FP16-I16-NEXT: retl
;
; X64-AVX-I16-LABEL: lrint_v16f16:
; X64-AVX-I16: # %bb.0:
; X64-AVX-I16-NEXT: vextracti128 $1, %ymm0, %xmm1
; X64-AVX-I16-NEXT: vpsrld $16, %xmm1, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %ecx
; X64-AVX-I16-NEXT: vmovd %ecx, %xmm2
; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm3 = xmm1[1,1,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vpsrlq $48, %xmm1, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm3 = xmm1[1,0]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm3 = xmm1[3,3,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I16-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm2, %xmm1
; X64-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm2
; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %ecx
; X64-AVX-I16-NEXT: vmovd %ecx, %xmm2
; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm3 = xmm0[1,1,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm3 = xmm0[1,0]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm3 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm3 = xmm0[3,3,3,3]
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm2, %xmm2
; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm2, %xmm0
; X64-AVX-I16-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; X64-AVX-I16-NEXT: retq
;
; X64-FP16-I16-LABEL: lrint_v16f16:
; X64-FP16-I16: # %bb.0:
; X64-FP16-I16-NEXT: vcvtph2w %ymm0, %ymm0
; X64-FP16-I16-NEXT: retq
;
; X86-AVX-I32-LABEL: lrint_v16f16:
; X86-AVX-I32: # %bb.0:
; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm1 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I32-NEXT: vshufpd {{.*#+}} xmm1 = xmm0[1,0]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %ecx
; X86-AVX-I32-NEXT: vmovd %ecx, %xmm1
; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm2
; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %ecx
; X86-AVX-I32-NEXT: vmovd %ecx, %xmm2
; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm3 = xmm0[1,1,3,3]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vpsrlq $48, %xmm0, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm2, %xmm2
; X86-AVX-I32-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm2
; X86-AVX-I32-NEXT: vextracti128 $1, %ymm0, %xmm0
; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm1 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X86-AVX-I32-NEXT: vshufpd {{.*#+}} xmm1 = xmm0[1,0]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %ecx
; X86-AVX-I32-NEXT: vmovd %ecx, %xmm1
; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vshufps {{.*#+}} xmm3 = xmm0[3,3,3,3]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm1, %xmm1
; X86-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm3
; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx
; X86-AVX-I32-NEXT: vmovd %ecx, %xmm3
; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3]
; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I32-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3
; X86-AVX-I32-NEXT: vpsrlq $48, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I32-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm0
; X86-AVX-I32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; X86-AVX-I32-NEXT: vmovdqa %ymm2, %ymm0
; X86-AVX-I32-NEXT: retl
;
; X86-FP16-I32-LABEL: lrint_v16f16:
; X86-FP16-I32: # %bb.0:
; X86-FP16-I32-NEXT: vcvtph2dq %ymm0, %zmm0
; X86-FP16-I32-NEXT: retl
;
; X64-AVX-I32-LABEL: lrint_v16f16:
; X64-AVX-I32: # %bb.0:
; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm1 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I32-NEXT: vshufpd {{.*#+}} xmm1 = xmm0[1,0]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %ecx
; X64-AVX-I32-NEXT: vmovd %ecx, %xmm1
; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vshufps {{.*#+}} xmm2 = xmm0[3,3,3,3]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm2
; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2
; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %ecx
; X64-AVX-I32-NEXT: vmovd %ecx, %xmm2
; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm3 = xmm0[1,1,3,3]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vpsrlq $48, %xmm0, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm2, %xmm2
; X64-AVX-I32-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm2
; X64-AVX-I32-NEXT: vextracti128 $1, %ymm0, %xmm0
; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm1 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %eax
; X64-AVX-I32-NEXT: vshufpd {{.*#+}} xmm1 = xmm0[1,0]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1
; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %ecx
; X64-AVX-I32-NEXT: vmovd %ecx, %xmm1
; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vshufps {{.*#+}} xmm3 = xmm0[3,3,3,3]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm1, %xmm1
; X64-AVX-I32-NEXT: vpsrld $16, %xmm0, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm3
; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3
; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx
; X64-AVX-I32-NEXT: vmovd %ecx, %xmm3
; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3]
; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4
; X64-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4
; X64-AVX-I32-NEXT: vcvttss2si %xmm4, %eax
; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3
; X64-AVX-I32-NEXT: vpsrlq $48, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0
; X64-AVX-I32-NEXT: vcvttss2si %xmm0, %eax
; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm0
; X64-AVX-I32-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; X64-AVX-I32-NEXT: vmovdqa %ymm2, %ymm0
; X64-AVX-I32-NEXT: retq
;
; X64-FP16-I32-LABEL: lrint_v16f16:
; X64-FP16-I32: # %bb.0:
; X64-FP16-I32-NEXT: vcvtph2dq %ymm0, %zmm0
; X64-FP16-I32-NEXT: retq
  %a = call <16 x iXLen> @llvm.lrint.v16iXLen.v16f16(<16 x half> %x)
  ret <16 x iXLen> %a
}
declare <16 x iXLen> @llvm.lrint.v16iXLen.v16f16(<16 x half>)

define <32 x iXLen> @lrint_v32f32(<32 x half> %x) {
; X86-AVX-I16-LABEL: lrint_v32f32:
; X86-AVX-I16: # %bb.0:
; X86-AVX-I16-NEXT: vextracti128 $1, %ymm0, %xmm2
; X86-AVX-I16-NEXT: vpsrld $16, %xmm2, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %ecx
; X86-AVX-I16-NEXT: vmovd %ecx, %xmm3
; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrlq $48, %xmm2, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm4 = xmm2[1,0]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm4 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm4 = xmm2[3,3,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax
; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm3, %xmm2
; X86-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %ecx
; X86-AVX-I16-NEXT: vmovd %ecx, %xmm3
; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm4 = xmm0[1,0]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm4 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm4 = xmm0[3,3,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0
; X86-AVX-I16-NEXT: vcvttss2si %xmm0, %eax
; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm3, %xmm0
; X86-AVX-I16-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
; X86-AVX-I16-NEXT: vextracti128 $1, %ymm1, %xmm2
; X86-AVX-I16-NEXT: vpsrld $16, %xmm2, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm3
; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3
; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %ecx
; X86-AVX-I16-NEXT: vmovd %ecx, %xmm3
; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrlq $48, %xmm2, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm4 = xmm2[1,0]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm4 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm4 = xmm2[3,3,3,3]
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4
; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax
; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm3, %xmm3
; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2
; X86-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2
X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2 ; X86-AVX-I16-NEXT: vcvttss2si %xmm2, %eax ; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm3, %xmm2 ; X86-AVX-I16-NEXT: vpsrld $16, %xmm1, %xmm3 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm3 ; X86-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vcvttss2si %xmm3, %ecx ; X86-AVX-I16-NEXT: vmovd %ecx, %xmm3 ; X86-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm4 = xmm1[1,1,3,3] ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vpsrlq $48, %xmm1, %xmm4 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vshufpd {{.*#+}} xmm4 = xmm1[1,0] ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm4 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vshufps {{.*#+}} xmm4 = xmm1[3,3,3,3] ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm3, %xmm3 ; X86-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1 ; X86-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1 ; X86-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1 ; X86-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1 ; X86-AVX-I16-NEXT: vcvttss2si %xmm1, %eax ; X86-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm3, %xmm1 ; X86-AVX-I16-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1 ; X86-AVX-I16-NEXT: retl ; ; X86-FP16-I16-LABEL: lrint_v32f32: ; X86-FP16-I16: # %bb.0: ; X86-FP16-I16-NEXT: vcvtph2w %zmm0, %zmm0 ; X86-FP16-I16-NEXT: retl ; ; X64-AVX-I16-LABEL: lrint_v32f32: ; X64-AVX-I16: # %bb.0: ; X64-AVX-I16-NEXT: vextracti128 $1, %ymm0, %xmm2 ; X64-AVX-I16-NEXT: vpsrld $16, %xmm2, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, 
%xmm3 ; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %ecx ; X64-AVX-I16-NEXT: vmovd %ecx, %xmm3 ; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrlq $48, %xmm2, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm4 = xmm2[1,0] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm4 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm4 = xmm2[3,3,3,3] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax ; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm3, %xmm2 ; X64-AVX-I16-NEXT: vpsrld $16, %xmm0, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm3 ; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %ecx ; X64-AVX-I16-NEXT: vmovd %ecx, %xmm3 ; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm4 = xmm0[1,1,3,3] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrlq $48, %xmm0, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; 
X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm4 = xmm0[1,0] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm4 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm4 = xmm0[3,3,3,3] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0 ; X64-AVX-I16-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm0, %xmm0 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm0, %xmm0 ; X64-AVX-I16-NEXT: vcvttss2si %xmm0, %eax ; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm3, %xmm0 ; X64-AVX-I16-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0 ; X64-AVX-I16-NEXT: vextracti128 $1, %ymm1, %xmm2 ; X64-AVX-I16-NEXT: vpsrld $16, %xmm2, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm3 ; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %ecx ; X64-AVX-I16-NEXT: vmovd %ecx, %xmm3 ; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrlq $48, %xmm2, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm4 = xmm2[1,0] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm4 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph 
$4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm4 = xmm2[3,3,3,3] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm2 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I16-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I16-NEXT: vcvttss2si %xmm2, %eax ; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm3, %xmm2 ; X64-AVX-I16-NEXT: vpsrld $16, %xmm1, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm3 ; X64-AVX-I16-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vcvttss2si %xmm3, %ecx ; X64-AVX-I16-NEXT: vmovd %ecx, %xmm3 ; X64-AVX-I16-NEXT: vpinsrw $1, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vmovshdup {{.*#+}} xmm4 = xmm1[1,1,3,3] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $2, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrlq $48, %xmm1, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vshufpd {{.*#+}} xmm4 = xmm1[1,0] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $4, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm4 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $5, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vshufps {{.*#+}} xmm4 = xmm1[3,3,3,3] ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I16-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I16-NEXT: vpinsrw $6, %eax, %xmm3, %xmm3 ; X64-AVX-I16-NEXT: vpsrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1 ; X64-AVX-I16-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1 ; X64-AVX-I16-NEXT: vcvtps2ph $4, %xmm1, %xmm1 ; X64-AVX-I16-NEXT: vcvtph2ps %xmm1, %xmm1 ; 
X64-AVX-I16-NEXT: vcvttss2si %xmm1, %eax ; X64-AVX-I16-NEXT: vpinsrw $7, %eax, %xmm3, %xmm1 ; X64-AVX-I16-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1 ; X64-AVX-I16-NEXT: retq ; ; X64-FP16-I16-LABEL: lrint_v32f32: ; X64-FP16-I16: # %bb.0: ; X64-FP16-I16-NEXT: vcvtph2w %zmm0, %zmm0 ; X64-FP16-I16-NEXT: retq ; ; X86-AVX-I32-LABEL: lrint_v32f32: ; X86-AVX-I32: # %bb.0: ; X86-AVX-I32-NEXT: vmovdqa %ymm0, %ymm2 ; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm0 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vcvttss2si %xmm0, %eax ; X86-AVX-I32-NEXT: vshufpd {{.*#+}} xmm0 = xmm2[1,0] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vcvttss2si %xmm0, %ecx ; X86-AVX-I32-NEXT: vmovd %ecx, %xmm0 ; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vshufps {{.*#+}} xmm3 = xmm2[3,3,3,3] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm0, %xmm0 ; X86-AVX-I32-NEXT: vpsrld $16, %xmm2, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx ; X86-AVX-I32-NEXT: vmovd %ecx, %xmm3 ; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vpsrlq $48, %xmm2, %xmm4 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0 ; X86-AVX-I32-NEXT: vextracti128 $1, %ymm2, %xmm2 ; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; 
X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I32-NEXT: vshufpd {{.*#+}} xmm3 = xmm2[1,0] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx ; X86-AVX-I32-NEXT: vmovd %ecx, %xmm3 ; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vshufps {{.*#+}} xmm4 = xmm2[3,3,3,3] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm4 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vpsrld $16, %xmm2, %xmm4 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm4 ; X86-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vcvttss2si %xmm4, %ecx ; X86-AVX-I32-NEXT: vmovd %ecx, %xmm4 ; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm5 = xmm2[1,1,3,3] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm4, %xmm4 ; X86-AVX-I32-NEXT: vpsrlq $48, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax ; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm4, %xmm2 ; X86-AVX-I32-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm4 ; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm2 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %eax ; X86-AVX-I32-NEXT: vshufpd {{.*#+}} xmm2 = xmm1[1,0] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vcvttss2si %xmm2, %ecx ; X86-AVX-I32-NEXT: vmovd %ecx, %xmm2 ; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vshufps {{.*#+}} xmm3 = xmm1[3,3,3,3] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vpsrldq 
{{.*#+}} xmm3 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm2, %xmm2 ; X86-AVX-I32-NEXT: vpsrld $16, %xmm1, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx ; X86-AVX-I32-NEXT: vmovd %ecx, %xmm3 ; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vpsrlq $48, %xmm1, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2 ; X86-AVX-I32-NEXT: vextracti128 $1, %ymm1, %xmm1 ; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X86-AVX-I32-NEXT: vshufpd {{.*#+}} xmm3 = xmm1[1,0] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx ; X86-AVX-I32-NEXT: vmovd %ecx, %xmm3 ; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vshufps {{.*#+}} xmm5 = xmm1[3,3,3,3] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm3 ; X86-AVX-I32-NEXT: vpsrld $16, %xmm1, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm5 ; X86-AVX-I32-NEXT: vroundss $4, 
%xmm5, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vcvttss2si %xmm5, %ecx ; X86-AVX-I32-NEXT: vmovd %ecx, %xmm5 ; X86-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm6 = xmm1[1,1,3,3] ; X86-AVX-I32-NEXT: vcvtph2ps %xmm6, %xmm6 ; X86-AVX-I32-NEXT: vroundss $4, %xmm6, %xmm6, %xmm6 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm6, %xmm6 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm6, %xmm6 ; X86-AVX-I32-NEXT: vcvttss2si %xmm6, %eax ; X86-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm5, %xmm5 ; X86-AVX-I32-NEXT: vpsrlq $48, %xmm1, %xmm1 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1 ; X86-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1 ; X86-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1 ; X86-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1 ; X86-AVX-I32-NEXT: vcvttss2si %xmm1, %eax ; X86-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm5, %xmm1 ; X86-AVX-I32-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm3 ; X86-AVX-I32-NEXT: vmovdqa %ymm4, %ymm1 ; X86-AVX-I32-NEXT: retl ; ; X86-FP16-I32-LABEL: lrint_v32f32: ; X86-FP16-I32: # %bb.0: ; X86-FP16-I32-NEXT: vcvtph2dq %ymm0, %zmm2 ; X86-FP16-I32-NEXT: vextractf64x4 $1, %zmm0, %ymm0 ; X86-FP16-I32-NEXT: vcvtph2dq %ymm0, %zmm1 ; X86-FP16-I32-NEXT: vmovaps %zmm2, %zmm0 ; X86-FP16-I32-NEXT: retl ; ; X64-AVX-I32-LABEL: lrint_v32f32: ; X64-AVX-I32: # %bb.0: ; X64-AVX-I32-NEXT: vmovdqa %ymm0, %ymm2 ; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm0 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vcvttss2si %xmm0, %eax ; X64-AVX-I32-NEXT: vshufpd {{.*#+}} xmm0 = xmm2[1,0] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vroundss $4, %xmm0, %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vcvttss2si %xmm0, %ecx ; X64-AVX-I32-NEXT: vmovd %ecx, %xmm0 ; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vshufps {{.*#+}} xmm3 = xmm2[3,3,3,3] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm0, %xmm0 ; X64-AVX-I32-NEXT: vpsrld $16, %xmm2, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx ; X64-AVX-I32-NEXT: vmovd %ecx, %xmm3 ; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm4 = xmm2[1,1,3,3] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, 
%xmm4 ; X64-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vpsrlq $48, %xmm2, %xmm4 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0 ; X64-AVX-I32-NEXT: vextracti128 $1, %ymm2, %xmm2 ; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I32-NEXT: vshufpd {{.*#+}} xmm3 = xmm2[1,0] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx ; X64-AVX-I32-NEXT: vmovd %ecx, %xmm3 ; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vshufps {{.*#+}} xmm4 = xmm2[3,3,3,3] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm4 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vpsrld $16, %xmm2, %xmm4 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvttss2si %xmm4, %eax ; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm4 ; X64-AVX-I32-NEXT: vroundss $4, %xmm4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vcvttss2si %xmm4, %ecx ; X64-AVX-I32-NEXT: vmovd %ecx, %xmm4 ; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm5 = xmm2[1,1,3,3] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm4, %xmm4 ; X64-AVX-I32-NEXT: vpsrlq $48, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax ; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm4, %xmm2 ; X64-AVX-I32-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm4 ; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm2 = 
xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %eax ; X64-AVX-I32-NEXT: vshufpd {{.*#+}} xmm2 = xmm1[1,0] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vroundss $4, %xmm2, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vcvttss2si %xmm2, %ecx ; X64-AVX-I32-NEXT: vmovd %ecx, %xmm2 ; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vshufps {{.*#+}} xmm3 = xmm1[3,3,3,3] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm2, %xmm2 ; X64-AVX-I32-NEXT: vpsrld $16, %xmm1, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx ; X64-AVX-I32-NEXT: vmovd %ecx, %xmm3 ; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vpsrlq $48, %xmm1, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2 ; X64-AVX-I32-NEXT: vextracti128 $1, %ymm1, %xmm1 ; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %eax ; X64-AVX-I32-NEXT: vshufpd {{.*#+}} xmm3 = xmm1[1,0] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vroundss $4, %xmm3, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vcvttss2si %xmm3, %ecx ; X64-AVX-I32-NEXT: vmovd %ecx, %xmm3 ; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vshufps {{.*#+}} xmm5 = 
xmm1[3,3,3,3] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm3, %xmm3 ; X64-AVX-I32-NEXT: vpsrld $16, %xmm1, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvttss2si %xmm5, %eax ; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm5 ; X64-AVX-I32-NEXT: vroundss $4, %xmm5, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vcvttss2si %xmm5, %ecx ; X64-AVX-I32-NEXT: vmovd %ecx, %xmm5 ; X64-AVX-I32-NEXT: vpinsrd $1, %eax, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vmovshdup {{.*#+}} xmm6 = xmm1[1,1,3,3] ; X64-AVX-I32-NEXT: vcvtph2ps %xmm6, %xmm6 ; X64-AVX-I32-NEXT: vroundss $4, %xmm6, %xmm6, %xmm6 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm6, %xmm6 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm6, %xmm6 ; X64-AVX-I32-NEXT: vcvttss2si %xmm6, %eax ; X64-AVX-I32-NEXT: vpinsrd $2, %eax, %xmm5, %xmm5 ; X64-AVX-I32-NEXT: vpsrlq $48, %xmm1, %xmm1 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1 ; X64-AVX-I32-NEXT: vroundss $4, %xmm1, %xmm1, %xmm1 ; X64-AVX-I32-NEXT: vcvtps2ph $4, %xmm1, %xmm1 ; X64-AVX-I32-NEXT: vcvtph2ps %xmm1, %xmm1 ; X64-AVX-I32-NEXT: vcvttss2si %xmm1, %eax ; X64-AVX-I32-NEXT: vpinsrd $3, %eax, %xmm5, %xmm1 ; X64-AVX-I32-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm3 ; X64-AVX-I32-NEXT: vmovdqa %ymm4, %ymm1 ; X64-AVX-I32-NEXT: retq ; ; X64-FP16-I32-LABEL: lrint_v32f32: ; X64-FP16-I32: # %bb.0: ; X64-FP16-I32-NEXT: vcvtph2dq %ymm0, %zmm2 ; X64-FP16-I32-NEXT: vextractf64x4 $1, %zmm0, %ymm0 ; X64-FP16-I32-NEXT: vcvtph2dq %ymm0, %zmm1 ; X64-FP16-I32-NEXT: vmovaps %zmm2, %zmm0 ; X64-FP16-I32-NEXT: retq %a = call <32 x iXLen> @llvm.lrint.v32iXLen.v32f16(<32 x half> %x) ret <32 x iXLen> %a } declare <32 x iXLen> @llvm.lrint.v32iXLen.v32f16(<32 x half>)
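; The checks above capture the two lowering strategies for vector lrint of
; half on x86: with avx512fp16+avx512vl the whole vector converts in one
; instruction (vcvtph2w when iXLen == i16, vcvtph2dq when iXLen == i32),
; while with avx2+f16c each lane is scalarized: vcvtph2ps widens the half,
; vroundss $4 rounds using the current MXCSR rounding mode (as lrint
; requires), the vcvtps2ph/vcvtph2ps pair round-trips through half so the
; rounded value matches half precision, and vcvttss2si truncates the
; now-integral value before vpinsrw/vpinsrd and vinserti128 rebuild the
; result vector.
;
; A minimal scalar sketch of the same intrinsic, kept as a comment since it
; is not exercised by the RUN lines above (the function name lrint_f16 is
; illustrative only):
;
;   define i32 @lrint_f16(half %x) {
;     %r = call i32 @llvm.lrint.i32.f16(half %x)
;     ret i32 %r
;   }
;   declare i32 @llvm.lrint.i32.f16(half)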