author     Jakub Jelinek <jakub@redhat.com>   2023-09-06 17:25:49 +0200
committer  Jakub Jelinek <jakub@redhat.com>   2023-09-06 17:28:24 +0200
commit     4f4fa2501186e43d115238ae938b3df322c9e02a (patch)
tree       175594012bb25f03f599aba0078c082fb98fae81 /gcc/tree.cc
parent     6b96de22d6bcadb45530c1898b264e4738afa4fd (diff)
Middle-end _BitInt support [PR102989]
The following patch introduces the middle-end part of the _BitInt support,
a new BITINT_TYPE, handling it where needed, except the lowering pass and
sanitizer support.

2023-09-06  Jakub Jelinek  <jakub@redhat.com>

	PR c/102989
	* tree.def (BITINT_TYPE): New type.
	* tree.h (TREE_CHECK6, TREE_NOT_CHECK6): Define.
	(NUMERICAL_TYPE_CHECK, INTEGRAL_TYPE_P): Include BITINT_TYPE.
	(BITINT_TYPE_P): Define.
	(CONSTRUCTOR_BITFIELD_P): Return true even for BLKmode bit-fields
	if they have BITINT_TYPE type.
	(tree_check6, tree_not_check6): New inline functions.
	(any_integral_type_check): Include BITINT_TYPE.
	(build_bitint_type): Declare.
	* tree.cc (tree_code_size, wide_int_to_tree_1, cache_integer_cst,
	build_zero_cst, type_hash_canon_hash, type_cache_hasher::equal,
	type_hash_canon): Handle BITINT_TYPE.
	(bitint_type_cache): New variable.
	(build_bitint_type): New function.
	(signed_or_unsigned_type_for, verify_type_variant, verify_type):
	Handle BITINT_TYPE.
	(tree_cc_finalize): Free bitint_type_cache.
	* builtins.cc (type_to_class): Handle BITINT_TYPE.
	(fold_builtin_unordered_cmp): Handle BITINT_TYPE like INTEGER_TYPE.
	* cfgexpand.cc (expand_debug_expr): Punt on BLKmode BITINT_TYPE
	INTEGER_CSTs.
	* convert.cc (convert_to_pointer_1, convert_to_real_1,
	convert_to_complex_1): Handle BITINT_TYPE like INTEGER_TYPE.
	(convert_to_integer_1): Likewise.  For BITINT_TYPE don't check
	GET_MODE_PRECISION (TYPE_MODE (type)).
	* doc/generic.texi (BITINT_TYPE): Document.
	* doc/tm.texi.in (TARGET_C_BITINT_TYPE_INFO): New.
	* doc/tm.texi: Regenerated.
	* dwarf2out.cc (base_type_die, is_base_type, modified_type_die,
	gen_type_die_with_usage): Handle BITINT_TYPE.
	(rtl_for_decl_init): Punt on BLKmode BITINT_TYPE INTEGER_CSTs or
	handle those which fit into shwi.
	* expr.cc (expand_expr_real_1): Define EXTEND_BITINT macro, reduce
	to bitfield precision reads from BITINT_TYPE vars, parameters or
	memory locations.  Expand large/huge BITINT_TYPE INTEGER_CSTs into
	memory.
	* fold-const.cc (fold_convert_loc, make_range_step): Handle
	BITINT_TYPE.
	(extract_muldiv_1): For BITINT_TYPE use TYPE_PRECISION rather than
	GET_MODE_SIZE (SCALAR_INT_TYPE_MODE).
	(native_encode_int, native_interpret_int, native_interpret_expr):
	Handle BITINT_TYPE.
	* gimple-expr.cc (useless_type_conversion_p): Make BITINT_TYPE
	to some other integral type or vice versa conversions non-useless.
	* gimple-fold.cc (gimple_fold_builtin_memset): Punt for BITINT_TYPE.
	(clear_padding_unit): Mention in comment that _BitInt types don't
	need to fit either.
	(clear_padding_bitint_needs_padding_p): New function.
	(clear_padding_type_may_have_padding_p): Handle BITINT_TYPE.
	(clear_padding_type): Likewise.
	* internal-fn.cc (expand_mul_overflow): For unsigned non-mode
	precision operands force pos_neg? to 1.
	(expand_MULBITINT, expand_DIVMODBITINT, expand_FLOATTOBITINT,
	expand_BITINTTOFLOAT): New functions.
	* internal-fn.def (MULBITINT, DIVMODBITINT, FLOATTOBITINT,
	BITINTTOFLOAT): New internal functions.
	* internal-fn.h (expand_MULBITINT, expand_DIVMODBITINT,
	expand_FLOATTOBITINT, expand_BITINTTOFLOAT): Declare.
	* match.pd (non-equality compare simplifications from fold_binary):
	Punt if TYPE_MODE (arg1_type) is BLKmode.
	* pretty-print.h (pp_wide_int): Handle printing of large precision
	wide_ints which would buffer overflow digit_buffer.
	* stor-layout.cc (finish_bitfield_representative): For bit-fields
	with BITINT_TYPE, prefer representatives with precisions in
	multiple of limb precision.
	(layout_type): Handle BITINT_TYPE.  Handle COMPLEX_TYPE with
	BLKmode element type and assert it is BITINT_TYPE.
	* target.def (bitint_type_info): New C target hook.
	* target.h (struct bitint_info): New type.
	* targhooks.cc (default_bitint_type_info): New function.
	* targhooks.h (default_bitint_type_info): Declare.
	* tree-pretty-print.cc (dump_generic_node): Handle BITINT_TYPE.
	Handle printing large wide_ints which would buffer overflow
	digit_buffer.
	* tree-ssa-sccvn.cc: Include target.h.
	(eliminate_dom_walker::eliminate_stmt): Punt for large/huge
	BITINT_TYPE.
	* tree-switch-conversion.cc (jump_table_cluster::emit): For more
	than 64-bit BITINT_TYPE subtract low bound from expression and
	cast to 64-bit integer type both the controlling expression and
	case labels.
	* typeclass.h (enum type_class): Add bitint_type_class enumerator.
	* varasm.cc (output_constant): Handle BITINT_TYPE INTEGER_CSTs.
	* vr-values.cc (check_for_binary_op_overflow): Use widest2_int
	rather than widest_int.
	(simplify_using_ranges::simplify_internal_call_using_ranges): Use
	unsigned_type_for rather than build_nonstandard_integer_type.
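
For context, a minimal sketch (not part of this commit) of how a front end
might use the new build_bitint_type entry point when it sees a C23
declaration such as "unsigned _BitInt(256) x;".  The wrapper name
make_bitint_var_decl is hypothetical; build_decl, get_identifier and
UNKNOWN_LOCATION are the existing GCC tree APIs, and the usual internal
headers (tree.h, stringpool.h) are assumed to be included.

    /* Hypothetical helper, for illustration only: map a C23 _BitInt
       declaration onto the new BITINT_TYPE tree node.  */
    static tree
    make_bitint_var_decl (const char *name, unsigned HOST_WIDE_INT prec,
			  int unsignedp)
    {
      /* build_bitint_type caches precisions up to MAX_INT_CACHED_PREC and
	 hash-canonicalizes larger ones, so repeated calls with the same
	 arguments return the same canonical type node.  */
      tree type = build_bitint_type (prec, unsignedp);
      return build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier (name), type);
    }

Precisions above MAX_INT_CACHED_PREC still collapse to a single canonical
node via type_hash_canon, which is what the new BITINT_TYPE cases in
type_hash_canon_hash and type_cache_hasher::equal in the diff below ensure.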
Diffstat (limited to 'gcc/tree.cc')
-rw-r--r--  gcc/tree.cc  67
1 file changed, 64 insertions, 3 deletions
diff --git a/gcc/tree.cc b/gcc/tree.cc
index 420857b..9651ee0 100644
--- a/gcc/tree.cc
+++ b/gcc/tree.cc
@@ -991,6 +991,7 @@ tree_code_size (enum tree_code code)
case VOID_TYPE:
case FUNCTION_TYPE:
case METHOD_TYPE:
+ case BITINT_TYPE:
case LANG_TYPE: return sizeof (tree_type_non_common);
default:
gcc_checking_assert (code >= NUM_TREE_CODES);
@@ -1732,6 +1733,7 @@ wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
case INTEGER_TYPE:
case OFFSET_TYPE:
+ case BITINT_TYPE:
if (TYPE_SIGN (type) == UNSIGNED)
{
/* Cache [0, N). */
@@ -1915,6 +1917,7 @@ cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
case INTEGER_TYPE:
case OFFSET_TYPE:
+ case BITINT_TYPE:
if (TYPE_UNSIGNED (type))
{
/* Cache 0..N */
@@ -2637,7 +2640,7 @@ build_zero_cst (tree type)
{
case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
- case OFFSET_TYPE: case NULLPTR_TYPE:
+ case OFFSET_TYPE: case NULLPTR_TYPE: case BITINT_TYPE:
return build_int_cst (type, 0);
case REAL_TYPE:
@@ -6053,7 +6056,16 @@ type_hash_canon_hash (tree type)
hstate.add_object (TREE_INT_CST_ELT (t, i));
break;
}
-
+
+ case BITINT_TYPE:
+ {
+ unsigned prec = TYPE_PRECISION (type);
+ unsigned uns = TYPE_UNSIGNED (type);
+ hstate.add_object (prec);
+ hstate.add_int (uns);
+ break;
+ }
+
case REAL_TYPE:
case FIXED_POINT_TYPE:
{
@@ -6136,6 +6148,11 @@ type_cache_hasher::equal (type_hash *a, type_hash *b)
|| tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
TYPE_MIN_VALUE (b->type))));
+ case BITINT_TYPE:
+ if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
+ return false;
+ return TYPE_UNSIGNED (a->type) == TYPE_UNSIGNED (b->type);
+
case FIXED_POINT_TYPE:
return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
@@ -6236,7 +6253,7 @@ type_hash_canon (unsigned int hashcode, tree type)
/* Free also min/max values and the cache for integer
types. This can't be done in free_node, as LTO frees
those on its own. */
- if (TREE_CODE (type) == INTEGER_TYPE)
+ if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == BITINT_TYPE)
{
if (TYPE_MIN_VALUE (type)
&& TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
@@ -7154,6 +7171,44 @@ build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
return type;
}
+static GTY(()) vec<tree, va_gc> *bitint_type_cache;
+
+/* Builds a signed or unsigned _BitInt(PRECISION) type. */
+tree
+build_bitint_type (unsigned HOST_WIDE_INT precision, int unsignedp)
+{
+ tree itype, ret;
+
+ if (unsignedp)
+ unsignedp = MAX_INT_CACHED_PREC + 1;
+
+ if (bitint_type_cache == NULL)
+ vec_safe_grow_cleared (bitint_type_cache, 2 * MAX_INT_CACHED_PREC + 2);
+
+ if (precision <= MAX_INT_CACHED_PREC)
+ {
+ itype = (*bitint_type_cache)[precision + unsignedp];
+ if (itype)
+ return itype;
+ }
+
+ itype = make_node (BITINT_TYPE);
+ TYPE_PRECISION (itype) = precision;
+
+ if (unsignedp)
+ fixup_unsigned_type (itype);
+ else
+ fixup_signed_type (itype);
+
+ inchash::hash hstate;
+ inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
+ ret = type_hash_canon (hstate.end (), itype);
+ if (precision <= MAX_INT_CACHED_PREC)
+ (*bitint_type_cache)[precision + unsignedp] = ret;
+
+ return ret;
+}
+
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
is true, reuse such a type that has already been constructed. */
@@ -11041,6 +11096,8 @@ signed_or_unsigned_type_for (int unsignedp, tree type)
else
return NULL_TREE;
+ if (TREE_CODE (type) == BITINT_TYPE)
+ return build_bitint_type (bits, unsignedp);
return build_nonstandard_integer_type (bits, unsignedp);
}
@@ -13462,6 +13519,7 @@ verify_type_variant (const_tree t, tree tv)
if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
|| TREE_CODE (t) == INTEGER_TYPE
|| TREE_CODE (t) == BOOLEAN_TYPE
+ || TREE_CODE (t) == BITINT_TYPE
|| SCALAR_FLOAT_TYPE_P (t)
|| FIXED_POINT_TYPE_P (t))
{
@@ -14201,6 +14259,7 @@ verify_type (const_tree t)
}
else if (TREE_CODE (t) == INTEGER_TYPE
|| TREE_CODE (t) == BOOLEAN_TYPE
+ || TREE_CODE (t) == BITINT_TYPE
|| TREE_CODE (t) == OFFSET_TYPE
|| TREE_CODE (t) == REFERENCE_TYPE
|| TREE_CODE (t) == NULLPTR_TYPE
@@ -14260,6 +14319,7 @@ verify_type (const_tree t)
}
if (TREE_CODE (t) != INTEGER_TYPE
&& TREE_CODE (t) != BOOLEAN_TYPE
+ && TREE_CODE (t) != BITINT_TYPE
&& TREE_CODE (t) != OFFSET_TYPE
&& TREE_CODE (t) != REFERENCE_TYPE
&& TREE_CODE (t) != NULLPTR_TYPE
@@ -15035,6 +15095,7 @@ void
tree_cc_finalize (void)
{
clear_nonstandard_integer_type_cache ();
+ vec_free (bitint_type_cache);
}
#if CHECKING_P