author     Tamar Christina <tamar.christina@arm.com>    2023-10-20 08:08:54 +0100
committer  Tamar Christina <tamar.christina@arm.com>    2023-10-20 08:08:54 +0100
commit     dd3e6f52e406a0bab787aebcb384ef6e2cf7a1e8 (patch)
tree       6a090a8804e7126012b6d0654e577a46eb9f705a /gcc
parent     8ba8f0dea0e6ddc1d06cb96818ffe24500983d0c (diff)
middle-end: Enable bit-field vectorization to work correctly when we're vectorizing inside conds
The bitfield vectorization support does not currently recognize bitfields inside
gconds. This means they can't be used as conditions for early break vectorization,
which is functionality we require. This patch adds support for them by explicitly
matching and handling gcond as a source.

Testcases are added in the testsuite update patch, as the only way to reach this
code is through early break vectorization. See tests:

	- vect-early-break_20.c
	- vect-early-break_21.c

gcc/ChangeLog:

	* tree-vect-patterns.cc (vect_init_pattern_stmt): Copy
	STMT_VINFO_TYPE from the original statement.
	(vect_recog_bitfield_ref_pattern): Support bitfields in gcond.

Co-Authored-By: Andre Vieira <andre.simoesdiasvieira@arm.com>
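As an illustration of the kind of loop this enables (not one of the actual testcases
referenced above; the struct layout, field names and threshold are made up for this
sketch), the bit-field read feeds the condition of an early exit directly:

	struct S { unsigned a : 3; unsigned b : 5; unsigned c : 24; };

	/* Scan for the first element whose bit-field `b` exceeds a threshold.
	   After lowering, the `p[i].b > 7` test becomes a comparison of a
	   BIT_FIELD_REF result inside a gcond, which the pattern previously
	   did not recognize, blocking early break vectorization of the loop.  */
	int
	find_first (struct S *p, int n)
	{
	  for (int i = 0; i < n; i++)
	    if (p[i].b > 7)
	      return i;
	  return -1;
	}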
Diffstat (limited to 'gcc')
-rw-r--r--    gcc/tree-vect-patterns.cc    66
1 file changed, 49 insertions(+), 17 deletions(-)
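For orientation before the diff: the comment updated below describes the two shapes
the pattern now matches (a conversion of the bit-field value, or a gcond comparing
it against a constant) and the statements it emits. A rough scalar sketch of what
the rewritten condition computes, assuming the example struct above with `b` at bit
position 3 and width 5 in a 32-bit container (illustrative only, not the emitted
GIMPLE):

	/* Scalar equivalent of the pattern expansion for `p[i].b > 7`:
	   `container` stands for the whole struct word loaded via BIT_FIELD_REF;
	   the field is extracted with a shift and mask, converted, and the gcond
	   is rebuilt on the converted result.  */
	static int
	field_exceeds_threshold (unsigned int container)
	{
	  unsigned int shifted = container >> 3;  /* shift bitpos down (skipped
						     when bitpos == 0)           */
	  unsigned int masked = shifted & 0x1f;   /* mask to the field's bitsize */
	  unsigned int result = masked;           /* convert to ret_type         */
	  return result > 7;                      /* new gcond built on `result`,
						     with the constant folded to
						     ret_type                    */
	}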
diff --git a/gcc/tree-vect-patterns.cc b/gcc/tree-vect-patterns.cc
index 6964c99..6b6b412 100644
--- a/gcc/tree-vect-patterns.cc
+++ b/gcc/tree-vect-patterns.cc
@@ -128,6 +128,7 @@ vect_init_pattern_stmt (vec_info *vinfo, gimple *pattern_stmt,
STMT_VINFO_RELATED_STMT (pattern_stmt_info) = orig_stmt_info;
STMT_VINFO_DEF_TYPE (pattern_stmt_info)
= STMT_VINFO_DEF_TYPE (orig_stmt_info);
+ STMT_VINFO_TYPE (pattern_stmt_info) = STMT_VINFO_TYPE (orig_stmt_info);
if (!STMT_VINFO_VECTYPE (pattern_stmt_info))
{
gcc_assert (!vectype
@@ -2539,6 +2540,10 @@ vect_recog_widen_sum_pattern (vec_info *vinfo,
bf_value = BIT_FIELD_REF (container, bitsize, bitpos);
result = (type_out) bf_value;
+ or
+
+ if (BIT_FIELD_REF (container, bitsize, bitpos) `cmp` <constant>)
+
where type_out is a non-bitfield type, that is to say, its precision matches
2^(TYPE_SIZE(type_out) - (TYPE_UNSIGNED (type_out) ? 1 : 2)).
@@ -2548,6 +2553,10 @@ vect_recog_widen_sum_pattern (vec_info *vinfo,
here it starts with:
result = (type_out) bf_value;
+ or
+
+ if (BIT_FIELD_REF (container, bitsize, bitpos) `cmp` <constant>)
+
Output:
* TYPE_OUT: The vector type of the output of this pattern.
@@ -2589,33 +2598,45 @@ vect_recog_widen_sum_pattern (vec_info *vinfo,
The shifting is always optional depending on whether bitpos != 0.
+ When the original bitfield was inside a gcond then a new gcond is also
+ generated with the newly created `result` as the operand to the comparison.
+
*/
static gimple *
vect_recog_bitfield_ref_pattern (vec_info *vinfo, stmt_vec_info stmt_info,
tree *type_out)
{
- gassign *first_stmt = dyn_cast <gassign *> (stmt_info->stmt);
-
- if (!first_stmt)
- return NULL;
-
- gassign *bf_stmt;
- if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (first_stmt))
- && TREE_CODE (gimple_assign_rhs1 (first_stmt)) == SSA_NAME)
+ gimple *bf_stmt = NULL;
+ tree lhs = NULL_TREE;
+ tree ret_type = NULL_TREE;
+ gimple *stmt = STMT_VINFO_STMT (stmt_info);
+ if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
+ {
+ tree op = gimple_cond_lhs (cond_stmt);
+ if (TREE_CODE (op) != SSA_NAME)
+ return NULL;
+ bf_stmt = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (op));
+ if (TREE_CODE (gimple_cond_rhs (cond_stmt)) != INTEGER_CST)
+ return NULL;
+ }
+ else if (is_gimple_assign (stmt)
+ && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
+ && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
{
- gimple *second_stmt
- = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (first_stmt));
+ gimple *second_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
bf_stmt = dyn_cast <gassign *> (second_stmt);
- if (!bf_stmt
- || gimple_assign_rhs_code (bf_stmt) != BIT_FIELD_REF)
- return NULL;
+ lhs = gimple_assign_lhs (stmt);
+ ret_type = TREE_TYPE (lhs);
}
- else
+
+ if (!bf_stmt
+ || gimple_assign_rhs_code (bf_stmt) != BIT_FIELD_REF)
return NULL;
tree bf_ref = gimple_assign_rhs1 (bf_stmt);
tree container = TREE_OPERAND (bf_ref, 0);
+ ret_type = ret_type ? ret_type : TREE_TYPE (container);
if (!bit_field_offset (bf_ref).is_constant ()
|| !bit_field_size (bf_ref).is_constant ()
@@ -2629,8 +2650,6 @@ vect_recog_bitfield_ref_pattern (vec_info *vinfo, stmt_vec_info stmt_info,
gimple *use_stmt, *pattern_stmt;
use_operand_p use_p;
- tree ret = gimple_assign_lhs (first_stmt);
- tree ret_type = TREE_TYPE (ret);
bool shift_first = true;
tree container_type = TREE_TYPE (container);
tree vectype = get_vectype_for_scalar_type (vinfo, container_type);
@@ -2675,7 +2694,7 @@ vect_recog_bitfield_ref_pattern (vec_info *vinfo, stmt_vec_info stmt_info,
/* If the only use of the result of this BIT_FIELD_REF + CONVERT is a
PLUS_EXPR then do the shift last as some targets can combine the shift and
add into a single instruction. */
- if (single_imm_use (gimple_assign_lhs (first_stmt), &use_p, &use_stmt))
+ if (lhs && single_imm_use (lhs, &use_p, &use_stmt))
{
if (gimple_code (use_stmt) == GIMPLE_ASSIGN
&& gimple_assign_rhs_code (use_stmt) == PLUS_EXPR)
@@ -2748,6 +2767,19 @@ vect_recog_bitfield_ref_pattern (vec_info *vinfo, stmt_vec_info stmt_info,
NOP_EXPR, result);
}
+ if (!lhs)
+ {
+ append_pattern_def_seq (vinfo, stmt_info, pattern_stmt, vectype);
+ gcond *cond_stmt = dyn_cast <gcond *> (stmt_info->stmt);
+ tree cond_cst = gimple_cond_rhs (cond_stmt);
+ pattern_stmt
+ = gimple_build_cond (gimple_cond_code (cond_stmt),
+ gimple_get_lhs (pattern_stmt),
+ fold_convert (ret_type, cond_cst),
+ gimple_cond_true_label (cond_stmt),
+ gimple_cond_false_label (cond_stmt));
+ }
+
*type_out = STMT_VINFO_VECTYPE (stmt_info);
vect_pattern_detected ("bitfield_ref pattern", stmt_info->stmt);