author     Richard Biener <rguenther@suse.de>  2022-01-18 09:06:13 +0100
committer  Richard Biener <rguenther@suse.de>  2022-01-18 10:27:56 +0100
commit     00dc7877ee02634d4837d024e36b55bef6b9d49c (patch)
tree       f0a89f33e9515536acaac397db675ba7fd87466f
parent     ff7aeceb6b3a476c3bac66a7f39a5ef4240206fc (diff)
tree-optimization/104064 - UBSAN issue in vect dataref analysis
Since we order DRs after DR_INIT, we know the difference will be positive
and thus can avoid signed overflow issues by using unsigned arithmetic to
produce the known unsigned result.

2022-01-18  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/104064
	* tree-vect-data-refs.cc (vect_analyze_data_ref_accesses):
	Check DR_INIT fits in a signed HWI, represent the difference
	from the first DR in unsigned.
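As an aside, the arithmetic being changed can be illustrated with a minimal
standalone sketch (not GCC code; it assumes int64_t and uint64_t as stand-ins
for HOST_WIDE_INT and unsigned HOST_WIDE_INT, and uses hypothetical local
variables init_a/init_b and type_size_a in place of the real DR_INIT and type
size values).  Because the data references are sorted by DR_INIT, we have
init_b >= init_a, so the mathematical difference is non-negative; doing the
subtraction with one operand cast to the unsigned type yields exactly that
difference without the signed-overflow undefined behaviour a plain
init_b - init_a can trigger:

/* Minimal sketch, not GCC code: int64_t/uint64_t stand in for
   HOST_WIDE_INT / unsigned HOST_WIDE_INT.  */
#include <cstdint>
#include <cstdio>

int
main ()
{
  /* Two offsets that straddle the signed range: the signed subtraction
     init_b - init_a overflows int64_t (undefined behaviour), which is the
     kind of issue UBSAN reports.  */
  int64_t init_a = INT64_MIN / 2 - 1;
  int64_t init_b = INT64_MAX / 2 + 1;

  /* Since the values are ordered (init_b >= init_a), the difference is
     non-negative and fits in the unsigned type; casting one operand to
     uint64_t makes the subtraction well-defined and yields exactly that
     difference.  */
  uint64_t diff = (uint64_t) init_b - init_a;
  uint64_t type_size_a = 8;	/* hypothetical element size */

  printf ("diff = %llu, diff %% size = %llu\n",
	  (unsigned long long) diff,
	  (unsigned long long) (diff % type_size_a));
  return 0;
}

The same reasoning is why the patch also replaces the plain INTEGER_CST
checks with tree_fits_shwi_p: the rewritten unsigned arithmetic relies on
both DR_INIT values actually fitting in a signed HOST_WIDE_INT, as the
ChangeLog entry notes.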
-rw-r--r--  gcc/tree-vect-data-refs.cc | 15
1 file changed, 9 insertions, 6 deletions
diff --git a/gcc/tree-vect-data-refs.cc b/gcc/tree-vect-data-refs.cc
index dd20ed9..09223ba 100644
--- a/gcc/tree-vect-data-refs.cc
+++ b/gcc/tree-vect-data-refs.cc
@@ -3172,8 +3172,8 @@ vect_analyze_data_ref_accesses (vec_info *vinfo,
             break;

           /* Check that the DR_INITs are compile-time constants.  */
-          if (TREE_CODE (DR_INIT (dra)) != INTEGER_CST
-              || TREE_CODE (DR_INIT (drb)) != INTEGER_CST)
+          if (!tree_fits_shwi_p (DR_INIT (dra))
+              || !tree_fits_shwi_p (DR_INIT (drb)))
             break;

           /* Different .GOMP_SIMD_LANE calls still give the same lane,
@@ -3203,15 +3203,18 @@ vect_analyze_data_ref_accesses (vec_info *vinfo,
             {
               /* If init_b == init_a + the size of the type * k, we have an
                  interleaving, and DRA is accessed before DRB.  */
-              HOST_WIDE_INT type_size_a = tree_to_uhwi (sza);
+              unsigned HOST_WIDE_INT type_size_a = tree_to_uhwi (sza);
               if (type_size_a == 0
-                  || (init_b - init_a) % type_size_a != 0)
+                  || (((unsigned HOST_WIDE_INT)init_b - init_a)
+                      % type_size_a != 0))
                 break;

               /* If we have a store, the accesses are adjacent.  This splits
                  groups into chunks we support (we don't support vectorization
                  of stores with gaps).  */
-              if (!DR_IS_READ (dra) && init_b - init_prev != type_size_a)
+              if (!DR_IS_READ (dra)
+                  && (((unsigned HOST_WIDE_INT)init_b - init_prev)
+                      != type_size_a))
                 break;

               /* If the step (if not zero or non-constant) is smaller than the
@@ -3222,7 +3225,7 @@ vect_analyze_data_ref_accesses (vec_info *vinfo,
                   unsigned HOST_WIDE_INT step
                     = absu_hwi (tree_to_shwi (DR_STEP (dra)));
                   if (step != 0
-                      && step <= (unsigned HOST_WIDE_INT)(init_b - init_a))
+                      && step <= ((unsigned HOST_WIDE_INT)init_b - init_a))
                     break;
                 }
             }