aboutsummaryrefslogtreecommitdiff
path: root/gcc/fortran/trans-intrinsic.c
diff options
context:
space:
mode:
authorHarald Anlauf <anlauf@gmx.de>2020-09-21 21:50:36 +0200
committerHarald Anlauf <anlauf@gmx.de>2020-09-21 21:50:36 +0200
commit5c5ce6099082b642294091c83461c928bd028ea1 (patch)
tree03fa12b75d3c0a820e83d45d9a270a832f3fb347 /gcc/fortran/trans-intrinsic.c
parent762c16eba6b815090c56564a293cd059aea2e1d6 (diff)
downloadgcc-5c5ce6099082b642294091c83461c928bd028ea1.zip
gcc-5c5ce6099082b642294091c83461c928bd028ea1.tar.gz
gcc-5c5ce6099082b642294091c83461c928bd028ea1.tar.bz2
PR fortran/90903 [part2] - Add runtime checking for the MVBITS intrinsic
Implement inline expansion of the intrinsic elemental subroutine MVBITS
with optional runtime checks for valid argument range.

gcc/fortran/ChangeLog:

	* iresolve.c (gfc_resolve_mvbits): Remove unneeded conversion of
	FROMPOS, LEN and TOPOS arguments to fit a C int.
	* trans-intrinsic.c (gfc_conv_intrinsic_mvbits): Add inline
	expansion of MVBITS intrinsic elemental subroutine and add code
	for runtime argument checking.
	(gfc_conv_intrinsic_subroutine): Recognise MVBITS intrinsic, but
	defer handling to gfc_trans_call.
	* trans-stmt.c (replace_ss):
	(gfc_trans_call): Adjust to handle inline expansion,
	scalarization of intrinsic subroutine MVBITS in
	gfc_conv_intrinsic_mvbits.
	* trans.h (gfc_conv_intrinsic_mvbits): Add prototype for
	gfc_conv_intrinsic_mvbits.

gcc/testsuite/ChangeLog:

	* gfortran.dg/check_bits_2.f90: New test.

Co-authored-by: Paul Thomas <pault@gcc.gnu.org>
Diffstat (limited to 'gcc/fortran/trans-intrinsic.c')
-rw-r--r--gcc/fortran/trans-intrinsic.c167
1 file changed, 167 insertions, 0 deletions
diff --git a/gcc/fortran/trans-intrinsic.c b/gcc/fortran/trans-intrinsic.c
index 32fe988..3b3bd86 100644
--- a/gcc/fortran/trans-intrinsic.c
+++ b/gcc/fortran/trans-intrinsic.c
@@ -11790,6 +11790,169 @@ conv_intrinsic_event_query (gfc_code *code)
return gfc_finish_block (&se.pre);
}
+
+/* This is a peculiar case because of the need to do dependency checking.
+ It is called via trans-stmt.c(gfc_trans_call), where it is picked out as
+ a special case and this function called instead of
+ gfc_conv_procedure_call.
+
+ Inline-expands CALL MVBITS (FROM, FROMPOS, LEN, TO, TOPOS) into bit
+ operations on TO, optionally preceded by runtime range checks on the
+ position/length arguments. SE receives the pre/post blocks and the
+ resulting MODIFY_EXPR; LOOP is the enclosing scalarization loop when
+ the call is elemental over arrays, or NULL for the scalar case. */
+void
+gfc_conv_intrinsic_mvbits (gfc_se *se, gfc_actual_arglist *actual_args,
+ gfc_loopinfo *loop)
+{
+ gfc_actual_arglist *actual;
+ gfc_se argse[5];
+ gfc_expr *arg[5];
+ gfc_ss *lss;
+ int n;
+
+ tree from, frompos, len, to, topos;
+ tree lenmask, oldbits, newbits, bitsize;
+ tree type, utype, above, mask1, mask2;
+
+ /* Start from the scalarizer's ss chain when inside a loop, otherwise
+ from the empty chain terminator. */
+ if (loop)
+ lss = loop->ss;
+ else
+ lss = gfc_ss_terminator;
+
+ /* Convert the five actual arguments (FROM, FROMPOS, LEN, TO, TOPOS)
+ in order, advancing along the ss chain when scalarizing. */
+ actual = actual_args;
+ for (n = 0; n < 5; n++, actual = actual->next)
+ {
+ arg[n] = actual->expr;
+ gfc_init_se (&argse[n], NULL);
+
+ if (lss != gfc_ss_terminator)
+ {
+ gfc_copy_loopinfo_to_se (&argse[n], loop);
+ /* Find the ss for the expression if it is there. */
+ argse[n].ss = lss;
+ gfc_mark_ss_chain_used (lss, 1);
+ }
+
+ gfc_conv_expr (&argse[n], arg[n]);
+
+ if (loop)
+ lss = argse[n].ss;
+ }
+
+ from = argse[0].expr;
+ frompos = argse[1].expr;
+ len = argse[2].expr;
+ to = argse[3].expr;
+ topos = argse[4].expr;
+
+ /* The type of the result (TO). */
+ type = TREE_TYPE (to);
+ bitsize = build_int_cst (integer_type_node, TYPE_PRECISION (type));
+
+ /* Optionally generate code for runtime argument check. */
+ if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+ {
+ tree nbits, below, ccond;
+ /* Widen to long for the %ld format arguments of the error messages. */
+ tree fp = fold_convert (long_integer_type_node, frompos);
+ tree ln = fold_convert (long_integer_type_node, len);
+ tree tp = fold_convert (long_integer_type_node, topos);
+ /* Check 0 <= FROMPOS <= BIT_SIZE (TO). */
+ below = fold_build2_loc (input_location, LT_EXPR,
+ logical_type_node, frompos,
+ build_int_cst (TREE_TYPE (frompos), 0));
+ above = fold_build2_loc (input_location, GT_EXPR,
+ logical_type_node, frompos,
+ fold_convert (TREE_TYPE (frompos), bitsize));
+ ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+ logical_type_node, below, above);
+ gfc_trans_runtime_check (true, false, ccond, &argse[1].pre,
+ &arg[1]->where,
+ "FROMPOS argument (%ld) out of range 0:%d "
+ "in intrinsic MVBITS", fp, bitsize);
+ /* Check 0 <= LEN <= BIT_SIZE (TO). */
+ below = fold_build2_loc (input_location, LT_EXPR,
+ logical_type_node, len,
+ build_int_cst (TREE_TYPE (len), 0));
+ above = fold_build2_loc (input_location, GT_EXPR,
+ logical_type_node, len,
+ fold_convert (TREE_TYPE (len), bitsize));
+ ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+ logical_type_node, below, above);
+ gfc_trans_runtime_check (true, false, ccond, &argse[2].pre,
+ &arg[2]->where,
+ "LEN argument (%ld) out of range 0:%d "
+ "in intrinsic MVBITS", ln, bitsize);
+ /* Check 0 <= TOPOS <= BIT_SIZE (TO). */
+ below = fold_build2_loc (input_location, LT_EXPR,
+ logical_type_node, topos,
+ build_int_cst (TREE_TYPE (topos), 0));
+ above = fold_build2_loc (input_location, GT_EXPR,
+ logical_type_node, topos,
+ fold_convert (TREE_TYPE (topos), bitsize));
+ ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+ logical_type_node, below, above);
+ gfc_trans_runtime_check (true, false, ccond, &argse[4].pre,
+ &arg[4]->where,
+ "TOPOS argument (%ld) out of range 0:%d "
+ "in intrinsic MVBITS", tp, bitsize);
+
+ /* The tests above ensure that FROMPOS, LEN and TOPOS fit into short
+ integers. Additions below cannot overflow. */
+ nbits = fold_convert (long_integer_type_node, bitsize);
+ above = fold_build2_loc (input_location, PLUS_EXPR,
+ long_integer_type_node, fp, ln);
+ ccond = fold_build2_loc (input_location, GT_EXPR,
+ logical_type_node, above, nbits);
+ gfc_trans_runtime_check (true, false, ccond, &argse[1].pre,
+ &arg[1]->where,
+ "FROMPOS(%ld)+LEN(%ld)>BIT_SIZE(%d) "
+ "in intrinsic MVBITS", fp, ln, bitsize);
+ above = fold_build2_loc (input_location, PLUS_EXPR,
+ long_integer_type_node, tp, ln);
+ ccond = fold_build2_loc (input_location, GT_EXPR,
+ logical_type_node, above, nbits);
+ gfc_trans_runtime_check (true, false, ccond, &argse[4].pre,
+ &arg[4]->where,
+ "TOPOS(%ld)+LEN(%ld)>BIT_SIZE(%d) "
+ "in intrinsic MVBITS", tp, ln, bitsize);
+ }
+
+ /* Collect the pre/post blocks of all five arguments (including any
+ runtime checks appended above) into the caller's se. */
+ for (n = 0; n < 5; n++)
+ {
+ gfc_add_block_to_block (&se->pre, &argse[n].pre);
+ gfc_add_block_to_block (&se->post, &argse[n].post);
+ }
+
+ /* lenmask = (LEN >= bit_size (TYPE)) ? ~(TYPE)0 : ((TYPE)1 << LEN) - 1
+ The conditional avoids the undefined full-width shift when
+ LEN == bit_size (TYPE). */
+ above = fold_build2_loc (input_location, GE_EXPR, logical_type_node,
+ len, fold_convert (TREE_TYPE (len), bitsize));
+ mask1 = build_int_cst (type, -1);
+ mask2 = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+ build_int_cst (type, 1), len);
+ mask2 = fold_build2_loc (input_location, MINUS_EXPR, type,
+ mask2, build_int_cst (type, 1));
+ lenmask = fold_build3_loc (input_location, COND_EXPR, type,
+ above, mask1, mask2);
+
+ /* newbits = (((UTYPE)(FROM) >> FROMPOS) & lenmask) << TOPOS.
+ * For valid frompos+len <= bit_size(FROM) the conversion to unsigned is
+ * not strictly necessary; artificial bits from rshift will be masked. */
+ utype = unsigned_type_for (type);
+ newbits = fold_build2_loc (input_location, RSHIFT_EXPR, utype,
+ fold_convert (utype, from), frompos);
+ newbits = fold_build2_loc (input_location, BIT_AND_EXPR, type,
+ fold_convert (type, newbits), lenmask);
+ newbits = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+ newbits, topos);
+
+ /* oldbits = TO & (~(lenmask << TOPOS)). */
+ oldbits = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+ lenmask, topos);
+ oldbits = fold_build1_loc (input_location, BIT_NOT_EXPR, type, oldbits);
+ oldbits = fold_build2_loc (input_location, BIT_AND_EXPR, type, oldbits, to);
+
+ /* TO = newbits | oldbits. */
+ se->expr = fold_build2_loc (input_location, BIT_IOR_EXPR, type,
+ oldbits, newbits);
+
+ /* Return the assignment. */
+ se->expr = fold_build2_loc (input_location, MODIFY_EXPR,
+ void_type_node, to, se->expr);
+}
+
+
static tree
conv_intrinsic_move_alloc (gfc_code *code)
{
@@ -12119,6 +12282,10 @@ gfc_conv_intrinsic_subroutine (gfc_code *code)
res = conv_intrinsic_kill_sub (code);
break;
+ case GFC_ISYM_MVBITS:
+ res = NULL_TREE;
+ break;
+
case GFC_ISYM_SYSTEM_CLOCK:
res = conv_intrinsic_system_clock (code);
break;