   return gfc_finish_block (&se.pre);
 }
+
+/* This is a peculiar case because of the need to do dependency checking.
+   It is called via trans-stmt.c (gfc_trans_call), where it is picked out
+   as a special case and this function is called instead of
+   gfc_conv_procedure_call.  */
+void
+gfc_conv_intrinsic_mvbits (gfc_se *se, gfc_actual_arglist *actual_args,
+                           gfc_loopinfo *loop)
+{
+  gfc_actual_arglist *actual;
+  gfc_se argse[5];
+  gfc_expr *arg[5];
+  gfc_ss *lss;
+  int n;
+
+  tree from, frompos, len, to, topos;
+  tree lenmask, oldbits, newbits, bitsize;
+  tree type, utype, above, mask1, mask2;
+
+  if (loop)
+    lss = loop->ss;
+  else
+    lss = gfc_ss_terminator;
+
+  actual = actual_args;
+  for (n = 0; n < 5; n++, actual = actual->next)
+    {
+      arg[n] = actual->expr;
+      gfc_init_se (&argse[n], NULL);
+
+      if (lss != gfc_ss_terminator)
+        {
+          gfc_copy_loopinfo_to_se (&argse[n], loop);
+          /* Find the ss for the expression if it is there.  */
+          argse[n].ss = lss;
+          gfc_mark_ss_chain_used (lss, 1);
+        }
+
+      gfc_conv_expr (&argse[n], arg[n]);
+
+      if (loop)
+        lss = argse[n].ss;
+    }
+
+  from = argse[0].expr;
+  frompos = argse[1].expr;
+  len = argse[2].expr;
+  to = argse[3].expr;
+  topos = argse[4].expr;
+
+  /* The type of the result (TO).  */
+  type = TREE_TYPE (to);
+  bitsize = build_int_cst (integer_type_node, TYPE_PRECISION (type));
+
+  /* Optionally generate code for runtime argument checking.  */
+  if (gfc_option.rtcheck & GFC_RTCHECK_BITS)
+    {
+      tree nbits, below, ccond;
+      tree fp = fold_convert (long_integer_type_node, frompos);
+      tree ln = fold_convert (long_integer_type_node, len);
+      tree tp = fold_convert (long_integer_type_node, topos);
+      below = fold_build2_loc (input_location, LT_EXPR,
+                               logical_type_node, frompos,
+                               build_int_cst (TREE_TYPE (frompos), 0));
+      above = fold_build2_loc (input_location, GT_EXPR,
+                               logical_type_node, frompos,
+                               fold_convert (TREE_TYPE (frompos), bitsize));
+      ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                               logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, ccond, &argse[1].pre,
+                               &arg[1]->where,
+                               "FROMPOS argument (%ld) out of range 0:%d "
+                               "in intrinsic MVBITS", fp, bitsize);
+      below = fold_build2_loc (input_location, LT_EXPR,
+                               logical_type_node, len,
+                               build_int_cst (TREE_TYPE (len), 0));
+      above = fold_build2_loc (input_location, GT_EXPR,
+                               logical_type_node, len,
+                               fold_convert (TREE_TYPE (len), bitsize));
+      ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                               logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, ccond, &argse[2].pre,
+                               &arg[2]->where,
+                               "LEN argument (%ld) out of range 0:%d "
+                               "in intrinsic MVBITS", ln, bitsize);
+      below = fold_build2_loc (input_location, LT_EXPR,
+                               logical_type_node, topos,
+                               build_int_cst (TREE_TYPE (topos), 0));
+      above = fold_build2_loc (input_location, GT_EXPR,
+                               logical_type_node, topos,
+                               fold_convert (TREE_TYPE (topos), bitsize));
+      ccond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
+                               logical_type_node, below, above);
+      gfc_trans_runtime_check (true, false, ccond, &argse[4].pre,
+                               &arg[4]->where,
+                               "TOPOS argument (%ld) out of range 0:%d "
+                               "in intrinsic MVBITS", tp, bitsize);
+
+      /* The tests above ensure that FROMPOS, LEN and TOPOS fit into short
+         integers, so the additions below cannot overflow.  */
+      nbits = fold_convert (long_integer_type_node, bitsize);
+      above = fold_build2_loc (input_location, PLUS_EXPR,
+                               long_integer_type_node, fp, ln);
+      ccond = fold_build2_loc (input_location, GT_EXPR,
+                               logical_type_node, above, nbits);
+      gfc_trans_runtime_check (true, false, ccond, &argse[1].pre,
+                               &arg[1]->where,
+                               "FROMPOS(%ld)+LEN(%ld)>BIT_SIZE(%d) "
+                               "in intrinsic MVBITS", fp, ln, bitsize);
+      above = fold_build2_loc (input_location, PLUS_EXPR,
+                               long_integer_type_node, tp, ln);
+      ccond = fold_build2_loc (input_location, GT_EXPR,
+                               logical_type_node, above, nbits);
+      gfc_trans_runtime_check (true, false, ccond, &argse[4].pre,
+                               &arg[4]->where,
+                               "TOPOS(%ld)+LEN(%ld)>BIT_SIZE(%d) "
+                               "in intrinsic MVBITS", tp, ln, bitsize);
+    }
+
+  for (n = 0; n < 5; n++)
+    {
+      gfc_add_block_to_block (&se->pre, &argse[n].pre);
+      gfc_add_block_to_block (&se->post, &argse[n].post);
+    }
+
+  /* lenmask = (LEN >= bit_size (TYPE)) ? ~(TYPE)0 : ((TYPE)1 << LEN) - 1.  */
+  above = fold_build2_loc (input_location, GE_EXPR, logical_type_node,
+                           len, fold_convert (TREE_TYPE (len), bitsize));
+  mask1 = build_int_cst (type, -1);
+  mask2 = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+                           build_int_cst (type, 1), len);
+  mask2 = fold_build2_loc (input_location, MINUS_EXPR, type,
+                           mask2, build_int_cst (type, 1));
+  lenmask = fold_build3_loc (input_location, COND_EXPR, type,
+                             above, mask1, mask2);
+
+  /* newbits = (((UTYPE)(FROM) >> FROMPOS) & lenmask) << TOPOS.
+     For valid FROMPOS + LEN <= BIT_SIZE (FROM), the conversion to unsigned
+     is not strictly necessary; the artificial bits from the right shift
+     are masked out anyway.  */
+  utype = unsigned_type_for (type);
+  newbits = fold_build2_loc (input_location, RSHIFT_EXPR, utype,
+                             fold_convert (utype, from), frompos);
+  newbits = fold_build2_loc (input_location, BIT_AND_EXPR, type,
+                             fold_convert (type, newbits), lenmask);
+  newbits = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+                             newbits, topos);
+
+  /* oldbits = TO & (~(lenmask << TOPOS)).  */
+  oldbits = fold_build2_loc (input_location, LSHIFT_EXPR, type,
+                             lenmask, topos);
+  oldbits = fold_build1_loc (input_location, BIT_NOT_EXPR, type, oldbits);
+  oldbits = fold_build2_loc (input_location, BIT_AND_EXPR, type, oldbits, to);
+
+  /* TO = newbits | oldbits.  */
+  se->expr = fold_build2_loc (input_location, BIT_IOR_EXPR, type,
+                              oldbits, newbits);
+
+  /* Return the assignment.  */
+  se->expr = fold_build2_loc (input_location, MODIFY_EXPR,
+                              void_type_node, to, se->expr);
+}
+
+
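For reference, the expression assembled above behaves like the following
minimal C model (assuming a 32-bit integer kind; mvbits_model and the test
values are illustrative only, not part of the patch):

#include <assert.h>

/* Model of the inline expansion: lenmask selects the low LEN bits,
   newbits extracts and repositions them, oldbits clears the target
   field, and the result merges the two.  With -fcheck=bits the patch
   additionally verifies 0 <= FROMPOS, LEN, TOPOS <= 32 as well as
   FROMPOS + LEN <= 32 and TOPOS + LEN <= 32 at runtime.  */
static unsigned int
mvbits_model (unsigned int from, int frompos, int len,
              unsigned int to, int topos)
{
  unsigned int lenmask = (len >= 32) ? ~0u : (1u << len) - 1u;
  unsigned int newbits = ((from >> frompos) & lenmask) << topos;
  unsigned int oldbits = to & ~(lenmask << topos);
  return oldbits | newbits;
}

int
main (void)
{
  /* MVBITS (FROM, 2, 3, TO, 4): move bits 2..4 of FROM to bits 4..6 of TO.
     0x6c >> 2 = 0x1b; & 0x7 = 0x3; << 4 = 0x30.  */
  assert (mvbits_model (0x6c, 2, 3, 0, 4) == 0x30);
  return 0;
}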
 static tree
 conv_intrinsic_move_alloc (gfc_code *code)
 {
       res = conv_intrinsic_kill_sub (code);
       break;
+    case GFC_ISYM_MVBITS:
+      /* MVBITS is not translated here; gfc_trans_call picks it out and
+         inlines it so that argument dependencies are checked.  */
+      res = NULL_TREE;
+      break;
+
     case GFC_ISYM_SYSTEM_CLOCK:
       res = conv_intrinsic_system_clock (code);
       break;
       *sess = new_ss;
       new_ss->next = old_ss->next;
+      /* Make sure that trailing references are not lost.  */
+      if (old_ss->info
+          && old_ss->info->data.array.ref
+          && old_ss->info->data.array.ref->next
+          && !(new_ss->info->data.array.ref
+               && new_ss->info->data.array.ref->next))
+        new_ss->info->data.array.ref = old_ss->info->data.array.ref;
       for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
            loopss = &((*loopss)->loop_chain))
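In isolation, the hunk above replaces old_ss with new_ss in the chain while
letting new_ss inherit a trailing reference list that only the old element
carried.  A stripped-down sketch of the idea (simplified struct layout, not
GCC's actual gfc_ss/gfc_ss_info types):

struct ref { struct ref *next; };
struct ss { struct ss *next; struct ref *ref; };

static void
splice_preserving_refs (struct ss **sess, struct ss *new_ss)
{
  struct ss *old_ss = *sess;

  *sess = new_ss;
  new_ss->next = old_ss->next;
  /* Keep trailing references that only the old element carried.  */
  if (old_ss->ref && old_ss->ref->next
      && !(new_ss->ref && new_ss->ref->next))
    new_ss->ref = old_ss->ref;
}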
   tree index = NULL_TREE;
   tree maskexpr = NULL_TREE;
   tree tmp;
+  bool is_intrinsic_mvbits;
   /* A CALL starts a new block because the actual arguments may have to
      be evaluated first.  */
                                            get_proc_ifc_for_call (code),
                                            GFC_SS_REFERENCE);
+  /* MVBITS is inlined but needs the dependency checking found here.  */
+  is_intrinsic_mvbits = code->resolved_isym
+                        && code->resolved_isym->id == GFC_ISYM_MVBITS;
+
   /* Is not an elemental subroutine call with array valued arguments.  */
   if (ss == gfc_ss_terminator)
     {
-      /* Translate the call.  */
-      has_alternate_specifier
-        = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
-                                   code->expr1, NULL);
+      if (is_intrinsic_mvbits)
+        {
+          has_alternate_specifier = 0;
+          gfc_conv_intrinsic_mvbits (&se, code->ext.actual, NULL);
+        }
+      else
+        {
+          /* Translate the call.  */
+          has_alternate_specifier
+            = gfc_conv_procedure_call (&se, code->resolved_sym,
+                                       code->ext.actual, code->expr1, NULL);
-      /* A subroutine without side-effect, by definition, does nothing!  */
-      TREE_SIDE_EFFECTS (se.expr) = 1;
+          /* A subroutine without side-effect, by definition, does nothing!  */
+          TREE_SIDE_EFFECTS (se.expr) = 1;
+        }
   /* Chain the pieces together and return the block.  */
   if (has_alternate_specifier)
                                     TREE_TYPE (maskexpr), maskexpr);
         }
-      /* Add the subroutine call to the block.  */
-      gfc_conv_procedure_call (&loopse, code->resolved_sym,
-                               code->ext.actual, code->expr1,
-                               NULL);
+      if (is_intrinsic_mvbits)
+        {
+          has_alternate_specifier = 0;
+          gfc_conv_intrinsic_mvbits (&loopse, code->ext.actual, &loop);
+        }
+      else
+        {
+          /* Add the subroutine call to the block.  */
+          gfc_conv_procedure_call (&loopse, code->resolved_sym,
+                                   code->ext.actual, code->expr1,
+                                   NULL);
+        }
       if (mask && count1)
         {
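Taken together, the two call-site hunks give gfc_trans_call the following
overall shape for MVBITS (a condensed paraphrase of the code above, not a
verbatim excerpt):

  if (ss == gfc_ss_terminator)
    {
      /* Scalar arguments: emit the inline expansion directly.  */
      if (is_intrinsic_mvbits)
        gfc_conv_intrinsic_mvbits (&se, code->ext.actual, NULL);
      else
        gfc_conv_procedure_call (...);
    }
  else
    {
      /* Elemental call with array arguments: the scalarization loop and
         its dependency checking are set up first, then the expansion is
         emitted once per loop iteration.  */
      if (is_intrinsic_mvbits)
        gfc_conv_intrinsic_mvbits (&loopse, code->ext.actual, &loop);
      else
        gfc_conv_procedure_call (...);
    }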