/* Convert RTL to assembler code and output it, for GNU compiler.
- Copyright (C) 1987-2013 Free Software Foundation, Inc.
+ Copyright (C) 1987-2015 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
-
-#include "tree.h"
+#include "backend.h"
+#include "target.h"
#include "rtl.h"
+#include "tree.h"
+#include "cfghooks.h"
+#include "df.h"
#include "tm_p.h"
-#include "regs.h"
#include "insn-config.h"
-#include "insn-attr.h"
+#include "regs.h"
+#include "emit-rtl.h"
#include "recog.h"
+#include "cgraph.h"
+#include "tree-pretty-print.h" /* for dump_function_header */
+#include "varasm.h"
+#include "insn-attr.h"
#include "conditions.h"
#include "flags.h"
-#include "hard-reg-set.h"
#include "output.h"
#include "except.h"
-#include "function.h"
#include "rtl-error.h"
#include "toplev.h" /* exact_log2, floor_log2 */
#include "reload.h"
#include "intl.h"
-#include "basic-block.h"
-#include "target.h"
-#include "targhooks.h"
+#include "cfgrtl.h"
#include "debug.h"
-#include "expr.h"
#include "tree-pass.h"
-#include "tree-flow.h"
-#include "cgraph.h"
-#include "coverage.h"
-#include "df.h"
-#include "ggc.h"
+#include "tree-ssa.h"
#include "cfgloop.h"
#include "params.h"
-#include "tree-pretty-print.h" /* for dump_function_header */
+#include "asan.h"
+#include "rtl-iter.h"
+#include "print-rtl.h"
#ifdef XCOFF_DEBUGGING_INFO
-#include "xcoffout.h" /* Needed for external data
- declarations for e.g. AIX 4.x. */
+#include "xcoffout.h" /* Needed for external data declarations. */
#endif
#include "dwarf2out.h"
#endif
/* Bitflags used by final_scan_insn. */
-#define SEEN_BB 1
-#define SEEN_NOTE 2
-#define SEEN_EMITTED 4
+#define SEEN_NOTE 1
+#define SEEN_EMITTED 2
/* Last insn processed by final_scan_insn. */
-static rtx debug_insn;
-rtx current_output_insn;
+static rtx_insn *debug_insn;
+rtx_insn *current_output_insn;
/* Line number of last NOTE. */
static int last_linenum;
/* Nonzero while outputting an `asm' with operands.
This means that inconsistencies are the user's fault, so don't die.
The precise value is the insn being output, to pass to error_for_asm. */
-rtx this_is_asm_operands;
+const rtx_insn *this_is_asm_operands;
/* Number of operands of this insn, for an `asm' with operands. */
static unsigned int insn_noperands;
static int insn_counter = 0;
-#ifdef HAVE_cc0
/* This variable contains machine-dependent flags (defined in tm.h)
set and examined by output routines
that describe how to interpret the condition codes properly. */
from before the insn. */
CC_STATUS cc_prev_status;
-#endif
/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
/* If we are outputting an insn sequence, this contains the sequence rtx.
Zero otherwise. */
-rtx final_sequence;
+rtx_sequence *final_sequence;
#ifdef ASSEMBLER_DIALECT
static int asm_insn_count (rtx);
static void profile_function (FILE *);
static void profile_after_prologue (FILE *);
-static bool notice_source_line (rtx, bool *);
+static bool notice_source_line (rtx_insn *, bool *);
static rtx walk_alter_subreg (rtx *, bool *);
static void output_asm_name (void);
-static void output_alternate_entry_point (FILE *, rtx);
+static void output_alternate_entry_point (FILE *, rtx_insn *);
static tree get_mem_expr_from_op (rtx, int *);
static void output_asm_operand_names (rtx *, int *, int);
#ifdef LEAF_REGISTERS
-static void leaf_renumber_regs (rtx);
+static void leaf_renumber_regs (rtx_insn *);
#endif
-#ifdef HAVE_cc0
+#if HAVE_cc0
static int alter_cond (rtx);
#endif
#ifndef ADDR_VEC_ALIGN
static int final_addr_vec_align (rtx);
#endif
static int align_fuzz (rtx, rtx, int, unsigned);
+static void collect_fn_hard_reg_usage (void);
+static tree get_call_fndecl (rtx_insn *);
\f
/* Initialize data in final at the beginning of a compilation. */
delayed branch sequence (we don't count the insn needing the
delay slot). Zero if not in a delayed branch sequence. */
-#ifdef DELAY_SLOTS
int
dbr_sequence_length (void)
{
else
return 0;
}
-#endif
\f
/* The next two pages contain routines used to compute the length of an insn
and to shorten branches. */
/* Obtain the current length of an insn. If branch shortening has been done,
get its actual length. Otherwise, use FALLBACK_FN to calculate the
length. */
-static inline int
-get_attr_length_1 (rtx insn, int (*fallback_fn) (rtx))
+static int
+get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
{
rtx body;
int i;
else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
length = asm_insn_count (body) * fallback_fn (insn);
- else if (GET_CODE (body) == SEQUENCE)
- for (i = 0; i < XVECLEN (body, 0); i++)
- length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
+ else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
+ for (i = 0; i < seq->len (); i++)
+ length += get_attr_length_1 (seq->insn (i), fallback_fn);
else
length = fallback_fn (insn);
break;
/* Obtain the current length of an insn. If branch shortening has been done,
get its actual length. Otherwise, get its maximum length. */
int
-get_attr_length (rtx insn)
+get_attr_length (rtx_insn *insn)
{
return get_attr_length_1 (insn, insn_default_length);
}
/* Obtain the current length of an insn. If branch shortening has been done,
get its actual length. Otherwise, get its minimum length. */
int
-get_attr_min_length (rtx insn)
+get_attr_min_length (rtx_insn *insn)
{
return get_attr_length_1 (insn, insn_min_length);
}
#endif
int
-default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED)
+default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
return 0;
}
int
-default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
+default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
return align_loops_max_skip;
}
int
-default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
+default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
return align_labels_max_skip;
}
int
-default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
+default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
return align_jumps_max_skip;
}
to exclude the branch size. */
int
-insn_current_reference_address (rtx branch)
+insn_current_reference_address (rtx_insn *branch)
{
- rtx dest, seq;
+ rtx dest;
int seq_uid;
if (! INSN_ADDRESSES_SET_P ())
return 0;
- seq = NEXT_INSN (PREV_INSN (branch));
+ rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
seq_uid = INSN_UID (seq);
if (!JUMP_P (branch))
/* This can happen for example on the PA; the objective is to know the
flow_loops_dump (dump_file, NULL, 1);
}
loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
- FOR_EACH_BB (bb)
+ FOR_EACH_BB_FN (bb, cfun)
if (bb->frequency > freq_max)
freq_max = bb->frequency;
freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
if (dump_file)
- fprintf(dump_file, "freq_max: %i\n",freq_max);
- FOR_EACH_BB (bb)
+ fprintf (dump_file, "freq_max: %i\n",freq_max);
+ FOR_EACH_BB_FN (bb, cfun)
{
- rtx label = BB_HEAD (bb);
+ rtx_insn *label = BB_HEAD (bb);
int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
edge e;
edge_iterator ei;
|| optimize_bb_for_size_p (bb))
{
if (dump_file)
- fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
- bb->index, bb->frequency, bb->loop_father->num,
- bb_loop_depth (bb));
+ fprintf (dump_file,
+ "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
+ bb->index, bb->frequency, bb->loop_father->num,
+ bb_loop_depth (bb));
continue;
}
max_log = LABEL_ALIGN (label);
}
if (dump_file)
{
- fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
- bb->index, bb->frequency, bb->loop_father->num,
- bb_loop_depth (bb),
- fallthru_frequency, branch_frequency);
+ fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
+ " %2i fall %4i branch %4i",
+ bb->index, bb->frequency, bb->loop_father->num,
+ bb_loop_depth (bb),
+ fallthru_frequency, branch_frequency);
if (!bb->loop_father->inner && bb->loop_father->num)
fprintf (dump_file, " inner_loop");
if (bb->loop_father->header == bb)
&& (branch_frequency > freq_threshold
|| (bb->frequency > bb->prev_bb->frequency * 10
&& (bb->prev_bb->frequency
- <= ENTRY_BLOCK_PTR->frequency / 2))))
+ <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
{
log = JUMP_ALIGN (label);
if (dump_file)
- fprintf(dump_file, " jump alignment added.\n");
+ fprintf (dump_file, " jump alignment added.\n");
if (max_log < log)
{
max_log = log;
/* In case block is frequent and reached mostly by non-fallthru edge,
align it. It is most likely a first block of loop. */
if (has_fallthru
+ && !(single_succ_p (bb)
+ && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
&& optimize_bb_for_speed_p (bb)
&& branch_frequency + fallthru_frequency > freq_threshold
&& (branch_frequency
{
log = LOOP_ALIGN (label);
if (dump_file)
- fprintf(dump_file, " internal loop alignment added.\n");
+ fprintf (dump_file, " internal loop alignment added.\n");
if (max_log < log)
{
max_log = log;
return 0;
}
-struct rtl_opt_pass pass_compute_alignments =
+/* Grow the LABEL_ALIGN array after new labels are created. */
+
+static void
+grow_label_align (void)
+{
+ int old = max_labelno;
+ int n_labels;
+ int n_old_labels;
+
+ max_labelno = max_label_num ();
+
+ n_labels = max_labelno - min_labelno + 1;
+ n_old_labels = old - min_labelno + 1;
+
+ label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
+
+  /* Range of labels grows monotonically in the function.  Failing here
+     means that the initialization of the array got lost.  */
+ gcc_assert (n_old_labels <= n_labels);
+
+ memset (label_align + n_old_labels, 0,
+ (n_labels - n_old_labels) * sizeof (struct label_alignment));
+}
+
+/* Update the already computed alignment information. LABEL_PAIRS is a vector
+ made up of pairs of labels for which the alignment information of the first
+ element will be copied from that of the second element. */
+
+void
+update_alignments (vec<rtx> &label_pairs)
+{
+ unsigned int i = 0;
+ rtx iter, label = NULL_RTX;
+
+ if (max_labelno != max_label_num ())
+ grow_label_align ();
+
+ FOR_EACH_VEC_ELT (label_pairs, i, iter)
+ if (i & 1)
+ {
+ LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
+ LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
+ }
+ else
+ label = iter;
+}
+
+namespace {
+
+const pass_data pass_data_compute_alignments =
{
- {
- RTL_PASS,
- "alignments", /* name */
- OPTGROUP_NONE, /* optinfo_flags */
- NULL, /* gate */
- compute_alignments, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_NONE, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- TODO_verify_rtl_sharing
- | TODO_ggc_collect /* todo_flags_finish */
- }
+ RTL_PASS, /* type */
+ "alignments", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_NONE, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
};
+class pass_compute_alignments : public rtl_opt_pass
+{
+public:
+ pass_compute_alignments (gcc::context *ctxt)
+ : rtl_opt_pass (pass_data_compute_alignments, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ virtual unsigned int execute (function *) { return compute_alignments (); }
+
+}; // class pass_compute_alignments
+
+} // anon namespace
+
+rtl_opt_pass *
+make_pass_compute_alignments (gcc::context *ctxt)
+{
+ return new pass_compute_alignments (ctxt);
+}
+
\f
/* Make a pass over all insns and compute their actual lengths by shortening
any branches of variable length if possible. */
slots. */
void
-shorten_branches (rtx first)
+shorten_branches (rtx_insn *first)
{
- rtx insn;
+ rtx_insn *insn;
int max_uid;
int i;
int max_log;
int max_skip;
#define MAX_CODE_ALIGN 16
- rtx seq;
+ rtx_insn *seq;
int something_changed = 1;
char *varying_length;
rtx body;
uid_shuid = XNEWVEC (int, max_uid);
if (max_labelno != max_label_num ())
- {
- int old = max_labelno;
- int n_labels;
- int n_old_labels;
-
- max_labelno = max_label_num ();
-
- n_labels = max_labelno - min_labelno + 1;
- n_old_labels = old - min_labelno + 1;
-
- label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
-
- /* Range of labels grows monotonically in the function. Failing here
- means that the initialization of array got lost. */
- gcc_assert (n_old_labels <= n_labels);
-
- memset (label_align + n_old_labels, 0,
- (n_labels - n_old_labels) * sizeof (struct label_alignment));
- }
+ grow_label_align ();
/* Initialize label_align and set up uid_shuid to be strictly
monotonically rising with insn order. */
if (LABEL_P (insn))
{
- rtx next;
+ rtx_insn *next;
bool next_is_jumptable;
/* Merge in alignments computed by compute_alignments. */
}
else if (BARRIER_P (insn))
{
- rtx label;
+ rtx_insn *label;
for (label = insn; label && ! INSN_P (label);
label = NEXT_INSN (label))
#endif /* CASE_VECTOR_SHORTEN_MODE */
/* Compute initial lengths, addresses, and varying flags for each insn. */
- int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length;
+ int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
for (insn_current_address = 0, insn = first;
insn != 0;
INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
if (NOTE_P (insn) || BARRIER_P (insn)
- || LABEL_P (insn) || DEBUG_INSN_P(insn))
+ || LABEL_P (insn) || DEBUG_INSN_P (insn))
continue;
- if (INSN_DELETED_P (insn))
+ if (insn->deleted ())
continue;
body = PATTERN (insn);
}
else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
- else if (GET_CODE (body) == SEQUENCE)
+ else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
{
int i;
int const_delay_slots;
-#ifdef DELAY_SLOTS
- const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
-#else
- const_delay_slots = 0;
-#endif
- int (*inner_length_fun) (rtx)
+ if (DELAY_SLOTS)
+ const_delay_slots = const_num_delay_slots (body_seq->insn (0));
+ else
+ const_delay_slots = 0;
+
+ int (*inner_length_fun) (rtx_insn *)
= const_delay_slots ? length_fun : insn_default_length;
/* Inside a delay slot sequence, we do not do any branch shortening
if the shortening could change the number of delay slots
of the branch. */
- for (i = 0; i < XVECLEN (body, 0); i++)
+ for (i = 0; i < body_seq->len (); i++)
{
- rtx inner_insn = XVECEXP (body, 0, i);
+ rtx_insn *inner_insn = body_seq->insn (i);
int inner_uid = INSN_UID (inner_insn);
int inner_length;
if (GET_CODE (body) == ASM_INPUT
- || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
+ || asm_noperands (PATTERN (inner_insn)) >= 0)
inner_length = (asm_insn_count (PATTERN (inner_insn))
* insn_default_length (inner_insn));
else
#ifdef CASE_VECTOR_SHORTEN_MODE
/* If the mode of a following jump table was changed, we
may need to update the alignment of this label. */
- rtx next;
+ rtx_insn *next;
bool next_is_jumptable;
next = next_nonnote_insn (insn);
{
rtx body = PATTERN (insn);
int old_length = insn_lengths[uid];
- rtx rel_lab = XEXP (XEXP (body, 0), 0);
+ rtx_insn *rel_lab =
+ safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
rtx min_lab = XEXP (XEXP (body, 2), 0);
rtx max_lab = XEXP (XEXP (body, 3), 0);
int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
- rtx prev;
+ rtx_insn *prev;
int rel_align = 0;
addr_diff_vec_flags flags;
- enum machine_mode vec_mode;
+ machine_mode vec_mode;
/* Avoid automatic aggregate initialization. */
flags = ADDR_DIFF_VEC_FLAGS (body);
if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
{
+ rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
int i;
body = PATTERN (insn);
new_length = 0;
- for (i = 0; i < XVECLEN (body, 0); i++)
+ for (i = 0; i < seqn->len (); i++)
{
- rtx inner_insn = XVECEXP (body, 0, i);
+ rtx_insn *inner_insn = seqn->insn (i);
int inner_uid = INSN_UID (inner_insn);
int inner_length;
/* Structure recording the mapping from source file and directory
names at compile time to those to be embedded in debug
information. */
-typedef struct debug_prefix_map
+struct debug_prefix_map
{
const char *old_prefix;
const char *new_prefix;
size_t old_len;
size_t new_len;
struct debug_prefix_map *next;
-} debug_prefix_map;
+};
/* Linked list of such structures. */
-debug_prefix_map *debug_prefix_maps;
+static debug_prefix_map *debug_prefix_maps;
/* Record a debug file prefix mapping. ARG is the argument to
/* Emit lexical block notes needed to change scope from S1 to S2. */
static void
-change_scope (rtx orig_insn, tree s1, tree s2)
+change_scope (rtx_insn *orig_insn, tree s1, tree s2)
{
- rtx insn = orig_insn;
+ rtx_insn *insn = orig_insn;
tree com = NULL_TREE;
tree ts1 = s1, ts2 = s2;
tree s;
s = s1;
while (s != com)
{
- rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
+ rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
NOTE_BLOCK (note) = s;
s = BLOCK_SUPERCONTEXT (s);
}
reemit_insn_block_notes (void)
{
tree cur_block = DECL_INITIAL (cfun->decl);
- rtx insn, note;
+ rtx_insn *insn;
+ rtx_note *note;
insn = get_insns ();
- if (!active_insn_p (insn))
- insn = next_active_insn (insn);
- for (; insn; insn = next_active_insn (insn))
+ for (; insn; insn = NEXT_INSN (insn))
{
tree this_block;
+ /* Prevent lexical blocks from straddling section boundaries. */
+ if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
+ {
+ for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
+ s = BLOCK_SUPERCONTEXT (s))
+ {
+ rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
+ NOTE_BLOCK (note) = s;
+ note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
+ NOTE_BLOCK (note) = s;
+ }
+ }
+
+ if (!active_insn_p (insn))
+ continue;
+
/* Avoid putting scope notes between jump table and its label. */
if (JUMP_TABLE_DATA_P (insn))
continue;
this_block = insn_scope (insn);
/* For sequences compute scope resulting from merging all scopes
of instructions nested inside. */
- if (GET_CODE (PATTERN (insn)) == SEQUENCE)
+ if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
{
int i;
- rtx body = PATTERN (insn);
this_block = NULL;
- for (i = 0; i < XVECLEN (body, 0); i++)
+ for (i = 0; i < body->len (); i++)
this_block = choose_inner_scope (this_block,
- insn_scope (XVECEXP (body, 0, i)));
+ insn_scope (body->insn (i)));
}
if (! this_block)
{
reorder_blocks ();
}
+static const char *some_local_dynamic_name;
+
+/* Locate some local-dynamic symbol still in use by this function
+ so that we can print its name in local-dynamic base patterns.
+ Return null if there are no local-dynamic references. */
+
+const char *
+get_some_local_dynamic_name ()
+{
+ subrtx_iterator::array_type array;
+ rtx_insn *insn;
+
+ if (some_local_dynamic_name)
+ return some_local_dynamic_name;
+
+ for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
+ if (NONDEBUG_INSN_P (insn))
+ FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
+ {
+ const_rtx x = *iter;
+ if (GET_CODE (x) == SYMBOL_REF)
+ {
+ if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
+ return some_local_dynamic_name = XSTR (x, 0);
+ if (CONSTANT_POOL_ADDRESS_P (x))
+ iter.substitute (get_pool_constant (x));
+ }
+ }
+
+ return 0;
+}
+
/* Output assembler code for the start of a function,
and initialize some of the variables in this file
for the new function. The label for the function and associated
test and compare insns. */
void
-final_start_function (rtx first, FILE *file,
+final_start_function (rtx_insn *first, FILE *file,
int optimize_p ATTRIBUTE_UNUSED)
{
block_depth = 0;
high_block_linenum = high_function_linenum = last_linenum;
+ if (flag_sanitize & SANITIZE_ADDRESS)
+ asan_function_start ();
+
if (!DECL_IGNORED_P (current_function_decl))
debug_hooks->begin_prologue (last_linenum, last_filename);
if the profiling code comes after the prologue. */
if (targetm.profile_before_prologue () && crtl->profile)
{
- if (targetm.asm_out.function_prologue
- == default_function_pro_epilogue
-#ifdef HAVE_prologue
- && HAVE_prologue
-#endif
- )
+ if (targetm.asm_out.function_prologue == default_function_pro_epilogue
+ && targetm.have_prologue ())
{
- rtx insn;
+ rtx_insn *insn;
for (insn = first; insn; insn = NEXT_INSN (insn))
if (!NOTE_P (insn))
{
- insn = NULL_RTX;
+ insn = NULL;
break;
}
else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
continue;
else
{
- insn = NULL_RTX;
+ insn = NULL;
break;
}
/* If the machine represents the prologue as RTL, the profiling code must
be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
-#ifdef HAVE_prologue
- if (! HAVE_prologue)
-#endif
+ if (! targetm.have_prologue ())
profile_after_prologue (file);
}
if (!dwarf2_debug_info_emitted_p (current_function_decl)
&& dwarf2out_do_frame ())
dwarf2out_end_epilogue (last_linenum, last_filename);
+
+ some_local_dynamic_name = 0;
}
\f
output file, and INSN is the instruction being emitted. */
static void
-dump_basic_block_info (FILE *file, rtx insn, basic_block *start_to_bb,
+dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
{
basic_block bb;
if (bb->frequency)
fprintf (file, " freq:%d", bb->frequency);
if (bb->count)
- fprintf (file, " count:" HOST_WIDEST_INT_PRINT_DEC,
+ fprintf (file, " count:%" PRId64,
bb->count);
fprintf (file, " seq:%d", (*bb_seqn)++);
fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
For description of args, see `final_start_function', above. */
void
-final (rtx first, FILE *file, int optimize_p)
+final (rtx_insn *first, FILE *file, int optimize_p)
{
- rtx insn, next;
+ rtx_insn *insn, *next;
int seen = 0;
/* Used for -dA dump. */
last_ignored_compare = 0;
-#ifdef HAVE_cc0
- for (insn = first; insn; insn = NEXT_INSN (insn))
- {
- /* If CC tracking across branches is enabled, record the insn which
- jumps to each branch only reached from one place. */
- if (optimize_p && JUMP_P (insn))
- {
- rtx lab = JUMP_LABEL (insn);
- if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
- {
- LABEL_REFS (lab) = insn;
- }
- }
- }
-#endif
+ if (HAVE_cc0)
+ for (insn = first; insn; insn = NEXT_INSN (insn))
+ {
+ /* If CC tracking across branches is enabled, record the insn which
+ jumps to each branch only reached from one place. */
+ if (optimize_p && JUMP_P (insn))
+ {
+ rtx lab = JUMP_LABEL (insn);
+ if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
+ {
+ LABEL_REFS (lab) = insn;
+ }
+ }
+ }
init_recog ();
/* There is no cfg for a thunk. */
if (!cfun->is_thunk)
- FOR_EACH_BB_REVERSE (bb)
+ FOR_EACH_BB_REVERSE_FN (bb, cfun)
{
start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
end_to_bb[INSN_UID (BB_END (bb))] = bb;
return insn_data[code].output.multi[which_alternative];
case INSN_OUTPUT_FORMAT_FUNCTION:
gcc_assert (insn);
- return (*insn_data[code].output.function) (recog_data.operand, insn);
+ return (*insn_data[code].output.function) (recog_data.operand,
+ as_a <rtx_insn *> (insn));
default:
gcc_unreachable ();
The case fall-through in this function is intentional. */
static void
-output_alternate_entry_point (FILE *file, rtx insn)
+output_alternate_entry_point (FILE *file, rtx_insn *insn)
{
const char *name = LABEL_NAME (insn);
/* Given a CALL_INSN, find and return the nested CALL. */
static rtx
-call_from_call_insn (rtx insn)
+call_from_call_insn (rtx_call_insn *insn)
{
rtx x;
gcc_assert (CALL_P (insn));
SEEN is used to track the end of the prologue, for emitting
debug information. We force the emission of a line note after
- both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
- at the beginning of the second basic block, whichever comes
- first. */
+ both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
-rtx
-final_scan_insn (rtx insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
+rtx_insn *
+final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
int nopeepholes ATTRIBUTE_UNUSED, int *seen)
{
-#ifdef HAVE_cc0
+#if HAVE_cc0
rtx set;
#endif
- rtx next;
+ rtx_insn *next;
insn_counter++;
/* Ignore deleted insns. These can occur when we split insns (due to a
template of "#") while not optimizing. */
- if (INSN_DELETED_P (insn))
+ if (insn->deleted ())
return NEXT_INSN (insn);
switch (GET_CODE (insn))
switch (NOTE_KIND (insn))
{
case NOTE_INSN_DELETED:
+ case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
break;
case NOTE_INSN_SWITCH_TEXT_SECTIONS:
targetm.asm_out.function_switched_text_sections (asm_out_file,
current_function_decl,
in_cold_section_p);
+ /* Emit a label for the split cold section. Form label name by
+ suffixing "cold" to the original function's name. */
+ if (in_cold_section_p)
+ {
+ cold_function_name
+ = clone_function_name (current_function_decl, "cold");
+#ifdef ASM_DECLARE_COLD_FUNCTION_NAME
+ ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
+ IDENTIFIER_POINTER
+ (cold_function_name),
+ current_function_decl);
+#else
+ ASM_OUTPUT_LABEL (asm_out_file,
+ IDENTIFIER_POINTER (cold_function_name));
+#endif
+ }
break;
case NOTE_INSN_BASIC_BLOCK:
if (targetm.asm_out.unwind_emit)
targetm.asm_out.unwind_emit (asm_out_file, insn);
- if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
- {
- *seen |= SEEN_EMITTED;
- force_source_line = true;
- }
- else
- *seen |= SEEN_BB;
-
discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
break;
CC_STATUS_INIT;
if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
- debug_hooks->label (insn);
+ debug_hooks->label (as_a <rtx_code_label *> (insn));
app_disable ();
|| GET_CODE (body) == CLOBBER)
break;
-#ifdef HAVE_cc0
+#if HAVE_cc0
{
/* If there is a REG_CC_SETTER note on this insn, it means that
the setting of the condition code was done in the delay slot
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
if (note)
{
- NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
+ rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
+ NOTICE_UPDATE_CC (PATTERN (other), other);
cc_prev_status = cc_status;
}
}
app_disable ();
- if (GET_CODE (body) == SEQUENCE)
+ if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
{
/* A delayed-branch sequence */
int i;
- final_sequence = body;
+ final_sequence = seq;
/* The first insn in this SEQUENCE might be a JUMP_INSN that will
force the restoration of a comparison that was previously
thought unnecessary. If that happens, cancel this sequence
and cause that insn to be restored. */
- next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
- if (next != XVECEXP (body, 0, 1))
+ next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
+ if (next != seq->insn (1))
{
final_sequence = 0;
return next;
}
- for (i = 1; i < XVECLEN (body, 0); i++)
+ for (i = 1; i < seq->len (); i++)
{
- rtx insn = XVECEXP (body, 0, i);
- rtx next = NEXT_INSN (insn);
+ rtx_insn *insn = seq->insn (i);
+ rtx_insn *next = NEXT_INSN (insn);
/* We loop in case any instruction in a delay slot gets
split. */
do
called function. Hence we don't preserve any CC-setting
actions in these insns and the CC must be marked as being
clobbered by the function. */
- if (CALL_P (XVECEXP (body, 0, 0)))
+ if (CALL_P (seq->insn (0)))
{
CC_STATUS_INIT;
}
body = PATTERN (insn);
-#ifdef HAVE_cc0
+#if HAVE_cc0
set = single_set (insn);
/* Check for redundant test and compare instructions
#endif
-#ifdef HAVE_peephole
/* Do machine-specific peephole optimizations if desired. */
- if (optimize_p && !flag_no_peephole && !nopeepholes)
+ if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
{
- rtx next = peephole (insn);
+ rtx_insn *next = peephole (insn);
/* When peepholing, if there were notes within the peephole,
emit them before the peephole. */
if (next != 0 && next != NEXT_INSN (insn))
{
- rtx note, prev = PREV_INSN (insn);
+ rtx_insn *note, *prev = PREV_INSN (insn);
for (note = NEXT_INSN (insn); note != next;
note = NEXT_INSN (note))
when generating a far jump in a delayed branch
sequence. */
note = NEXT_INSN (insn);
- PREV_INSN (note) = prev;
- NEXT_INSN (prev) = note;
- NEXT_INSN (PREV_INSN (next)) = insn;
- PREV_INSN (insn) = PREV_INSN (next);
- NEXT_INSN (insn) = next;
- PREV_INSN (next) = insn;
+ SET_PREV_INSN (note) = prev;
+ SET_NEXT_INSN (prev) = note;
+ SET_NEXT_INSN (PREV_INSN (next)) = insn;
+ SET_PREV_INSN (insn) = PREV_INSN (next);
+ SET_NEXT_INSN (insn) = next;
+ SET_PREV_INSN (next) = insn;
}
/* PEEPHOLE might have changed this. */
body = PATTERN (insn);
}
-#endif
/* Try to recognize the instruction.
If successful, verify that the operands satisfy the
print_rtx_head = "";
}
- if (! constrain_operands_cached (1))
+ if (! constrain_operands_cached (insn, 1))
fatal_insn_not_found (insn);
/* Some target machines need to prescan each insn before
&& GET_CODE (PATTERN (insn)) == COND_EXEC)
current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
-#ifdef HAVE_cc0
+#if HAVE_cc0
cc_prev_status = cc_status;
/* Update `cc_status' for this instruction.
needs to be reinserted. */
if (templ == 0)
{
- rtx prev;
+ rtx_insn *prev;
gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
be split. */
if (templ[0] == '#' && templ[1] == '\0')
{
- rtx new_rtx = try_split (body, insn, 0);
+ rtx_insn *new_rtx = try_split (body, insn, 0);
/* If we didn't split the insn, go away. */
if (new_rtx == insn && PATTERN (new_rtx) == body)
&& targetm.asm_out.unwind_emit)
targetm.asm_out.unwind_emit (asm_out_file, insn);
- if (CALL_P (insn))
+ if (rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn))
{
- rtx x = call_from_call_insn (insn);
+ rtx x = call_from_call_insn (call_insn);
x = XEXP (x, 0);
if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
{
breakpoint location. */
static bool
-notice_source_line (rtx insn, bool *is_stmt)
+notice_source_line (rtx_insn *insn, bool *is_stmt)
{
const char *filename;
int linenum;
filename = override_filename;
linenum = override_linenum;
}
+ else if (INSN_HAS_LOCATION (insn))
+ {
+ expanded_location xloc = insn_location (insn);
+ filename = xloc.file;
+ linenum = xloc.line;
+ }
else
{
- filename = insn_file (insn);
- linenum = insn_line (insn);
+ filename = NULL;
+ linenum = 0;
}
if (filename == NULL)
directly to the desired hard register. */
void
-cleanup_subreg_operands (rtx insn)
+cleanup_subreg_operands (rtx_insn *insn)
{
int i;
bool changed = false;
else
*xp = adjust_address_nv (y, GET_MODE (x), offset);
}
- else
+ else if (REG_P (y) && HARD_REGISTER_P (y))
{
rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
SUBREG_BYTE (x));
return *xp;
}
\f
-#ifdef HAVE_cc0
+#if HAVE_cc0
/* Given BODY, the body of a jump instruction, alter the jump condition
as required by the bits that are set in cc_status.flags.
va_start (ap, cmsgid);
pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
- asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
- vasprintf (&new_message, fmt_string, ap);
+ fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
+ new_message = xvasprintf (fmt_string, ap);
if (this_is_asm_operands)
error_for_asm (this_is_asm_operands, "%s", new_message);
DIALECT_NUMBER of strings ending with '|'. */
for (i = 0; i < dialect_number; i++)
{
- while (*p && *p != '}' && *p++ != '|')
- ;
+ while (*p && *p != '}')
+ {
+ if (*p == '|')
+ {
+ p++;
+ break;
+ }
+
+ /* Skip over any character after a percent sign. */
+ if (*p == '%')
+ p++;
+ if (*p)
+ p++;
+ }
+
if (*p == '}')
break;
}
output_operand_lossage ("unterminated assembly dialect alternative");
break;
}
+
+ /* Skip over any character after a percent sign. */
+ if (*p == '%' && p[1])
+ {
+ p += 2;
+ continue;
+ }
+
+ if (*p++ == '}')
+ break;
}
- while (*p++ != '}');
+ while (1);
+
*dialect = 0;
}
else
#endif
case '%':
- /* %% outputs a single %. */
- if (*p == '%')
+ /* %% outputs a single %. %{, %} and %| print {, } and | respectively
+	 if ASSEMBLER_DIALECT is defined and these characters have a special
+	 meaning as dialect delimiters.  */
+ if (*p == '%'
+#ifdef ASSEMBLER_DIALECT
+ || *p == '{' || *p == '}' || *p == '|'
+#endif
+ )
{
+ putc (*p, asm_out_file);
p++;
- putc (c, asm_out_file);
}
/* %= outputs a number which is unique to each insn in the entire
compilation. This is useful for making local labels that are
char buf[256];
if (GET_CODE (x) == LABEL_REF)
- x = XEXP (x, 0);
+ x = LABEL_REF_LABEL (x);
if (LABEL_P (x)
|| (NOTE_P (x)
&& NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
assemble_name (asm_out_file, buf);
}
-/* Helper rtx-iteration-function for mark_symbol_refs_as_used and
- output_operand. Marks SYMBOL_REFs as referenced through use of
- assemble_external. */
-
-static int
-mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
-{
- rtx x = *xp;
-
- /* If we have a used symbol, we may have to emit assembly
- annotations corresponding to whether the symbol is external, weak
- or has non-default visibility. */
- if (GET_CODE (x) == SYMBOL_REF)
- {
- tree t;
-
- t = SYMBOL_REF_DECL (x);
- if (t)
- assemble_external (t);
-
- return -1;
- }
-
- return 0;
-}
-
/* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
void
mark_symbol_refs_as_used (rtx x)
{
- for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, x, ALL)
+ {
+ /* Use a name distinct from the X parameter for the iterated rtx,
+ so the walk does not shadow it (-Wshadow); the two also have
+ different types (rtx vs. const_rtx). */
+ const_rtx sub = *iter;
+ /* For a used symbol we may have to emit assembly annotations
+ (external, weak, visibility) via assemble_external. */
+ if (GET_CODE (sub) == SYMBOL_REF)
+ if (tree t = SYMBOL_REF_DECL (sub))
+ assemble_external (t);
+ }
}
/* Print operand X using machine-dependent assembler syntax.
x = alter_subreg (&x, true);
/* X must not be a pseudo reg. */
- gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
+ if (!targetm.no_register_allocation)
+ gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
targetm.asm_out.print_operand (asm_out_file, x, code);
if (x == NULL_RTX)
return;
- for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
+ mark_symbol_refs_as_used (x);
}
/* Print a memory reference operand for address X using
break;
case LABEL_REF:
- x = XEXP (x, 0);
+ x = LABEL_REF_LABEL (x);
/* Fall through. */
case CODE_LABEL:
ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
output_addr_const (file, XEXP (x, 0));
break;
+ case CONST_WIDE_INT:
+ /* We do not know the mode here so we have to use a round about
+ way to build a wide-int to get it printed properly. */
+ {
+ wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
+ CONST_WIDE_INT_NUNITS (x),
+ CONST_WIDE_INT_NUNITS (x)
+ * HOST_BITS_PER_WIDE_INT,
+ false);
+ print_decs (w, file);
+ }
+ break;
+
case CONST_DOUBLE:
- if (GET_MODE (x) == VOIDmode)
+ if (CONST_DOUBLE_AS_INT_P (x))
{
/* We can use %d if the number is one word and positive. */
if (CONST_DOUBLE_HIGH (x))
int
sprint_ul (char *s, unsigned long value)
{
- int len;
- char tmp_c;
- int i;
- int j;
-
- len = sprint_ul_rev (s, value);
+ /* sprint_ul_rev writes the decimal digits of VALUE into S
+ least-significant first and returns how many it wrote. */
+ int len = sprint_ul_rev (s, value);
s[len] = '\0';
- /* Reverse the string. */
- i = 0;
- j = len - 1;
- while (i < j)
- {
- tmp_c = s[i];
- s[i] = s[j];
- s[j] = tmp_c;
- i++; j--;
- }
-
+ /* Flip the digits into conventional most-significant-first order. */
+ std::reverse (s, s + len);
return len;
}
int
leaf_function_p (void)
{
- rtx insn;
+ rtx_insn *insn;
- if (crtl->profile || profile_arc_flag)
+ /* Some back-ends (e.g. s390) want leaf functions to stay leaf
+ functions even if they call mcount. */
+ if (crtl->profile && !targetm.keep_leaf_when_profiled ())
return 0;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
output templates to customary add branch prediction hints.
*/
int
-final_forward_branch_p (rtx insn)
+final_forward_branch_p (rtx_insn *insn)
{
int insn_id, label_id;
available in leaf functions. */
static void
-leaf_renumber_regs (rtx first)
+leaf_renumber_regs (rtx_insn *first)
{
- rtx insn;
+ rtx_insn *insn;
/* Renumber only the actual patterns.
The reg-notes can contain frame pointer refs,
df_set_regs_ever_live (newreg, true);
SET_REGNO (in_rtx, newreg);
in_rtx->used = 1;
+ return;
}
if (INSN_P (in_rtx))
static unsigned int
rest_of_handle_final (void)
{
- rtx x;
- const char *fnname;
-
- /* Get the function's name, as described by its RTL. This may be
- different from the DECL_NAME name used in the source file. */
-
- x = DECL_RTL (current_function_decl);
- gcc_assert (MEM_P (x));
- x = XEXP (x, 0);
- gcc_assert (GET_CODE (x) == SYMBOL_REF);
- fnname = XSTR (x, 0);
+ const char *fnname = get_fnname_from_decl (current_function_decl);
assemble_start_function (current_function_decl, fnname);
final_start_function (get_insns (), asm_out_file, optimize);
final (get_insns (), asm_out_file, optimize);
+ if (flag_ipa_ra)
+ collect_fn_hard_reg_usage ();
final_end_function ();
/* The IA-64 ".handlerdata" directive must be issued before the ".endp"
return 0;
}
-struct rtl_opt_pass pass_final =
+namespace {
+
+/* Static metadata for the "final" RTL pass (assembly output). */
+const pass_data pass_data_final =
{
- {
- RTL_PASS,
- "final", /* name */
- OPTGROUP_NONE, /* optinfo_flags */
- NULL, /* gate */
- rest_of_handle_final, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_FINAL, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- TODO_ggc_collect /* todo_flags_finish */
- }
+ RTL_PASS, /* type */
+ "final", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_FINAL, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
};
+/* Pass object; execute () simply forwards to rest_of_handle_final. */
+class pass_final : public rtl_opt_pass
+{
+public:
+ pass_final (gcc::context *ctxt)
+ : rtl_opt_pass (pass_data_final, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ virtual unsigned int execute (function *) { return rest_of_handle_final (); }
+
+}; // class pass_final
+
+} // anon namespace
+
+/* Factory entry point for pass_final. */
+rtl_opt_pass *
+make_pass_final (gcc::context *ctxt)
+{
+ return new pass_final (ctxt);
+}
+
+
static unsigned int
rest_of_handle_shorten_branches (void)
return 0;
}
-struct rtl_opt_pass pass_shorten_branches =
+namespace {
+
+/* Static metadata for the branch-shortening RTL pass. */
+const pass_data pass_data_shorten_branches =
{
- {
- RTL_PASS,
- "shorten", /* name */
- OPTGROUP_NONE, /* optinfo_flags */
- NULL, /* gate */
- rest_of_handle_shorten_branches, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_SHORTEN_BRANCH, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- 0 /* todo_flags_finish */
- }
+ RTL_PASS, /* type */
+ "shorten", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_SHORTEN_BRANCH, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
};
+/* Pass object; execute () forwards to rest_of_handle_shorten_branches. */
+class pass_shorten_branches : public rtl_opt_pass
+{
+public:
+ pass_shorten_branches (gcc::context *ctxt)
+ : rtl_opt_pass (pass_data_shorten_branches, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ virtual unsigned int execute (function *)
+ {
+ return rest_of_handle_shorten_branches ();
+ }
+
+}; // class pass_shorten_branches
+
+} // anon namespace
+
+/* Factory entry point for pass_shorten_branches. */
+rtl_opt_pass *
+make_pass_shorten_branches (gcc::context *ctxt)
+{
+ return new pass_shorten_branches (ctxt);
+}
+
+
static unsigned int
rest_of_clean_state (void)
{
- rtx insn, next;
+ rtx_insn *insn, *next;
FILE *final_output = NULL;
int save_unnumbered = flag_dump_unnumbered;
int save_noaddr = flag_dump_noaddr;
for (insn = get_insns (); insn; insn = next)
{
next = NEXT_INSN (insn);
- NEXT_INSN (insn) = NULL;
- PREV_INSN (insn) = NULL;
+ SET_NEXT_INSN (insn) = NULL;
+ SET_PREV_INSN (insn) = NULL;
if (final_output
&& (!NOTE_P (insn) ||
free_bb_for_insn ();
- delete_tree_ssa ();
+ delete_tree_ssa (cfun);
/* We can reduce stack alignment on call site only when we are sure that
the function body just produced will be actually used in the final
unsigned int pref = crtl->preferred_stack_boundary;
if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
pref = crtl->stack_alignment_needed;
- cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
- = pref;
+ cgraph_node::rtl_info (current_function_decl)
+ ->preferred_incoming_stack_boundary = pref;
}
/* Make sure volatile mem refs aren't considered valid operands for
return 0;
}
-struct rtl_opt_pass pass_clean_state =
+namespace {
+
+/* Static metadata for the *clean_state pass; note it destroys PROP_rtl. */
+const pass_data pass_data_clean_state =
{
- {
- RTL_PASS,
- "*clean_state", /* name */
- OPTGROUP_NONE, /* optinfo_flags */
- NULL, /* gate */
- rest_of_clean_state, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_FINAL, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- PROP_rtl, /* properties_destroyed */
- 0, /* todo_flags_start */
- 0 /* todo_flags_finish */
- }
+ RTL_PASS, /* type */
+ "*clean_state", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_FINAL, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ PROP_rtl, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
};
+
+/* Pass object; execute () forwards to rest_of_clean_state. */
+class pass_clean_state : public rtl_opt_pass
+{
+public:
+ pass_clean_state (gcc::context *ctxt)
+ : rtl_opt_pass (pass_data_clean_state, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ virtual unsigned int execute (function *)
+ {
+ return rest_of_clean_state ();
+ }
+
+}; // class pass_clean_state
+
+} // anon namespace
+
+/* Factory entry point for pass_clean_state. */
+rtl_opt_pass *
+make_pass_clean_state (gcc::context *ctxt)
+{
+ return new pass_clean_state (ctxt);
+}
+
+
+/* Check whether call insn INSN targets the function currently being
+   compiled, and that its decl binds to this very definition (so the
+   call cannot be redirected elsewhere).  */
+
+static bool
+self_recursive_call_p (rtx_insn *insn)
+{
+  tree callee = get_call_fndecl (insn);
+  if (callee != current_function_decl)
+    return false;
+  return decl_binds_to_current_def_p (callee);
+}
+
+/* Collect hard register usage for the current function. */
+
+static void
+collect_fn_hard_reg_usage (void)
+{
+ rtx_insn *insn;
+#ifdef STACK_REGS
+ int i;
+#endif
+ struct cgraph_rtl_info *node;
+ HARD_REG_SET function_used_regs;
+
+ /* ??? To be removed when all the ports have been fixed. */
+ if (!targetm.call_fusage_contains_non_callee_clobbers)
+ return;
+
+ CLEAR_HARD_REG_SET (function_used_regs);
+
+ for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
+ {
+ HARD_REG_SET insn_used_regs;
+
+ if (!NONDEBUG_INSN_P (insn))
+ continue;
+
+ /* Self-recursive calls are skipped: their register usage is exactly
+ the summary we are in the middle of computing. */
+ if (CALL_P (insn)
+ && !self_recursive_call_p (insn))
+ {
+ /* If the callee's usage is unknown, abandon collection entirely;
+ the node's summary is left invalid, which consumers treat as
+ "may use anything" (see get_call_reg_set_usage). */
+ if (!get_call_reg_set_usage (insn, &insn_used_regs,
+ call_used_reg_set))
+ return;
+
+ IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
+ }
+
+ /* Also accumulate the registers this insn itself sets/clobbers. */
+ find_all_hard_reg_sets (insn, &insn_used_regs, false);
+ IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
+ }
+
+ /* Be conservative - mark fixed and global registers as used. */
+ IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);
+
+#ifdef STACK_REGS
+ /* Handle STACK_REGS conservatively, since the df-framework does not
+ provide accurate information for them. */
+
+ for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
+ SET_HARD_REG_BIT (function_used_regs, i);
+#endif
+
+ /* The information we have gathered is only interesting if it exposes a
+ register from the call_used_regs that is not used in this function. */
+ if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
+ return;
+
+ /* Publish the summary on the cgraph node so callers compiled later can
+ consult it (used when flag_ipa_ra is on). */
+ node = cgraph_node::rtl_info (current_function_decl);
+ gcc_assert (node != NULL);
+
+ COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
+ node->function_used_regs_valid = 1;
+}
+
+/* Extract the declaration of the function called by INSN from the insn's
+   REG_CALL_DECL note.  Return NULL_TREE when there is no such note or it
+   records no declaration.  */
+
+static tree
+get_call_fndecl (rtx_insn *insn)
+{
+  rtx note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
+  if (note == NULL_RTX)
+    return NULL_TREE;
+
+  rtx datum = XEXP (note, 0);
+  return datum != NULL_RTX ? SYMBOL_REF_DECL (datum) : NULL_TREE;
+}
+
+/* Look up the cgraph_rtl_info for the function INSN calls.  Return NULL
+   when there is no known callee, or when the callee's definition could be
+   overridden (does not bind to the current def).  */
+
+static struct cgraph_rtl_info *
+get_call_cgraph_rtl_info (rtx_insn *insn)
+{
+  if (insn == NULL_RTX)
+    return NULL;
+
+  tree fndecl = get_call_fndecl (insn);
+  if (fndecl != NULL_TREE && decl_binds_to_current_def_p (fndecl))
+    return cgraph_node::rtl_info (fndecl);
+
+  return NULL;
+}
+
+/* Compute in *REG_SET the hard registers used by call insn INSN.  With
+   -fipa-ra and a valid callee summary, this is the callee's recorded
+   usage intersected with DEFAULT_SET, and true is returned; otherwise
+   *REG_SET is just DEFAULT_SET and false is returned.  */
+
+bool
+get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
+			HARD_REG_SET default_set)
+{
+  struct cgraph_rtl_info *node
+    = flag_ipa_ra ? get_call_cgraph_rtl_info (insn) : NULL;
+
+  if (node == NULL || !node->function_used_regs_valid)
+    {
+      /* No usable per-callee information: conservative fallback.  */
+      COPY_HARD_REG_SET (*reg_set, default_set);
+      return false;
+    }
+
+  /* Registers the callee actually touches, clipped to DEFAULT_SET.  */
+  COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
+  AND_HARD_REG_SET (*reg_set, default_set);
+  return true;
+}