35522f16125cd28e9314f7419b0e7e460e4c933c
[gcc.git] / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
22
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
28
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
33
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
36
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
40
41 The code for the function prologue and epilogue are generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "backend.h"
49 #include "target.h"
50 #include "rtl.h"
51 #include "tree.h"
52 #include "cfghooks.h"
53 #include "df.h"
54 #include "tm_p.h"
55 #include "insn-config.h"
56 #include "regs.h"
57 #include "emit-rtl.h"
58 #include "recog.h"
59 #include "cgraph.h"
60 #include "tree-pretty-print.h" /* for dump_function_header */
61 #include "varasm.h"
62 #include "insn-attr.h"
63 #include "conditions.h"
64 #include "flags.h"
65 #include "output.h"
66 #include "except.h"
67 #include "rtl-error.h"
68 #include "toplev.h" /* exact_log2, floor_log2 */
69 #include "reload.h"
70 #include "intl.h"
71 #include "cfgrtl.h"
72 #include "debug.h"
73 #include "tree-pass.h"
74 #include "tree-ssa.h"
75 #include "cfgloop.h"
76 #include "params.h"
77 #include "asan.h"
78 #include "rtl-iter.h"
79 #include "print-rtl.h"
80
81 #ifdef XCOFF_DEBUGGING_INFO
82 #include "xcoffout.h" /* Needed for external data declarations. */
83 #endif
84
85 #include "dwarf2out.h"
86
87 #ifdef DBX_DEBUGGING_INFO
88 #include "dbxout.h"
89 #endif
90
91 #ifdef SDB_DEBUGGING_INFO
92 #include "sdbout.h"
93 #endif
94
95 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
96 So define a null default for it to save conditionalization later. */
97 #ifndef CC_STATUS_INIT
98 #define CC_STATUS_INIT
99 #endif
100
101 /* Is the given character a logical line separator for the assembler? */
102 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
103 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
104 #endif
105
106 #ifndef JUMP_TABLES_IN_TEXT_SECTION
107 #define JUMP_TABLES_IN_TEXT_SECTION 0
108 #endif
109
110 /* Bitflags used by final_scan_insn. */
111 #define SEEN_NOTE 1
112 #define SEEN_EMITTED 2
113
114 /* Last insn processed by final_scan_insn. */
115 static rtx_insn *debug_insn;
116 rtx_insn *current_output_insn;
117
118 /* Line number of last NOTE. */
119 static int last_linenum;
120
121 /* Last discriminator written to assembly. */
122 static int last_discriminator;
123
124 /* Discriminator of current block. */
125 static int discriminator;
126
127 /* Highest line number in current block. */
128 static int high_block_linenum;
129
130 /* Likewise for function. */
131 static int high_function_linenum;
132
133 /* Filename of last NOTE. */
134 static const char *last_filename;
135
136 /* Override filename and line number. */
137 static const char *override_filename;
138 static int override_linenum;
139
140 /* Whether to force emission of a line note before the next insn. */
141 static bool force_source_line = false;
142
143 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
144
145 /* Nonzero while outputting an `asm' with operands.
146 This means that inconsistencies are the user's fault, so don't die.
147 The precise value is the insn being output, to pass to error_for_asm. */
148 const rtx_insn *this_is_asm_operands;
149
150 /* Number of operands of this insn, for an `asm' with operands. */
151 static unsigned int insn_noperands;
152
153 /* Compare optimization flag. */
154
155 static rtx last_ignored_compare = 0;
156
157 /* Assign a unique number to each insn that is output.
158 This can be used to generate unique local labels. */
159
160 static int insn_counter = 0;
161
162 /* This variable contains machine-dependent flags (defined in tm.h)
163 set and examined by output routines
164 that describe how to interpret the condition codes properly. */
165
166 CC_STATUS cc_status;
167
168 /* During output of an insn, this contains a copy of cc_status
169 from before the insn. */
170
171 CC_STATUS cc_prev_status;
172
173 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
174
175 static int block_depth;
176
177 /* Nonzero if have enabled APP processing of our assembler output. */
178
179 static int app_on;
180
181 /* If we are outputting an insn sequence, this contains the sequence rtx.
182 Zero otherwise. */
183
184 rtx_sequence *final_sequence;
185
186 #ifdef ASSEMBLER_DIALECT
187
188 /* Number of the assembler dialect to use, starting at 0. */
189 static int dialect_number;
190 #endif
191
192 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
193 rtx current_insn_predicate;
194
195 /* True if printing into -fdump-final-insns= dump. */
196 bool final_insns_dump_p;
197
198 /* True if profile_function should be called, but hasn't been called yet. */
199 static bool need_profile_function;
200
201 static int asm_insn_count (rtx);
202 static void profile_function (FILE *);
203 static void profile_after_prologue (FILE *);
204 static bool notice_source_line (rtx_insn *, bool *);
205 static rtx walk_alter_subreg (rtx *, bool *);
206 static void output_asm_name (void);
207 static void output_alternate_entry_point (FILE *, rtx_insn *);
208 static tree get_mem_expr_from_op (rtx, int *);
209 static void output_asm_operand_names (rtx *, int *, int);
210 #ifdef LEAF_REGISTERS
211 static void leaf_renumber_regs (rtx_insn *);
212 #endif
213 #if HAVE_cc0
214 static int alter_cond (rtx);
215 #endif
216 #ifndef ADDR_VEC_ALIGN
217 static int final_addr_vec_align (rtx);
218 #endif
219 static int align_fuzz (rtx, rtx, int, unsigned);
220 static void collect_fn_hard_reg_usage (void);
221 static tree get_call_fndecl (rtx_insn *);
222 \f
223 /* Initialize data in final at the beginning of a compilation. */
224
225 void
226 init_final (const char *filename ATTRIBUTE_UNUSED)
227 {
228 app_on = 0;
229 final_sequence = 0;
230
231 #ifdef ASSEMBLER_DIALECT
232 dialect_number = ASSEMBLER_DIALECT;
233 #endif
234 }
235
236 /* Default target function prologue and epilogue assembler output.
237
238 If not overridden for epilogue code, then the function body itself
239 contains return instructions wherever needed. */
240 void
241 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
243 {
244 }
245
246 void
247 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
248 tree decl ATTRIBUTE_UNUSED,
249 bool new_is_cold ATTRIBUTE_UNUSED)
250 {
251 }
252
253 /* Default target hook that outputs nothing to a stream. */
254 void
255 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
256 {
257 }
258
259 /* Enable APP processing of subsequent output.
260 Used before the output from an `asm' statement. */
261
262 void
263 app_enable (void)
264 {
265 if (! app_on)
266 {
267 fputs (ASM_APP_ON, asm_out_file);
268 app_on = 1;
269 }
270 }
271
272 /* Disable APP processing of subsequent output.
273 Called from varasm.c before most kinds of output. */
274
275 void
276 app_disable (void)
277 {
278 if (app_on)
279 {
280 fputs (ASM_APP_OFF, asm_out_file);
281 app_on = 0;
282 }
283 }
284 \f
285 /* Return the number of slots filled in the current
286 delayed branch sequence (we don't count the insn needing the
287 delay slot). Zero if not in a delayed branch sequence. */
288
289 int
290 dbr_sequence_length (void)
291 {
292 if (final_sequence != 0)
293 return XVECLEN (final_sequence, 0) - 1;
294 else
295 return 0;
296 }
297 \f
298 /* The next two pages contain routines used to compute the length of an insn
299 and to shorten branches. */
300
301 /* Arrays for insn lengths, and addresses. The latter is referenced by
302 `insn_current_length'. */
303
304 static int *insn_lengths;
305
306 vec<int> insn_addresses_;
307
308 /* Max uid for which the above arrays are valid. */
309 static int insn_lengths_max_uid;
310
311 /* Address of insn being processed. Used by `insn_current_length'. */
312 int insn_current_address;
313
314 /* Address of insn being processed in previous iteration. */
315 int insn_last_address;
316
317 /* known invariant alignment of insn being processed. */
318 int insn_current_align;
319
320 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
321 gives the next following alignment insn that increases the known
322 alignment, or NULL_RTX if there is no such insn.
323 For any alignment obtained this way, we can again index uid_align with
324 its uid to obtain the next following align that in turn increases the
325 alignment, till we reach NULL_RTX; the sequence obtained this way
326 for each insn we'll call the alignment chain of this insn in the following
327 comments. */
328
329 struct label_alignment
330 {
331 short alignment;
332 short max_skip;
333 };
334
335 static rtx *uid_align;
336 static int *uid_shuid;
337 static struct label_alignment *label_align;
338
339 /* Indicate that branch shortening hasn't yet been done. */
340
341 void
342 init_insn_lengths (void)
343 {
344 if (uid_shuid)
345 {
346 free (uid_shuid);
347 uid_shuid = 0;
348 }
349 if (insn_lengths)
350 {
351 free (insn_lengths);
352 insn_lengths = 0;
353 insn_lengths_max_uid = 0;
354 }
355 if (HAVE_ATTR_length)
356 INSN_ADDRESSES_FREE ();
357 if (uid_align)
358 {
359 free (uid_align);
360 uid_align = 0;
361 }
362 }
363
364 /* Obtain the current length of an insn. If branch shortening has been done,
365 get its actual length. Otherwise, use FALLBACK_FN to calculate the
366 length. */
367 static int
368 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
369 {
370 rtx body;
371 int i;
372 int length = 0;
373
374 if (!HAVE_ATTR_length)
375 return 0;
376
377 if (insn_lengths_max_uid > INSN_UID (insn))
378 return insn_lengths[INSN_UID (insn)];
379 else
380 switch (GET_CODE (insn))
381 {
382 case NOTE:
383 case BARRIER:
384 case CODE_LABEL:
385 case DEBUG_INSN:
386 return 0;
387
388 case CALL_INSN:
389 case JUMP_INSN:
390 length = fallback_fn (insn);
391 break;
392
393 case INSN:
394 body = PATTERN (insn);
395 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
396 return 0;
397
398 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
399 length = asm_insn_count (body) * fallback_fn (insn);
400 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
401 for (i = 0; i < seq->len (); i++)
402 length += get_attr_length_1 (seq->insn (i), fallback_fn);
403 else
404 length = fallback_fn (insn);
405 break;
406
407 default:
408 break;
409 }
410
411 #ifdef ADJUST_INSN_LENGTH
412 ADJUST_INSN_LENGTH (insn, length);
413 #endif
414 return length;
415 }
416
417 /* Obtain the current length of an insn. If branch shortening has been done,
418 get its actual length. Otherwise, get its maximum length. */
419 int
420 get_attr_length (rtx_insn *insn)
421 {
422 return get_attr_length_1 (insn, insn_default_length);
423 }
424
425 /* Obtain the current length of an insn. If branch shortening has been done,
426 get its actual length. Otherwise, get its minimum length. */
427 int
428 get_attr_min_length (rtx_insn *insn)
429 {
430 return get_attr_length_1 (insn, insn_min_length);
431 }
432 \f
433 /* Code to handle alignment inside shorten_branches. */
434
435 /* Here is an explanation how the algorithm in align_fuzz can give
436 proper results:
437
438 Call a sequence of instructions beginning with alignment point X
439 and continuing until the next alignment point `block X'. When `X'
440 is used in an expression, it means the alignment value of the
441 alignment point.
442
443 Call the distance between the start of the first insn of block X, and
444 the end of the last insn of block X `IX', for the `inner size of X'.
445 This is clearly the sum of the instruction lengths.
446
447 Likewise with the next alignment-delimited block following X, which we
448 shall call block Y.
449
450 Call the distance between the start of the first insn of block X, and
451 the start of the first insn of block Y `OX', for the `outer size of X'.
452
453 The estimated padding is then OX - IX.
454
455 OX can be safely estimated as
456
457 if (X >= Y)
458 OX = round_up(IX, Y)
459 else
460 OX = round_up(IX, X) + Y - X
461
462 Clearly est(IX) >= real(IX), because that only depends on the
463 instruction lengths, and those being overestimated is a given.
464
465 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
466 we needn't worry about that when thinking about OX.
467
468 When X >= Y, the alignment provided by Y adds no uncertainty factor
469 for branch ranges starting before X, so we can just round what we have.
470 But when X < Y, we don't know anything about the, so to speak,
471 `middle bits', so we have to assume the worst when aligning up from an
472 address mod X to one mod Y, which is Y - X. */
473
474 #ifndef LABEL_ALIGN
475 #define LABEL_ALIGN(LABEL) align_labels_log
476 #endif
477
478 #ifndef LOOP_ALIGN
479 #define LOOP_ALIGN(LABEL) align_loops_log
480 #endif
481
482 #ifndef LABEL_ALIGN_AFTER_BARRIER
483 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
484 #endif
485
486 #ifndef JUMP_ALIGN
487 #define JUMP_ALIGN(LABEL) align_jumps_log
488 #endif
489
490 int
491 default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
492 {
493 return 0;
494 }
495
496 int
497 default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
498 {
499 return align_loops_max_skip;
500 }
501
502 int
503 default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
504 {
505 return align_labels_max_skip;
506 }
507
508 int
509 default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
510 {
511 return align_jumps_max_skip;
512 }
513
514 #ifndef ADDR_VEC_ALIGN
515 static int
516 final_addr_vec_align (rtx addr_vec)
517 {
518 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
519
520 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
521 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
522 return exact_log2 (align);
523
524 }
525
526 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
527 #endif
528
529 #ifndef INSN_LENGTH_ALIGNMENT
530 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
531 #endif
532
533 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
534
535 static int min_labelno, max_labelno;
536
537 #define LABEL_TO_ALIGNMENT(LABEL) \
538 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
539
540 #define LABEL_TO_MAX_SKIP(LABEL) \
541 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
542
543 /* For the benefit of port specific code do this also as a function. */
544
545 int
546 label_to_alignment (rtx label)
547 {
548 if (CODE_LABEL_NUMBER (label) <= max_labelno)
549 return LABEL_TO_ALIGNMENT (label);
550 return 0;
551 }
552
553 int
554 label_to_max_skip (rtx label)
555 {
556 if (CODE_LABEL_NUMBER (label) <= max_labelno)
557 return LABEL_TO_MAX_SKIP (label);
558 return 0;
559 }
560
561 /* The differences in addresses
562 between a branch and its target might grow or shrink depending on
563 the alignment the start insn of the range (the branch for a forward
564 branch or the label for a backward branch) starts out on; if these
565 differences are used naively, they can even oscillate infinitely.
566 We therefore want to compute a 'worst case' address difference that
567 is independent of the alignment the start insn of the range end
568 up on, and that is at least as large as the actual difference.
569 The function align_fuzz calculates the amount we have to add to the
570 naively computed difference, by traversing the part of the alignment
571 chain of the start insn of the range that is in front of the end insn
572 of the range, and considering for each alignment the maximum amount
573 that it might contribute to a size increase.
574
575 For casesi tables, we also want to know worst case minimum amounts of
576 address difference, in case a machine description wants to introduce
577 some common offset that is added to all offsets in a table.
578 For this purpose, align_fuzz with a growth argument of 0 computes the
579 appropriate adjustment. */
580
581 /* Compute the maximum delta by which the difference of the addresses of
582 START and END might grow / shrink due to a different address for start
583 which changes the size of alignment insns between START and END.
584 KNOWN_ALIGN_LOG is the alignment known for START.
585 GROWTH should be ~0 if the objective is to compute potential code size
586 increase, and 0 if the objective is to compute potential shrink.
587 The return value is undefined for any other value of GROWTH. */
588
589 static int
590 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
591 {
592 int uid = INSN_UID (start);
593 rtx align_label;
594 int known_align = 1 << known_align_log;
595 int end_shuid = INSN_SHUID (end);
596 int fuzz = 0;
597
598 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
599 {
600 int align_addr, new_align;
601
602 uid = INSN_UID (align_label);
603 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
604 if (uid_shuid[uid] > end_shuid)
605 break;
606 known_align_log = LABEL_TO_ALIGNMENT (align_label);
607 new_align = 1 << known_align_log;
608 if (new_align < known_align)
609 continue;
610 fuzz += (-align_addr ^ growth) & (new_align - known_align);
611 known_align = new_align;
612 }
613 return fuzz;
614 }
615
616 /* Compute a worst-case reference address of a branch so that it
617 can be safely used in the presence of aligned labels. Since the
618 size of the branch itself is unknown, the size of the branch is
619 not included in the range. I.e. for a forward branch, the reference
620 address is the end address of the branch as known from the previous
621 branch shortening pass, minus a value to account for possible size
622 increase due to alignment. For a backward branch, it is the start
623 address of the branch as known from the current pass, plus a value
624 to account for possible size increase due to alignment.
625 NB.: Therefore, the maximum offset allowed for backward branches needs
626 to exclude the branch size. */
627
628 int
629 insn_current_reference_address (rtx_insn *branch)
630 {
631 rtx dest;
632 int seq_uid;
633
634 if (! INSN_ADDRESSES_SET_P ())
635 return 0;
636
637 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
638 seq_uid = INSN_UID (seq);
639 if (!JUMP_P (branch))
640 /* This can happen for example on the PA; the objective is to know the
641 offset to address something in front of the start of the function.
642 Thus, we can treat it like a backward branch.
643 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
644 any alignment we'd encounter, so we skip the call to align_fuzz. */
645 return insn_current_address;
646 dest = JUMP_LABEL (branch);
647
648 /* BRANCH has no proper alignment chain set, so use SEQ.
649 BRANCH also has no INSN_SHUID. */
650 if (INSN_SHUID (seq) < INSN_SHUID (dest))
651 {
652 /* Forward branch. */
653 return (insn_last_address + insn_lengths[seq_uid]
654 - align_fuzz (seq, dest, length_unit_log, ~0));
655 }
656 else
657 {
658 /* Backward branch. */
659 return (insn_current_address
660 + align_fuzz (dest, seq, length_unit_log, ~0));
661 }
662 }
663 \f
664 /* Compute branch alignments based on frequency information in the
665 CFG. */
666
667 unsigned int
668 compute_alignments (void)
669 {
670 int log, max_skip, max_log;
671 basic_block bb;
672 int freq_max = 0;
673 int freq_threshold = 0;
674
675 if (label_align)
676 {
677 free (label_align);
678 label_align = 0;
679 }
680
681 max_labelno = max_label_num ();
682 min_labelno = get_first_label_num ();
683 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
684
685 /* If not optimizing or optimizing for size, don't assign any alignments. */
686 if (! optimize || optimize_function_for_size_p (cfun))
687 return 0;
688
689 if (dump_file)
690 {
691 dump_reg_info (dump_file);
692 dump_flow_info (dump_file, TDF_DETAILS);
693 flow_loops_dump (dump_file, NULL, 1);
694 }
695 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
696 FOR_EACH_BB_FN (bb, cfun)
697 if (bb->frequency > freq_max)
698 freq_max = bb->frequency;
699 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
700
701 if (dump_file)
702 fprintf (dump_file, "freq_max: %i\n",freq_max);
703 FOR_EACH_BB_FN (bb, cfun)
704 {
705 rtx_insn *label = BB_HEAD (bb);
706 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
707 edge e;
708 edge_iterator ei;
709
710 if (!LABEL_P (label)
711 || optimize_bb_for_size_p (bb))
712 {
713 if (dump_file)
714 fprintf (dump_file,
715 "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
716 bb->index, bb->frequency, bb->loop_father->num,
717 bb_loop_depth (bb));
718 continue;
719 }
720 max_log = LABEL_ALIGN (label);
721 max_skip = targetm.asm_out.label_align_max_skip (label);
722
723 FOR_EACH_EDGE (e, ei, bb->preds)
724 {
725 if (e->flags & EDGE_FALLTHRU)
726 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
727 else
728 branch_frequency += EDGE_FREQUENCY (e);
729 }
730 if (dump_file)
731 {
732 fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
733 " %2i fall %4i branch %4i",
734 bb->index, bb->frequency, bb->loop_father->num,
735 bb_loop_depth (bb),
736 fallthru_frequency, branch_frequency);
737 if (!bb->loop_father->inner && bb->loop_father->num)
738 fprintf (dump_file, " inner_loop");
739 if (bb->loop_father->header == bb)
740 fprintf (dump_file, " loop_header");
741 fprintf (dump_file, "\n");
742 }
743
744 /* There are two purposes to align block with no fallthru incoming edge:
745 1) to avoid fetch stalls when branch destination is near cache boundary
746 2) to improve cache efficiency in case the previous block is not executed
747 (so it does not need to be in the cache).
748
749 We to catch first case, we align frequently executed blocks.
750 To catch the second, we align blocks that are executed more frequently
751 than the predecessor and the predecessor is likely to not be executed
752 when function is called. */
753
754 if (!has_fallthru
755 && (branch_frequency > freq_threshold
756 || (bb->frequency > bb->prev_bb->frequency * 10
757 && (bb->prev_bb->frequency
758 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
759 {
760 log = JUMP_ALIGN (label);
761 if (dump_file)
762 fprintf (dump_file, " jump alignment added.\n");
763 if (max_log < log)
764 {
765 max_log = log;
766 max_skip = targetm.asm_out.jump_align_max_skip (label);
767 }
768 }
769 /* In case block is frequent and reached mostly by non-fallthru edge,
770 align it. It is most likely a first block of loop. */
771 if (has_fallthru
772 && !(single_succ_p (bb)
773 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
774 && optimize_bb_for_speed_p (bb)
775 && branch_frequency + fallthru_frequency > freq_threshold
776 && (branch_frequency
777 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
778 {
779 log = LOOP_ALIGN (label);
780 if (dump_file)
781 fprintf (dump_file, " internal loop alignment added.\n");
782 if (max_log < log)
783 {
784 max_log = log;
785 max_skip = targetm.asm_out.loop_align_max_skip (label);
786 }
787 }
788 LABEL_TO_ALIGNMENT (label) = max_log;
789 LABEL_TO_MAX_SKIP (label) = max_skip;
790 }
791
792 loop_optimizer_finalize ();
793 free_dominance_info (CDI_DOMINATORS);
794 return 0;
795 }
796
797 /* Grow the LABEL_ALIGN array after new labels are created. */
798
799 static void
800 grow_label_align (void)
801 {
802 int old = max_labelno;
803 int n_labels;
804 int n_old_labels;
805
806 max_labelno = max_label_num ();
807
808 n_labels = max_labelno - min_labelno + 1;
809 n_old_labels = old - min_labelno + 1;
810
811 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
812
813 /* Range of labels grows monotonically in the function. Failing here
814 means that the initialization of array got lost. */
815 gcc_assert (n_old_labels <= n_labels);
816
817 memset (label_align + n_old_labels, 0,
818 (n_labels - n_old_labels) * sizeof (struct label_alignment));
819 }
820
821 /* Update the already computed alignment information. LABEL_PAIRS is a vector
822 made up of pairs of labels for which the alignment information of the first
823 element will be copied from that of the second element. */
824
825 void
826 update_alignments (vec<rtx> &label_pairs)
827 {
828 unsigned int i = 0;
829 rtx iter, label = NULL_RTX;
830
831 if (max_labelno != max_label_num ())
832 grow_label_align ();
833
834 FOR_EACH_VEC_ELT (label_pairs, i, iter)
835 if (i & 1)
836 {
837 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
838 LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
839 }
840 else
841 label = iter;
842 }
843
844 namespace {
845
846 const pass_data pass_data_compute_alignments =
847 {
848 RTL_PASS, /* type */
849 "alignments", /* name */
850 OPTGROUP_NONE, /* optinfo_flags */
851 TV_NONE, /* tv_id */
852 0, /* properties_required */
853 0, /* properties_provided */
854 0, /* properties_destroyed */
855 0, /* todo_flags_start */
856 0, /* todo_flags_finish */
857 };
858
859 class pass_compute_alignments : public rtl_opt_pass
860 {
861 public:
862 pass_compute_alignments (gcc::context *ctxt)
863 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
864 {}
865
866 /* opt_pass methods: */
867 virtual unsigned int execute (function *) { return compute_alignments (); }
868
869 }; // class pass_compute_alignments
870
871 } // anon namespace
872
873 rtl_opt_pass *
874 make_pass_compute_alignments (gcc::context *ctxt)
875 {
876 return new pass_compute_alignments (ctxt);
877 }
878
879 \f
880 /* Make a pass over all insns and compute their actual lengths by shortening
881 any branches of variable length if possible. */
882
883 /* shorten_branches might be called multiple times: for example, the SH
884 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
885 In order to do this, it needs proper length information, which it obtains
886 by calling shorten_branches. This cannot be collapsed with
887 shorten_branches itself into a single pass unless we also want to integrate
888 reorg.c, since the branch splitting exposes new instructions with delay
889 slots. */
890
891 void
892 shorten_branches (rtx_insn *first)
893 {
894 rtx_insn *insn;
895 int max_uid;
896 int i;
897 int max_log;
898 int max_skip;
899 #define MAX_CODE_ALIGN 16
900 rtx_insn *seq;
901 int something_changed = 1;
902 char *varying_length;
903 rtx body;
904 int uid;
905 rtx align_tab[MAX_CODE_ALIGN];
906
907 /* Compute maximum UID and allocate label_align / uid_shuid. */
908 max_uid = get_max_uid ();
909
910 /* Free uid_shuid before reallocating it. */
911 free (uid_shuid);
912
913 uid_shuid = XNEWVEC (int, max_uid);
914
915 if (max_labelno != max_label_num ())
916 grow_label_align ();
917
918 /* Initialize label_align and set up uid_shuid to be strictly
919 monotonically rising with insn order. */
920 /* We use max_log here to keep track of the maximum alignment we want to
921 impose on the next CODE_LABEL (or the current one if we are processing
922 the CODE_LABEL itself). */
923
924 max_log = 0;
925 max_skip = 0;
926
927 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
928 {
929 int log;
930
931 INSN_SHUID (insn) = i++;
932 if (INSN_P (insn))
933 continue;
934
935 if (LABEL_P (insn))
936 {
937 rtx_insn *next;
938 bool next_is_jumptable;
939
940 /* Merge in alignments computed by compute_alignments. */
941 log = LABEL_TO_ALIGNMENT (insn);
942 if (max_log < log)
943 {
944 max_log = log;
945 max_skip = LABEL_TO_MAX_SKIP (insn);
946 }
947
948 next = next_nonnote_insn (insn);
949 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
950 if (!next_is_jumptable)
951 {
952 log = LABEL_ALIGN (insn);
953 if (max_log < log)
954 {
955 max_log = log;
956 max_skip = targetm.asm_out.label_align_max_skip (insn);
957 }
958 }
959 /* ADDR_VECs only take room if read-only data goes into the text
960 section. */
961 if ((JUMP_TABLES_IN_TEXT_SECTION
962 || readonly_data_section == text_section)
963 && next_is_jumptable)
964 {
965 log = ADDR_VEC_ALIGN (next);
966 if (max_log < log)
967 {
968 max_log = log;
969 max_skip = targetm.asm_out.label_align_max_skip (insn);
970 }
971 }
972 LABEL_TO_ALIGNMENT (insn) = max_log;
973 LABEL_TO_MAX_SKIP (insn) = max_skip;
974 max_log = 0;
975 max_skip = 0;
976 }
977 else if (BARRIER_P (insn))
978 {
979 rtx_insn *label;
980
981 for (label = insn; label && ! INSN_P (label);
982 label = NEXT_INSN (label))
983 if (LABEL_P (label))
984 {
985 log = LABEL_ALIGN_AFTER_BARRIER (insn);
986 if (max_log < log)
987 {
988 max_log = log;
989 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
990 }
991 break;
992 }
993 }
994 }
995 if (!HAVE_ATTR_length)
996 return;
997
998 /* Allocate the rest of the arrays. */
999 insn_lengths = XNEWVEC (int, max_uid);
1000 insn_lengths_max_uid = max_uid;
1001 /* Syntax errors can lead to labels being outside of the main insn stream.
1002 Initialize insn_addresses, so that we get reproducible results. */
1003 INSN_ADDRESSES_ALLOC (max_uid);
1004
1005 varying_length = XCNEWVEC (char, max_uid);
1006
1007 /* Initialize uid_align. We scan instructions
1008 from end to start, and keep in align_tab[n] the last seen insn
1009 that does an alignment of at least n+1, i.e. the successor
1010 in the alignment chain for an insn that does / has a known
1011 alignment of n. */
1012 uid_align = XCNEWVEC (rtx, max_uid);
1013
1014 for (i = MAX_CODE_ALIGN; --i >= 0;)
1015 align_tab[i] = NULL_RTX;
1016 seq = get_last_insn ();
1017 for (; seq; seq = PREV_INSN (seq))
1018 {
1019 int uid = INSN_UID (seq);
1020 int log;
1021 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1022 uid_align[uid] = align_tab[0];
1023 if (log)
1024 {
1025 /* Found an alignment label. */
1026 uid_align[uid] = align_tab[log];
1027 for (i = log - 1; i >= 0; i--)
1028 align_tab[i] = seq;
1029 }
1030 }
1031
1032 /* When optimizing, we start assuming minimum length, and keep increasing
1033 lengths as we find the need for this, till nothing changes.
1034 When not optimizing, we start assuming maximum lengths, and
1035 do a single pass to update the lengths. */
1036 bool increasing = optimize != 0;
1037
1038 #ifdef CASE_VECTOR_SHORTEN_MODE
1039 if (optimize)
1040 {
1041 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1042 label fields. */
1043
1044 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1045 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1046 int rel;
1047
1048 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1049 {
1050 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1051 int len, i, min, max, insn_shuid;
1052 int min_align;
1053 addr_diff_vec_flags flags;
1054
1055 if (! JUMP_TABLE_DATA_P (insn)
1056 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1057 continue;
1058 pat = PATTERN (insn);
1059 len = XVECLEN (pat, 1);
1060 gcc_assert (len > 0);
1061 min_align = MAX_CODE_ALIGN;
1062 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1063 {
1064 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1065 int shuid = INSN_SHUID (lab);
1066 if (shuid < min)
1067 {
1068 min = shuid;
1069 min_lab = lab;
1070 }
1071 if (shuid > max)
1072 {
1073 max = shuid;
1074 max_lab = lab;
1075 }
1076 if (min_align > LABEL_TO_ALIGNMENT (lab))
1077 min_align = LABEL_TO_ALIGNMENT (lab);
1078 }
1079 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1080 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1081 insn_shuid = INSN_SHUID (insn);
1082 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1083 memset (&flags, 0, sizeof (flags));
1084 flags.min_align = min_align;
1085 flags.base_after_vec = rel > insn_shuid;
1086 flags.min_after_vec = min > insn_shuid;
1087 flags.max_after_vec = max > insn_shuid;
1088 flags.min_after_base = min > rel;
1089 flags.max_after_base = max > rel;
1090 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1091
1092 if (increasing)
1093 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1094 }
1095 }
1096 #endif /* CASE_VECTOR_SHORTEN_MODE */
1097
1098 /* Compute initial lengths, addresses, and varying flags for each insn. */
1099 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1100
1101 for (insn_current_address = 0, insn = first;
1102 insn != 0;
1103 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1104 {
1105 uid = INSN_UID (insn);
1106
1107 insn_lengths[uid] = 0;
1108
1109 if (LABEL_P (insn))
1110 {
1111 int log = LABEL_TO_ALIGNMENT (insn);
1112 if (log)
1113 {
1114 int align = 1 << log;
1115 int new_address = (insn_current_address + align - 1) & -align;
1116 insn_lengths[uid] = new_address - insn_current_address;
1117 }
1118 }
1119
1120 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1121
1122 if (NOTE_P (insn) || BARRIER_P (insn)
1123 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1124 continue;
1125 if (insn->deleted ())
1126 continue;
1127
1128 body = PATTERN (insn);
1129 if (JUMP_TABLE_DATA_P (insn))
1130 {
1131 /* This only takes room if read-only data goes into the text
1132 section. */
1133 if (JUMP_TABLES_IN_TEXT_SECTION
1134 || readonly_data_section == text_section)
1135 insn_lengths[uid] = (XVECLEN (body,
1136 GET_CODE (body) == ADDR_DIFF_VEC)
1137 * GET_MODE_SIZE (GET_MODE (body)));
1138 /* Alignment is handled by ADDR_VEC_ALIGN. */
1139 }
1140 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1141 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1142 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1143 {
1144 int i;
1145 int const_delay_slots;
1146 if (DELAY_SLOTS)
1147 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1148 else
1149 const_delay_slots = 0;
1150
1151 int (*inner_length_fun) (rtx_insn *)
1152 = const_delay_slots ? length_fun : insn_default_length;
1153 /* Inside a delay slot sequence, we do not do any branch shortening
1154 if the shortening could change the number of delay slots
1155 of the branch. */
1156 for (i = 0; i < body_seq->len (); i++)
1157 {
1158 rtx_insn *inner_insn = body_seq->insn (i);
1159 int inner_uid = INSN_UID (inner_insn);
1160 int inner_length;
1161
1162 if (GET_CODE (body) == ASM_INPUT
1163 || asm_noperands (PATTERN (inner_insn)) >= 0)
1164 inner_length = (asm_insn_count (PATTERN (inner_insn))
1165 * insn_default_length (inner_insn));
1166 else
1167 inner_length = inner_length_fun (inner_insn);
1168
1169 insn_lengths[inner_uid] = inner_length;
1170 if (const_delay_slots)
1171 {
1172 if ((varying_length[inner_uid]
1173 = insn_variable_length_p (inner_insn)) != 0)
1174 varying_length[uid] = 1;
1175 INSN_ADDRESSES (inner_uid) = (insn_current_address
1176 + insn_lengths[uid]);
1177 }
1178 else
1179 varying_length[inner_uid] = 0;
1180 insn_lengths[uid] += inner_length;
1181 }
1182 }
1183 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1184 {
1185 insn_lengths[uid] = length_fun (insn);
1186 varying_length[uid] = insn_variable_length_p (insn);
1187 }
1188
1189 /* If needed, do any adjustment. */
1190 #ifdef ADJUST_INSN_LENGTH
1191 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1192 if (insn_lengths[uid] < 0)
1193 fatal_insn ("negative insn length", insn);
1194 #endif
1195 }
1196
1197 /* Now loop over all the insns finding varying length insns. For each,
1198 get the current insn length. If it has changed, reflect the change.
1199 When nothing changes for a full pass, we are done. */
1200
1201 while (something_changed)
1202 {
1203 something_changed = 0;
1204 insn_current_align = MAX_CODE_ALIGN - 1;
1205 for (insn_current_address = 0, insn = first;
1206 insn != 0;
1207 insn = NEXT_INSN (insn))
1208 {
1209 int new_length;
1210 #ifdef ADJUST_INSN_LENGTH
1211 int tmp_length;
1212 #endif
1213 int length_align;
1214
1215 uid = INSN_UID (insn);
1216
1217 if (LABEL_P (insn))
1218 {
1219 int log = LABEL_TO_ALIGNMENT (insn);
1220
1221 #ifdef CASE_VECTOR_SHORTEN_MODE
1222 /* If the mode of a following jump table was changed, we
1223 may need to update the alignment of this label. */
1224 rtx_insn *next;
1225 bool next_is_jumptable;
1226
1227 next = next_nonnote_insn (insn);
1228 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1229 if ((JUMP_TABLES_IN_TEXT_SECTION
1230 || readonly_data_section == text_section)
1231 && next_is_jumptable)
1232 {
1233 int newlog = ADDR_VEC_ALIGN (next);
1234 if (newlog != log)
1235 {
1236 log = newlog;
1237 LABEL_TO_ALIGNMENT (insn) = log;
1238 something_changed = 1;
1239 }
1240 }
1241 #endif
1242
1243 if (log > insn_current_align)
1244 {
1245 int align = 1 << log;
1246 int new_address= (insn_current_address + align - 1) & -align;
1247 insn_lengths[uid] = new_address - insn_current_address;
1248 insn_current_align = log;
1249 insn_current_address = new_address;
1250 }
1251 else
1252 insn_lengths[uid] = 0;
1253 INSN_ADDRESSES (uid) = insn_current_address;
1254 continue;
1255 }
1256
1257 length_align = INSN_LENGTH_ALIGNMENT (insn);
1258 if (length_align < insn_current_align)
1259 insn_current_align = length_align;
1260
1261 insn_last_address = INSN_ADDRESSES (uid);
1262 INSN_ADDRESSES (uid) = insn_current_address;
1263
1264 #ifdef CASE_VECTOR_SHORTEN_MODE
1265 if (optimize
1266 && JUMP_TABLE_DATA_P (insn)
1267 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1268 {
1269 rtx body = PATTERN (insn);
1270 int old_length = insn_lengths[uid];
1271 rtx_insn *rel_lab =
1272 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1273 rtx min_lab = XEXP (XEXP (body, 2), 0);
1274 rtx max_lab = XEXP (XEXP (body, 3), 0);
1275 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1276 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1277 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1278 rtx_insn *prev;
1279 int rel_align = 0;
1280 addr_diff_vec_flags flags;
1281 machine_mode vec_mode;
1282
1283 /* Avoid automatic aggregate initialization. */
1284 flags = ADDR_DIFF_VEC_FLAGS (body);
1285
1286 /* Try to find a known alignment for rel_lab. */
1287 for (prev = rel_lab;
1288 prev
1289 && ! insn_lengths[INSN_UID (prev)]
1290 && ! (varying_length[INSN_UID (prev)] & 1);
1291 prev = PREV_INSN (prev))
1292 if (varying_length[INSN_UID (prev)] & 2)
1293 {
1294 rel_align = LABEL_TO_ALIGNMENT (prev);
1295 break;
1296 }
1297
1298 /* See the comment on addr_diff_vec_flags in rtl.h for the
1299 meaning of the flags values. base: REL_LAB vec: INSN */
1300 /* Anything after INSN has still addresses from the last
1301 pass; adjust these so that they reflect our current
1302 estimate for this pass. */
1303 if (flags.base_after_vec)
1304 rel_addr += insn_current_address - insn_last_address;
1305 if (flags.min_after_vec)
1306 min_addr += insn_current_address - insn_last_address;
1307 if (flags.max_after_vec)
1308 max_addr += insn_current_address - insn_last_address;
1309 /* We want to know the worst case, i.e. lowest possible value
1310 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1311 its offset is positive, and we have to be wary of code shrink;
1312 otherwise, it is negative, and we have to be vary of code
1313 size increase. */
1314 if (flags.min_after_base)
1315 {
1316 /* If INSN is between REL_LAB and MIN_LAB, the size
1317 changes we are about to make can change the alignment
1318 within the observed offset, therefore we have to break
1319 it up into two parts that are independent. */
1320 if (! flags.base_after_vec && flags.min_after_vec)
1321 {
1322 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1323 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1324 }
1325 else
1326 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1327 }
1328 else
1329 {
1330 if (flags.base_after_vec && ! flags.min_after_vec)
1331 {
1332 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1333 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1334 }
1335 else
1336 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1337 }
1338 /* Likewise, determine the highest lowest possible value
1339 for the offset of MAX_LAB. */
1340 if (flags.max_after_base)
1341 {
1342 if (! flags.base_after_vec && flags.max_after_vec)
1343 {
1344 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1345 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1346 }
1347 else
1348 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1349 }
1350 else
1351 {
1352 if (flags.base_after_vec && ! flags.max_after_vec)
1353 {
1354 max_addr += align_fuzz (max_lab, insn, 0, 0);
1355 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1356 }
1357 else
1358 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1359 }
1360 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1361 max_addr - rel_addr, body);
1362 if (!increasing
1363 || (GET_MODE_SIZE (vec_mode)
1364 >= GET_MODE_SIZE (GET_MODE (body))))
1365 PUT_MODE (body, vec_mode);
1366 if (JUMP_TABLES_IN_TEXT_SECTION
1367 || readonly_data_section == text_section)
1368 {
1369 insn_lengths[uid]
1370 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1371 insn_current_address += insn_lengths[uid];
1372 if (insn_lengths[uid] != old_length)
1373 something_changed = 1;
1374 }
1375
1376 continue;
1377 }
1378 #endif /* CASE_VECTOR_SHORTEN_MODE */
1379
1380 if (! (varying_length[uid]))
1381 {
1382 if (NONJUMP_INSN_P (insn)
1383 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1384 {
1385 int i;
1386
1387 body = PATTERN (insn);
1388 for (i = 0; i < XVECLEN (body, 0); i++)
1389 {
1390 rtx inner_insn = XVECEXP (body, 0, i);
1391 int inner_uid = INSN_UID (inner_insn);
1392
1393 INSN_ADDRESSES (inner_uid) = insn_current_address;
1394
1395 insn_current_address += insn_lengths[inner_uid];
1396 }
1397 }
1398 else
1399 insn_current_address += insn_lengths[uid];
1400
1401 continue;
1402 }
1403
1404 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1405 {
1406 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1407 int i;
1408
1409 body = PATTERN (insn);
1410 new_length = 0;
1411 for (i = 0; i < seqn->len (); i++)
1412 {
1413 rtx_insn *inner_insn = seqn->insn (i);
1414 int inner_uid = INSN_UID (inner_insn);
1415 int inner_length;
1416
1417 INSN_ADDRESSES (inner_uid) = insn_current_address;
1418
1419 /* insn_current_length returns 0 for insns with a
1420 non-varying length. */
1421 if (! varying_length[inner_uid])
1422 inner_length = insn_lengths[inner_uid];
1423 else
1424 inner_length = insn_current_length (inner_insn);
1425
1426 if (inner_length != insn_lengths[inner_uid])
1427 {
1428 if (!increasing || inner_length > insn_lengths[inner_uid])
1429 {
1430 insn_lengths[inner_uid] = inner_length;
1431 something_changed = 1;
1432 }
1433 else
1434 inner_length = insn_lengths[inner_uid];
1435 }
1436 insn_current_address += inner_length;
1437 new_length += inner_length;
1438 }
1439 }
1440 else
1441 {
1442 new_length = insn_current_length (insn);
1443 insn_current_address += new_length;
1444 }
1445
1446 #ifdef ADJUST_INSN_LENGTH
1447 /* If needed, do any adjustment. */
1448 tmp_length = new_length;
1449 ADJUST_INSN_LENGTH (insn, new_length);
1450 insn_current_address += (new_length - tmp_length);
1451 #endif
1452
1453 if (new_length != insn_lengths[uid]
1454 && (!increasing || new_length > insn_lengths[uid]))
1455 {
1456 insn_lengths[uid] = new_length;
1457 something_changed = 1;
1458 }
1459 else
1460 insn_current_address += insn_lengths[uid] - new_length;
1461 }
1462 /* For a non-optimizing compile, do only a single pass. */
1463 if (!increasing)
1464 break;
1465 }
1466
1467 free (varying_length);
1468 }
1469
1470 /* Given the body of an INSN known to be generated by an ASM statement, return
1471 the number of machine instructions likely to be generated for this insn.
1472 This is used to compute its length. */
1473
1474 static int
1475 asm_insn_count (rtx body)
1476 {
1477 const char *templ;
1478
1479 if (GET_CODE (body) == ASM_INPUT)
1480 templ = XSTR (body, 0);
1481 else
1482 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1483
1484 return asm_str_count (templ);
1485 }
1486
1487 /* Return the number of machine instructions likely to be generated for the
1488 inline-asm template. */
1489 int
1490 asm_str_count (const char *templ)
1491 {
1492 int count = 1;
1493
1494 if (!*templ)
1495 return 0;
1496
1497 for (; *templ; templ++)
1498 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1499 || *templ == '\n')
1500 count++;
1501
1502 return count;
1503 }
1504 \f
1505 /* ??? This is probably the wrong place for these. */
1506 /* Structure recording the mapping from source file and directory
1507 names at compile time to those to be embedded in debug
1508 information. */
1509 struct debug_prefix_map
1510 {
1511 const char *old_prefix;
1512 const char *new_prefix;
1513 size_t old_len;
1514 size_t new_len;
1515 struct debug_prefix_map *next;
1516 };
1517
1518 /* Linked list of such structures. */
1519 static debug_prefix_map *debug_prefix_maps;
1520
1521
1522 /* Record a debug file prefix mapping. ARG is the argument to
1523 -fdebug-prefix-map and must be of the form OLD=NEW. */
1524
1525 void
1526 add_debug_prefix_map (const char *arg)
1527 {
1528 debug_prefix_map *map;
1529 const char *p;
1530
1531 p = strchr (arg, '=');
1532 if (!p)
1533 {
1534 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1535 return;
1536 }
1537 map = XNEW (debug_prefix_map);
1538 map->old_prefix = xstrndup (arg, p - arg);
1539 map->old_len = p - arg;
1540 p++;
1541 map->new_prefix = xstrdup (p);
1542 map->new_len = strlen (p);
1543 map->next = debug_prefix_maps;
1544 debug_prefix_maps = map;
1545 }
1546
1547 /* Perform user-specified mapping of debug filename prefixes. Return
1548 the new name corresponding to FILENAME. */
1549
1550 const char *
1551 remap_debug_filename (const char *filename)
1552 {
1553 debug_prefix_map *map;
1554 char *s;
1555 const char *name;
1556 size_t name_len;
1557
1558 for (map = debug_prefix_maps; map; map = map->next)
1559 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1560 break;
1561 if (!map)
1562 return filename;
1563 name = filename + map->old_len;
1564 name_len = strlen (name) + 1;
1565 s = (char *) alloca (name_len + map->new_len);
1566 memcpy (s, map->new_prefix, map->new_len);
1567 memcpy (s + map->new_len, name, name_len);
1568 return ggc_strdup (s);
1569 }
1570 \f
1571 /* Return true if DWARF2 debug info can be emitted for DECL. */
1572
1573 static bool
1574 dwarf2_debug_info_emitted_p (tree decl)
1575 {
1576 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1577 return false;
1578
1579 if (DECL_IGNORED_P (decl))
1580 return false;
1581
1582 return true;
1583 }
1584
1585 /* Return scope resulting from combination of S1 and S2. */
1586 static tree
1587 choose_inner_scope (tree s1, tree s2)
1588 {
1589 if (!s1)
1590 return s2;
1591 if (!s2)
1592 return s1;
1593 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1594 return s1;
1595 return s2;
1596 }
1597
1598 /* Emit lexical block notes needed to change scope from S1 to S2. */
1599
1600 static void
1601 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1602 {
1603 rtx_insn *insn = orig_insn;
1604 tree com = NULL_TREE;
1605 tree ts1 = s1, ts2 = s2;
1606 tree s;
1607
1608 while (ts1 != ts2)
1609 {
1610 gcc_assert (ts1 && ts2);
1611 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1612 ts1 = BLOCK_SUPERCONTEXT (ts1);
1613 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1614 ts2 = BLOCK_SUPERCONTEXT (ts2);
1615 else
1616 {
1617 ts1 = BLOCK_SUPERCONTEXT (ts1);
1618 ts2 = BLOCK_SUPERCONTEXT (ts2);
1619 }
1620 }
1621 com = ts1;
1622
1623 /* Close scopes. */
1624 s = s1;
1625 while (s != com)
1626 {
1627 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1628 NOTE_BLOCK (note) = s;
1629 s = BLOCK_SUPERCONTEXT (s);
1630 }
1631
1632 /* Open scopes. */
1633 s = s2;
1634 while (s != com)
1635 {
1636 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1637 NOTE_BLOCK (insn) = s;
1638 s = BLOCK_SUPERCONTEXT (s);
1639 }
1640 }
1641
1642 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1643 on the scope tree and the newly reordered instructions. */
1644
1645 static void
1646 reemit_insn_block_notes (void)
1647 {
1648 tree cur_block = DECL_INITIAL (cfun->decl);
1649 rtx_insn *insn;
1650 rtx_note *note;
1651
1652 insn = get_insns ();
1653 for (; insn; insn = NEXT_INSN (insn))
1654 {
1655 tree this_block;
1656
1657 /* Prevent lexical blocks from straddling section boundaries. */
1658 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
1659 {
1660 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1661 s = BLOCK_SUPERCONTEXT (s))
1662 {
1663 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1664 NOTE_BLOCK (note) = s;
1665 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1666 NOTE_BLOCK (note) = s;
1667 }
1668 }
1669
1670 if (!active_insn_p (insn))
1671 continue;
1672
1673 /* Avoid putting scope notes between jump table and its label. */
1674 if (JUMP_TABLE_DATA_P (insn))
1675 continue;
1676
1677 this_block = insn_scope (insn);
1678 /* For sequences compute scope resulting from merging all scopes
1679 of instructions nested inside. */
1680 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1681 {
1682 int i;
1683
1684 this_block = NULL;
1685 for (i = 0; i < body->len (); i++)
1686 this_block = choose_inner_scope (this_block,
1687 insn_scope (body->insn (i)));
1688 }
1689 if (! this_block)
1690 {
1691 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1692 continue;
1693 else
1694 this_block = DECL_INITIAL (cfun->decl);
1695 }
1696
1697 if (this_block != cur_block)
1698 {
1699 change_scope (insn, cur_block, this_block);
1700 cur_block = this_block;
1701 }
1702 }
1703
1704 /* change_scope emits before the insn, not after. */
1705 note = emit_note (NOTE_INSN_DELETED);
1706 change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1707 delete_insn (note);
1708
1709 reorder_blocks ();
1710 }
1711
1712 static const char *some_local_dynamic_name;
1713
1714 /* Locate some local-dynamic symbol still in use by this function
1715 so that we can print its name in local-dynamic base patterns.
1716 Return null if there are no local-dynamic references. */
1717
1718 const char *
1719 get_some_local_dynamic_name ()
1720 {
1721 subrtx_iterator::array_type array;
1722 rtx_insn *insn;
1723
1724 if (some_local_dynamic_name)
1725 return some_local_dynamic_name;
1726
1727 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1728 if (NONDEBUG_INSN_P (insn))
1729 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1730 {
1731 const_rtx x = *iter;
1732 if (GET_CODE (x) == SYMBOL_REF)
1733 {
1734 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1735 return some_local_dynamic_name = XSTR (x, 0);
1736 if (CONSTANT_POOL_ADDRESS_P (x))
1737 iter.substitute (get_pool_constant (x));
1738 }
1739 }
1740
1741 return 0;
1742 }
1743
1744 /* Output assembler code for the start of a function,
1745 and initialize some of the variables in this file
1746 for the new function. The label for the function and associated
1747 assembler pseudo-ops have already been output in `assemble_start_function'.
1748
1749 FIRST is the first insn of the rtl for the function being compiled.
1750 FILE is the file to write assembler code to.
1751 OPTIMIZE_P is nonzero if we should eliminate redundant
1752 test and compare insns. */
1753
1754 void
1755 final_start_function (rtx_insn *first, FILE *file,
1756 int optimize_p ATTRIBUTE_UNUSED)
1757 {
1758 block_depth = 0;
1759
1760 this_is_asm_operands = 0;
1761
1762 need_profile_function = false;
1763
1764 last_filename = LOCATION_FILE (prologue_location);
1765 last_linenum = LOCATION_LINE (prologue_location);
1766 last_discriminator = discriminator = 0;
1767
1768 high_block_linenum = high_function_linenum = last_linenum;
1769
1770 if (flag_sanitize & SANITIZE_ADDRESS)
1771 asan_function_start ();
1772
1773 if (!DECL_IGNORED_P (current_function_decl))
1774 debug_hooks->begin_prologue (last_linenum, last_filename);
1775
1776 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1777 dwarf2out_begin_prologue (0, NULL);
1778
1779 #ifdef LEAF_REG_REMAP
1780 if (crtl->uses_only_leaf_regs)
1781 leaf_renumber_regs (first);
1782 #endif
1783
1784 /* The Sun386i and perhaps other machines don't work right
1785 if the profiling code comes after the prologue. */
1786 if (targetm.profile_before_prologue () && crtl->profile)
1787 {
1788 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1789 && targetm.have_prologue ())
1790 {
1791 rtx_insn *insn;
1792 for (insn = first; insn; insn = NEXT_INSN (insn))
1793 if (!NOTE_P (insn))
1794 {
1795 insn = NULL;
1796 break;
1797 }
1798 else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1799 || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1800 break;
1801 else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1802 || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1803 continue;
1804 else
1805 {
1806 insn = NULL;
1807 break;
1808 }
1809
1810 if (insn)
1811 need_profile_function = true;
1812 else
1813 profile_function (file);
1814 }
1815 else
1816 profile_function (file);
1817 }
1818
1819 /* If debugging, assign block numbers to all of the blocks in this
1820 function. */
1821 if (write_symbols)
1822 {
1823 reemit_insn_block_notes ();
1824 number_blocks (current_function_decl);
1825 /* We never actually put out begin/end notes for the top-level
1826 block in the function. But, conceptually, that block is
1827 always needed. */
1828 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1829 }
1830
1831 if (warn_frame_larger_than
1832 && get_frame_size () > frame_larger_than_size)
1833 {
1834 /* Issue a warning */
1835 warning (OPT_Wframe_larger_than_,
1836 "the frame size of %wd bytes is larger than %wd bytes",
1837 get_frame_size (), frame_larger_than_size);
1838 }
1839
1840 /* First output the function prologue: code to set up the stack frame. */
1841 targetm.asm_out.function_prologue (file, get_frame_size ());
1842
1843 /* If the machine represents the prologue as RTL, the profiling code must
1844 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1845 if (! targetm.have_prologue ())
1846 profile_after_prologue (file);
1847 }
1848
1849 static void
1850 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1851 {
1852 if (!targetm.profile_before_prologue () && crtl->profile)
1853 profile_function (file);
1854 }
1855
1856 static void
1857 profile_function (FILE *file ATTRIBUTE_UNUSED)
1858 {
1859 #ifndef NO_PROFILE_COUNTERS
1860 # define NO_PROFILE_COUNTERS 0
1861 #endif
1862 #ifdef ASM_OUTPUT_REG_PUSH
1863 rtx sval = NULL, chain = NULL;
1864
1865 if (cfun->returns_struct)
1866 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1867 true);
1868 if (cfun->static_chain_decl)
1869 chain = targetm.calls.static_chain (current_function_decl, true);
1870 #endif /* ASM_OUTPUT_REG_PUSH */
1871
1872 if (! NO_PROFILE_COUNTERS)
1873 {
1874 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1875 switch_to_section (data_section);
1876 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1877 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1878 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1879 }
1880
1881 switch_to_section (current_function_section ());
1882
1883 #ifdef ASM_OUTPUT_REG_PUSH
1884 if (sval && REG_P (sval))
1885 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1886 if (chain && REG_P (chain))
1887 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1888 #endif
1889
1890 FUNCTION_PROFILER (file, current_function_funcdef_no);
1891
1892 #ifdef ASM_OUTPUT_REG_PUSH
1893 if (chain && REG_P (chain))
1894 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1895 if (sval && REG_P (sval))
1896 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1897 #endif
1898 }
1899
1900 /* Output assembler code for the end of a function.
1901 For clarity, args are same as those of `final_start_function'
1902 even though not all of them are needed. */
1903
1904 void
1905 final_end_function (void)
1906 {
1907 app_disable ();
1908
1909 if (!DECL_IGNORED_P (current_function_decl))
1910 debug_hooks->end_function (high_function_linenum);
1911
1912 /* Finally, output the function epilogue:
1913 code to restore the stack frame and return to the caller. */
1914 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1915
1916 /* And debug output. */
1917 if (!DECL_IGNORED_P (current_function_decl))
1918 debug_hooks->end_epilogue (last_linenum, last_filename);
1919
1920 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1921 && dwarf2out_do_frame ())
1922 dwarf2out_end_epilogue (last_linenum, last_filename);
1923
1924 some_local_dynamic_name = 0;
1925 }
1926 \f
1927
1928 /* Dumper helper for basic block information. FILE is the assembly
1929 output file, and INSN is the instruction being emitted. */
1930
1931 static void
1932 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1933 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1934 {
1935 basic_block bb;
1936
1937 if (!flag_debug_asm)
1938 return;
1939
1940 if (INSN_UID (insn) < bb_map_size
1941 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1942 {
1943 edge e;
1944 edge_iterator ei;
1945
1946 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1947 if (bb->frequency)
1948 fprintf (file, " freq:%d", bb->frequency);
1949 if (bb->count)
1950 fprintf (file, " count:%" PRId64,
1951 bb->count);
1952 fprintf (file, " seq:%d", (*bb_seqn)++);
1953 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1954 FOR_EACH_EDGE (e, ei, bb->preds)
1955 {
1956 dump_edge_info (file, e, TDF_DETAILS, 0);
1957 }
1958 fprintf (file, "\n");
1959 }
1960 if (INSN_UID (insn) < bb_map_size
1961 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1962 {
1963 edge e;
1964 edge_iterator ei;
1965
1966 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1967 FOR_EACH_EDGE (e, ei, bb->succs)
1968 {
1969 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1970 }
1971 fprintf (file, "\n");
1972 }
1973 }
1974
1975 /* Output assembler code for some insns: all or part of a function.
1976 For description of args, see `final_start_function', above. */
1977
1978 void
1979 final (rtx_insn *first, FILE *file, int optimize_p)
1980 {
1981 rtx_insn *insn, *next;
1982 int seen = 0;
1983
1984 /* Used for -dA dump. */
1985 basic_block *start_to_bb = NULL;
1986 basic_block *end_to_bb = NULL;
1987 int bb_map_size = 0;
1988 int bb_seqn = 0;
1989
1990 last_ignored_compare = 0;
1991
1992 if (HAVE_cc0)
1993 for (insn = first; insn; insn = NEXT_INSN (insn))
1994 {
1995 /* If CC tracking across branches is enabled, record the insn which
1996 jumps to each branch only reached from one place. */
1997 if (optimize_p && JUMP_P (insn))
1998 {
1999 rtx lab = JUMP_LABEL (insn);
2000 if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
2001 {
2002 LABEL_REFS (lab) = insn;
2003 }
2004 }
2005 }
2006
2007 init_recog ();
2008
2009 CC_STATUS_INIT;
2010
2011 if (flag_debug_asm)
2012 {
2013 basic_block bb;
2014
2015 bb_map_size = get_max_uid () + 1;
2016 start_to_bb = XCNEWVEC (basic_block, bb_map_size);
2017 end_to_bb = XCNEWVEC (basic_block, bb_map_size);
2018
2019 /* There is no cfg for a thunk. */
2020 if (!cfun->is_thunk)
2021 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2022 {
2023 start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
2024 end_to_bb[INSN_UID (BB_END (bb))] = bb;
2025 }
2026 }
2027
2028 /* Output the insns. */
2029 for (insn = first; insn;)
2030 {
2031 if (HAVE_ATTR_length)
2032 {
2033 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
2034 {
2035 /* This can be triggered by bugs elsewhere in the compiler if
2036 new insns are created after init_insn_lengths is called. */
2037 gcc_assert (NOTE_P (insn));
2038 insn_current_address = -1;
2039 }
2040 else
2041 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
2042 }
2043
2044 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
2045 bb_map_size, &bb_seqn);
2046 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
2047 }
2048
2049 if (flag_debug_asm)
2050 {
2051 free (start_to_bb);
2052 free (end_to_bb);
2053 }
2054
2055 /* Remove CFI notes, to avoid compare-debug failures. */
2056 for (insn = first; insn; insn = next)
2057 {
2058 next = NEXT_INSN (insn);
2059 if (NOTE_P (insn)
2060 && (NOTE_KIND (insn) == NOTE_INSN_CFI
2061 || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2062 delete_insn (insn);
2063 }
2064 }
2065 \f
2066 const char *
2067 get_insn_template (int code, rtx insn)
2068 {
2069 switch (insn_data[code].output_format)
2070 {
2071 case INSN_OUTPUT_FORMAT_SINGLE:
2072 return insn_data[code].output.single;
2073 case INSN_OUTPUT_FORMAT_MULTI:
2074 return insn_data[code].output.multi[which_alternative];
2075 case INSN_OUTPUT_FORMAT_FUNCTION:
2076 gcc_assert (insn);
2077 return (*insn_data[code].output.function) (recog_data.operand,
2078 as_a <rtx_insn *> (insn));
2079
2080 default:
2081 gcc_unreachable ();
2082 }
2083 }
2084
2085 /* Emit the appropriate declaration for an alternate-entry-point
2086 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2087 LABEL_KIND != LABEL_NORMAL.
2088
2089 The case fall-through in this function is intentional. */
2090 static void
2091 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2092 {
2093 const char *name = LABEL_NAME (insn);
2094
2095 switch (LABEL_KIND (insn))
2096 {
2097 case LABEL_WEAK_ENTRY:
2098 #ifdef ASM_WEAKEN_LABEL
2099 ASM_WEAKEN_LABEL (file, name);
2100 #endif
2101 case LABEL_GLOBAL_ENTRY:
2102 targetm.asm_out.globalize_label (file, name);
2103 case LABEL_STATIC_ENTRY:
2104 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2105 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2106 #endif
2107 ASM_OUTPUT_LABEL (file, name);
2108 break;
2109
2110 case LABEL_NORMAL:
2111 default:
2112 gcc_unreachable ();
2113 }
2114 }
2115
2116 /* Given a CALL_INSN, find and return the nested CALL. */
2117 static rtx
2118 call_from_call_insn (rtx_call_insn *insn)
2119 {
2120 rtx x;
2121 gcc_assert (CALL_P (insn));
2122 x = PATTERN (insn);
2123
2124 while (GET_CODE (x) != CALL)
2125 {
2126 switch (GET_CODE (x))
2127 {
2128 default:
2129 gcc_unreachable ();
2130 case COND_EXEC:
2131 x = COND_EXEC_CODE (x);
2132 break;
2133 case PARALLEL:
2134 x = XVECEXP (x, 0, 0);
2135 break;
2136 case SET:
2137 x = XEXP (x, 1);
2138 break;
2139 }
2140 }
2141 return x;
2142 }
2143
2144 /* The final scan for one insn, INSN.
2145 Args are same as in `final', except that INSN
2146 is the insn being scanned.
2147 Value returned is the next insn to be scanned.
2148
2149 NOPEEPHOLES is the flag to disallow peephole processing (currently
2150 used for within delayed branch sequence output).
2151
2152 SEEN is used to track the end of the prologue, for emitting
2153 debug information. We force the emission of a line note after
2154 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2155
2156 rtx_insn *
2157 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2158 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2159 {
2160 #if HAVE_cc0
2161 rtx set;
2162 #endif
2163 rtx_insn *next;
2164
2165 insn_counter++;
2166
2167 /* Ignore deleted insns. These can occur when we split insns (due to a
2168 template of "#") while not optimizing. */
2169 if (insn->deleted ())
2170 return NEXT_INSN (insn);
2171
2172 switch (GET_CODE (insn))
2173 {
2174 case NOTE:
2175 switch (NOTE_KIND (insn))
2176 {
2177 case NOTE_INSN_DELETED:
2178 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2179 break;
2180
2181 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2182 in_cold_section_p = !in_cold_section_p;
2183
2184 if (dwarf2out_do_frame ())
2185 dwarf2out_switch_text_section ();
2186 else if (!DECL_IGNORED_P (current_function_decl))
2187 debug_hooks->switch_text_section ();
2188
2189 switch_to_section (current_function_section ());
2190 targetm.asm_out.function_switched_text_sections (asm_out_file,
2191 current_function_decl,
2192 in_cold_section_p);
2193 /* Emit a label for the split cold section. Form label name by
2194 suffixing "cold" to the original function's name. */
2195 if (in_cold_section_p)
2196 {
2197 cold_function_name
2198 = clone_function_name (current_function_decl, "cold");
2199 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2200 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2201 IDENTIFIER_POINTER
2202 (cold_function_name),
2203 current_function_decl);
2204 #else
2205 ASM_OUTPUT_LABEL (asm_out_file,
2206 IDENTIFIER_POINTER (cold_function_name));
2207 #endif
2208 }
2209 break;
2210
2211 case NOTE_INSN_BASIC_BLOCK:
2212 if (need_profile_function)
2213 {
2214 profile_function (asm_out_file);
2215 need_profile_function = false;
2216 }
2217
2218 if (targetm.asm_out.unwind_emit)
2219 targetm.asm_out.unwind_emit (asm_out_file, insn);
2220
2221 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2222
2223 break;
2224
2225 case NOTE_INSN_EH_REGION_BEG:
2226 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2227 NOTE_EH_HANDLER (insn));
2228 break;
2229
2230 case NOTE_INSN_EH_REGION_END:
2231 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2232 NOTE_EH_HANDLER (insn));
2233 break;
2234
2235 case NOTE_INSN_PROLOGUE_END:
2236 targetm.asm_out.function_end_prologue (file);
2237 profile_after_prologue (file);
2238
2239 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2240 {
2241 *seen |= SEEN_EMITTED;
2242 force_source_line = true;
2243 }
2244 else
2245 *seen |= SEEN_NOTE;
2246
2247 break;
2248
2249 case NOTE_INSN_EPILOGUE_BEG:
2250 if (!DECL_IGNORED_P (current_function_decl))
2251 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2252 targetm.asm_out.function_begin_epilogue (file);
2253 break;
2254
2255 case NOTE_INSN_CFI:
2256 dwarf2out_emit_cfi (NOTE_CFI (insn));
2257 break;
2258
2259 case NOTE_INSN_CFI_LABEL:
2260 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2261 NOTE_LABEL_NUMBER (insn));
2262 break;
2263
2264 case NOTE_INSN_FUNCTION_BEG:
2265 if (need_profile_function)
2266 {
2267 profile_function (asm_out_file);
2268 need_profile_function = false;
2269 }
2270
2271 app_disable ();
2272 if (!DECL_IGNORED_P (current_function_decl))
2273 debug_hooks->end_prologue (last_linenum, last_filename);
2274
2275 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2276 {
2277 *seen |= SEEN_EMITTED;
2278 force_source_line = true;
2279 }
2280 else
2281 *seen |= SEEN_NOTE;
2282
2283 break;
2284
2285 case NOTE_INSN_BLOCK_BEG:
2286 if (debug_info_level == DINFO_LEVEL_NORMAL
2287 || debug_info_level == DINFO_LEVEL_VERBOSE
2288 || write_symbols == DWARF2_DEBUG
2289 || write_symbols == VMS_AND_DWARF2_DEBUG
2290 || write_symbols == VMS_DEBUG)
2291 {
2292 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2293
2294 app_disable ();
2295 ++block_depth;
2296 high_block_linenum = last_linenum;
2297
2298 /* Output debugging info about the symbol-block beginning. */
2299 if (!DECL_IGNORED_P (current_function_decl))
2300 debug_hooks->begin_block (last_linenum, n);
2301
2302 /* Mark this block as output. */
2303 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2304 }
2305 if (write_symbols == DBX_DEBUG
2306 || write_symbols == SDB_DEBUG)
2307 {
2308 location_t *locus_ptr
2309 = block_nonartificial_location (NOTE_BLOCK (insn));
2310
2311 if (locus_ptr != NULL)
2312 {
2313 override_filename = LOCATION_FILE (*locus_ptr);
2314 override_linenum = LOCATION_LINE (*locus_ptr);
2315 }
2316 }
2317 break;
2318
2319 case NOTE_INSN_BLOCK_END:
2320 if (debug_info_level == DINFO_LEVEL_NORMAL
2321 || debug_info_level == DINFO_LEVEL_VERBOSE
2322 || write_symbols == DWARF2_DEBUG
2323 || write_symbols == VMS_AND_DWARF2_DEBUG
2324 || write_symbols == VMS_DEBUG)
2325 {
2326 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2327
2328 app_disable ();
2329
2330 /* End of a symbol-block. */
2331 --block_depth;
2332 gcc_assert (block_depth >= 0);
2333
2334 if (!DECL_IGNORED_P (current_function_decl))
2335 debug_hooks->end_block (high_block_linenum, n);
2336 }
2337 if (write_symbols == DBX_DEBUG
2338 || write_symbols == SDB_DEBUG)
2339 {
2340 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2341 location_t *locus_ptr
2342 = block_nonartificial_location (outer_block);
2343
2344 if (locus_ptr != NULL)
2345 {
2346 override_filename = LOCATION_FILE (*locus_ptr);
2347 override_linenum = LOCATION_LINE (*locus_ptr);
2348 }
2349 else
2350 {
2351 override_filename = NULL;
2352 override_linenum = 0;
2353 }
2354 }
2355 break;
2356
2357 case NOTE_INSN_DELETED_LABEL:
2358 /* Emit the label. We may have deleted the CODE_LABEL because
2359 the label could be proved to be unreachable, though still
2360 referenced (in the form of having its address taken. */
2361 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2362 break;
2363
2364 case NOTE_INSN_DELETED_DEBUG_LABEL:
2365 /* Similarly, but need to use different namespace for it. */
2366 if (CODE_LABEL_NUMBER (insn) != -1)
2367 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2368 break;
2369
2370 case NOTE_INSN_VAR_LOCATION:
2371 case NOTE_INSN_CALL_ARG_LOCATION:
2372 if (!DECL_IGNORED_P (current_function_decl))
2373 debug_hooks->var_location (insn);
2374 break;
2375
2376 default:
2377 gcc_unreachable ();
2378 break;
2379 }
2380 break;
2381
2382 case BARRIER:
2383 break;
2384
2385 case CODE_LABEL:
2386 /* The target port might emit labels in the output function for
2387 some insn, e.g. sh.c output_branchy_insn. */
2388 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2389 {
2390 int align = LABEL_TO_ALIGNMENT (insn);
2391 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2392 int max_skip = LABEL_TO_MAX_SKIP (insn);
2393 #endif
2394
2395 if (align && NEXT_INSN (insn))
2396 {
2397 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2398 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2399 #else
2400 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2401 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2402 #else
2403 ASM_OUTPUT_ALIGN (file, align);
2404 #endif
2405 #endif
2406 }
2407 }
2408 CC_STATUS_INIT;
2409
2410 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2411 debug_hooks->label (as_a <rtx_code_label *> (insn));
2412
2413 app_disable ();
2414
2415 next = next_nonnote_insn (insn);
2416 /* If this label is followed by a jump-table, make sure we put
2417 the label in the read-only section. Also possibly write the
2418 label and jump table together. */
2419 if (next != 0 && JUMP_TABLE_DATA_P (next))
2420 {
2421 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2422 /* In this case, the case vector is being moved by the
2423 target, so don't output the label at all. Leave that
2424 to the back end macros. */
2425 #else
2426 if (! JUMP_TABLES_IN_TEXT_SECTION)
2427 {
2428 int log_align;
2429
2430 switch_to_section (targetm.asm_out.function_rodata_section
2431 (current_function_decl));
2432
2433 #ifdef ADDR_VEC_ALIGN
2434 log_align = ADDR_VEC_ALIGN (next);
2435 #else
2436 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2437 #endif
2438 ASM_OUTPUT_ALIGN (file, log_align);
2439 }
2440 else
2441 switch_to_section (current_function_section ());
2442
2443 #ifdef ASM_OUTPUT_CASE_LABEL
2444 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2445 next);
2446 #else
2447 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2448 #endif
2449 #endif
2450 break;
2451 }
2452 if (LABEL_ALT_ENTRY_P (insn))
2453 output_alternate_entry_point (file, insn);
2454 else
2455 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2456 break;
2457
2458 default:
2459 {
2460 rtx body = PATTERN (insn);
2461 int insn_code_number;
2462 const char *templ;
2463 bool is_stmt;
2464
2465 /* Reset this early so it is correct for ASM statements. */
2466 current_insn_predicate = NULL_RTX;
2467
2468 /* An INSN, JUMP_INSN or CALL_INSN.
2469 First check for special kinds that recog doesn't recognize. */
2470
2471 if (GET_CODE (body) == USE /* These are just declarations. */
2472 || GET_CODE (body) == CLOBBER)
2473 break;
2474
2475 #if HAVE_cc0
2476 {
2477 /* If there is a REG_CC_SETTER note on this insn, it means that
2478 the setting of the condition code was done in the delay slot
2479 of the insn that branched here. So recover the cc status
2480 from the insn that set it. */
2481
2482 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2483 if (note)
2484 {
2485 rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
2486 NOTICE_UPDATE_CC (PATTERN (other), other);
2487 cc_prev_status = cc_status;
2488 }
2489 }
2490 #endif
2491
2492 /* Detect insns that are really jump-tables
2493 and output them as such. */
2494
2495 if (JUMP_TABLE_DATA_P (insn))
2496 {
2497 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2498 int vlen, idx;
2499 #endif
2500
2501 if (! JUMP_TABLES_IN_TEXT_SECTION)
2502 switch_to_section (targetm.asm_out.function_rodata_section
2503 (current_function_decl));
2504 else
2505 switch_to_section (current_function_section ());
2506
2507 app_disable ();
2508
2509 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2510 if (GET_CODE (body) == ADDR_VEC)
2511 {
2512 #ifdef ASM_OUTPUT_ADDR_VEC
2513 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2514 #else
2515 gcc_unreachable ();
2516 #endif
2517 }
2518 else
2519 {
2520 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2521 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2522 #else
2523 gcc_unreachable ();
2524 #endif
2525 }
2526 #else
2527 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2528 for (idx = 0; idx < vlen; idx++)
2529 {
2530 if (GET_CODE (body) == ADDR_VEC)
2531 {
2532 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2533 ASM_OUTPUT_ADDR_VEC_ELT
2534 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2535 #else
2536 gcc_unreachable ();
2537 #endif
2538 }
2539 else
2540 {
2541 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2542 ASM_OUTPUT_ADDR_DIFF_ELT
2543 (file,
2544 body,
2545 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2546 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2547 #else
2548 gcc_unreachable ();
2549 #endif
2550 }
2551 }
2552 #ifdef ASM_OUTPUT_CASE_END
2553 ASM_OUTPUT_CASE_END (file,
2554 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2555 insn);
2556 #endif
2557 #endif
2558
2559 switch_to_section (current_function_section ());
2560
2561 break;
2562 }
2563 /* Output this line note if it is the first or the last line
2564 note in a row. */
2565 if (!DECL_IGNORED_P (current_function_decl)
2566 && notice_source_line (insn, &is_stmt))
2567 (*debug_hooks->source_line) (last_linenum, last_filename,
2568 last_discriminator, is_stmt);
2569
2570 if (GET_CODE (body) == ASM_INPUT)
2571 {
2572 const char *string = XSTR (body, 0);
2573
2574 /* There's no telling what that did to the condition codes. */
2575 CC_STATUS_INIT;
2576
2577 if (string[0])
2578 {
2579 expanded_location loc;
2580
2581 app_enable ();
2582 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2583 if (*loc.file && loc.line)
2584 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2585 ASM_COMMENT_START, loc.line, loc.file);
2586 fprintf (asm_out_file, "\t%s\n", string);
2587 #if HAVE_AS_LINE_ZERO
2588 if (*loc.file && loc.line)
2589 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2590 #endif
2591 }
2592 break;
2593 }
2594
2595 /* Detect `asm' construct with operands. */
2596 if (asm_noperands (body) >= 0)
2597 {
2598 unsigned int noperands = asm_noperands (body);
2599 rtx *ops = XALLOCAVEC (rtx, noperands);
2600 const char *string;
2601 location_t loc;
2602 expanded_location expanded;
2603
2604 /* There's no telling what that did to the condition codes. */
2605 CC_STATUS_INIT;
2606
2607 /* Get out the operand values. */
2608 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2609 /* Inhibit dying on what would otherwise be compiler bugs. */
2610 insn_noperands = noperands;
2611 this_is_asm_operands = insn;
2612 expanded = expand_location (loc);
2613
2614 #ifdef FINAL_PRESCAN_INSN
2615 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2616 #endif
2617
2618 /* Output the insn using them. */
2619 if (string[0])
2620 {
2621 app_enable ();
2622 if (expanded.file && expanded.line)
2623 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2624 ASM_COMMENT_START, expanded.line, expanded.file);
2625 output_asm_insn (string, ops);
2626 #if HAVE_AS_LINE_ZERO
2627 if (expanded.file && expanded.line)
2628 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2629 #endif
2630 }
2631
2632 if (targetm.asm_out.final_postscan_insn)
2633 targetm.asm_out.final_postscan_insn (file, insn, ops,
2634 insn_noperands);
2635
2636 this_is_asm_operands = 0;
2637 break;
2638 }
2639
2640 app_disable ();
2641
2642 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2643 {
2644 /* A delayed-branch sequence */
2645 int i;
2646
2647 final_sequence = seq;
2648
2649 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2650 force the restoration of a comparison that was previously
2651 thought unnecessary. If that happens, cancel this sequence
2652 and cause that insn to be restored. */
2653
2654 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2655 if (next != seq->insn (1))
2656 {
2657 final_sequence = 0;
2658 return next;
2659 }
2660
2661 for (i = 1; i < seq->len (); i++)
2662 {
2663 rtx_insn *insn = seq->insn (i);
2664 rtx_insn *next = NEXT_INSN (insn);
2665 /* We loop in case any instruction in a delay slot gets
2666 split. */
2667 do
2668 insn = final_scan_insn (insn, file, 0, 1, seen);
2669 while (insn != next);
2670 }
2671 #ifdef DBR_OUTPUT_SEQEND
2672 DBR_OUTPUT_SEQEND (file);
2673 #endif
2674 final_sequence = 0;
2675
2676 /* If the insn requiring the delay slot was a CALL_INSN, the
2677 insns in the delay slot are actually executed before the
2678 called function. Hence we don't preserve any CC-setting
2679 actions in these insns and the CC must be marked as being
2680 clobbered by the function. */
2681 if (CALL_P (seq->insn (0)))
2682 {
2683 CC_STATUS_INIT;
2684 }
2685 break;
2686 }
2687
2688 /* We have a real machine instruction as rtl. */
2689
2690 body = PATTERN (insn);
2691
2692 #if HAVE_cc0
2693 set = single_set (insn);
2694
2695 /* Check for redundant test and compare instructions
2696 (when the condition codes are already set up as desired).
2697 This is done only when optimizing; if not optimizing,
2698 it should be possible for the user to alter a variable
2699 with the debugger in between statements
2700 and the next statement should reexamine the variable
2701 to compute the condition codes. */
2702
2703 if (optimize_p)
2704 {
2705 if (set
2706 && GET_CODE (SET_DEST (set)) == CC0
2707 && insn != last_ignored_compare)
2708 {
2709 rtx src1, src2;
2710 if (GET_CODE (SET_SRC (set)) == SUBREG)
2711 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2712
2713 src1 = SET_SRC (set);
2714 src2 = NULL_RTX;
2715 if (GET_CODE (SET_SRC (set)) == COMPARE)
2716 {
2717 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2718 XEXP (SET_SRC (set), 0)
2719 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2720 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2721 XEXP (SET_SRC (set), 1)
2722 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2723 if (XEXP (SET_SRC (set), 1)
2724 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2725 src2 = XEXP (SET_SRC (set), 0);
2726 }
2727 if ((cc_status.value1 != 0
2728 && rtx_equal_p (src1, cc_status.value1))
2729 || (cc_status.value2 != 0
2730 && rtx_equal_p (src1, cc_status.value2))
2731 || (src2 != 0 && cc_status.value1 != 0
2732 && rtx_equal_p (src2, cc_status.value1))
2733 || (src2 != 0 && cc_status.value2 != 0
2734 && rtx_equal_p (src2, cc_status.value2)))
2735 {
2736 /* Don't delete insn if it has an addressing side-effect. */
2737 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2738 /* or if anything in it is volatile. */
2739 && ! volatile_refs_p (PATTERN (insn)))
2740 {
2741 /* We don't really delete the insn; just ignore it. */
2742 last_ignored_compare = insn;
2743 break;
2744 }
2745 }
2746 }
2747 }
2748
2749 /* If this is a conditional branch, maybe modify it
2750 if the cc's are in a nonstandard state
2751 so that it accomplishes the same thing that it would
2752 do straightforwardly if the cc's were set up normally. */
2753
2754 if (cc_status.flags != 0
2755 && JUMP_P (insn)
2756 && GET_CODE (body) == SET
2757 && SET_DEST (body) == pc_rtx
2758 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2759 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2760 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2761 {
2762 /* This function may alter the contents of its argument
2763 and clear some of the cc_status.flags bits.
2764 It may also return 1 meaning condition now always true
2765 or -1 meaning condition now always false
2766 or 2 meaning condition nontrivial but altered. */
2767 int result = alter_cond (XEXP (SET_SRC (body), 0));
2768 /* If condition now has fixed value, replace the IF_THEN_ELSE
2769 with its then-operand or its else-operand. */
2770 if (result == 1)
2771 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2772 if (result == -1)
2773 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2774
2775 /* The jump is now either unconditional or a no-op.
2776 If it has become a no-op, don't try to output it.
2777 (It would not be recognized.) */
2778 if (SET_SRC (body) == pc_rtx)
2779 {
2780 delete_insn (insn);
2781 break;
2782 }
2783 else if (ANY_RETURN_P (SET_SRC (body)))
2784 /* Replace (set (pc) (return)) with (return). */
2785 PATTERN (insn) = body = SET_SRC (body);
2786
2787 /* Rerecognize the instruction if it has changed. */
2788 if (result != 0)
2789 INSN_CODE (insn) = -1;
2790 }
2791
2792 /* If this is a conditional trap, maybe modify it if the cc's
2793 are in a nonstandard state so that it accomplishes the same
2794 thing that it would do straightforwardly if the cc's were
2795 set up normally. */
2796 if (cc_status.flags != 0
2797 && NONJUMP_INSN_P (insn)
2798 && GET_CODE (body) == TRAP_IF
2799 && COMPARISON_P (TRAP_CONDITION (body))
2800 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2801 {
2802 /* This function may alter the contents of its argument
2803 and clear some of the cc_status.flags bits.
2804 It may also return 1 meaning condition now always true
2805 or -1 meaning condition now always false
2806 or 2 meaning condition nontrivial but altered. */
2807 int result = alter_cond (TRAP_CONDITION (body));
2808
2809 /* If TRAP_CONDITION has become always false, delete the
2810 instruction. */
2811 if (result == -1)
2812 {
2813 delete_insn (insn);
2814 break;
2815 }
2816
2817 /* If TRAP_CONDITION has become always true, replace
2818 TRAP_CONDITION with const_true_rtx. */
2819 if (result == 1)
2820 TRAP_CONDITION (body) = const_true_rtx;
2821
2822 /* Rerecognize the instruction if it has changed. */
2823 if (result != 0)
2824 INSN_CODE (insn) = -1;
2825 }
2826
2827 /* Make same adjustments to instructions that examine the
2828 condition codes without jumping and instructions that
2829 handle conditional moves (if this machine has either one). */
2830
2831 if (cc_status.flags != 0
2832 && set != 0)
2833 {
2834 rtx cond_rtx, then_rtx, else_rtx;
2835
2836 if (!JUMP_P (insn)
2837 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2838 {
2839 cond_rtx = XEXP (SET_SRC (set), 0);
2840 then_rtx = XEXP (SET_SRC (set), 1);
2841 else_rtx = XEXP (SET_SRC (set), 2);
2842 }
2843 else
2844 {
2845 cond_rtx = SET_SRC (set);
2846 then_rtx = const_true_rtx;
2847 else_rtx = const0_rtx;
2848 }
2849
2850 if (COMPARISON_P (cond_rtx)
2851 && XEXP (cond_rtx, 0) == cc0_rtx)
2852 {
2853 int result;
2854 result = alter_cond (cond_rtx);
2855 if (result == 1)
2856 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2857 else if (result == -1)
2858 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2859 else if (result == 2)
2860 INSN_CODE (insn) = -1;
2861 if (SET_DEST (set) == SET_SRC (set))
2862 delete_insn (insn);
2863 }
2864 }
2865
2866 #endif
2867
2868 /* Do machine-specific peephole optimizations if desired. */
2869
2870 if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2871 {
2872 rtx_insn *next = peephole (insn);
2873 /* When peepholing, if there were notes within the peephole,
2874 emit them before the peephole. */
2875 if (next != 0 && next != NEXT_INSN (insn))
2876 {
2877 rtx_insn *note, *prev = PREV_INSN (insn);
2878
2879 for (note = NEXT_INSN (insn); note != next;
2880 note = NEXT_INSN (note))
2881 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2882
2883 /* Put the notes in the proper position for a later
2884 rescan. For example, the SH target can do this
2885 when generating a far jump in a delayed branch
2886 sequence. */
2887 note = NEXT_INSN (insn);
2888 SET_PREV_INSN (note) = prev;
2889 SET_NEXT_INSN (prev) = note;
2890 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2891 SET_PREV_INSN (insn) = PREV_INSN (next);
2892 SET_NEXT_INSN (insn) = next;
2893 SET_PREV_INSN (next) = insn;
2894 }
2895
2896 /* PEEPHOLE might have changed this. */
2897 body = PATTERN (insn);
2898 }
2899
2900 /* Try to recognize the instruction.
2901 If successful, verify that the operands satisfy the
2902 constraints for the instruction. Crash if they don't,
2903 since `reload' should have changed them so that they do. */
2904
2905 insn_code_number = recog_memoized (insn);
2906 cleanup_subreg_operands (insn);
2907
2908 /* Dump the insn in the assembly for debugging (-dAP).
2909 If the final dump is requested as slim RTL, dump slim
2910 RTL to the assembly file also. */
2911 if (flag_dump_rtl_in_asm)
2912 {
2913 print_rtx_head = ASM_COMMENT_START;
2914 if (! (dump_flags & TDF_SLIM))
2915 print_rtl_single (asm_out_file, insn);
2916 else
2917 dump_insn_slim (asm_out_file, insn);
2918 print_rtx_head = "";
2919 }
2920
2921 if (! constrain_operands_cached (insn, 1))
2922 fatal_insn_not_found (insn);
2923
2924 /* Some target machines need to prescan each insn before
2925 it is output. */
2926
2927 #ifdef FINAL_PRESCAN_INSN
2928 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2929 #endif
2930
2931 if (targetm.have_conditional_execution ()
2932 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2933 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2934
2935 #if HAVE_cc0
2936 cc_prev_status = cc_status;
2937
2938 /* Update `cc_status' for this instruction.
2939 The instruction's output routine may change it further.
2940 If the output routine for a jump insn needs to depend
2941 on the cc status, it should look at cc_prev_status. */
2942
2943 NOTICE_UPDATE_CC (body, insn);
2944 #endif
2945
2946 current_output_insn = debug_insn = insn;
2947
2948 /* Find the proper template for this insn. */
2949 templ = get_insn_template (insn_code_number, insn);
2950
2951 /* If the C code returns 0, it means that it is a jump insn
2952 which follows a deleted test insn, and that test insn
2953 needs to be reinserted. */
2954 if (templ == 0)
2955 {
2956 rtx_insn *prev;
2957
2958 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2959
2960 /* We have already processed the notes between the setter and
2961 the user. Make sure we don't process them again, this is
2962 particularly important if one of the notes is a block
2963 scope note or an EH note. */
2964 for (prev = insn;
2965 prev != last_ignored_compare;
2966 prev = PREV_INSN (prev))
2967 {
2968 if (NOTE_P (prev))
2969 delete_insn (prev); /* Use delete_note. */
2970 }
2971
2972 return prev;
2973 }
2974
2975 /* If the template is the string "#", it means that this insn must
2976 be split. */
2977 if (templ[0] == '#' && templ[1] == '\0')
2978 {
2979 rtx_insn *new_rtx = try_split (body, insn, 0);
2980
2981 /* If we didn't split the insn, go away. */
2982 if (new_rtx == insn && PATTERN (new_rtx) == body)
2983 fatal_insn ("could not split insn", insn);
2984
2985 /* If we have a length attribute, this instruction should have
2986 been split in shorten_branches, to ensure that we would have
2987 valid length info for the splitees. */
2988 gcc_assert (!HAVE_ATTR_length);
2989
2990 return new_rtx;
2991 }
2992
2993 /* ??? This will put the directives in the wrong place if
2994 get_insn_template outputs assembly directly. However calling it
2995 before get_insn_template breaks if the insns is split. */
2996 if (targetm.asm_out.unwind_emit_before_insn
2997 && targetm.asm_out.unwind_emit)
2998 targetm.asm_out.unwind_emit (asm_out_file, insn);
2999
3000 if (rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn))
3001 {
3002 rtx x = call_from_call_insn (call_insn);
3003 x = XEXP (x, 0);
3004 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3005 {
3006 tree t;
3007 x = XEXP (x, 0);
3008 t = SYMBOL_REF_DECL (x);
3009 if (t)
3010 assemble_external (t);
3011 }
3012 if (!DECL_IGNORED_P (current_function_decl))
3013 debug_hooks->var_location (insn);
3014 }
3015
3016 /* Output assembler code from the template. */
3017 output_asm_insn (templ, recog_data.operand);
3018
3019 /* Some target machines need to postscan each insn after
3020 it is output. */
3021 if (targetm.asm_out.final_postscan_insn)
3022 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
3023 recog_data.n_operands);
3024
3025 if (!targetm.asm_out.unwind_emit_before_insn
3026 && targetm.asm_out.unwind_emit)
3027 targetm.asm_out.unwind_emit (asm_out_file, insn);
3028
3029 current_output_insn = debug_insn = 0;
3030 }
3031 }
3032 return NEXT_INSN (insn);
3033 }
3034 \f
3035 /* Return whether a source line note needs to be emitted before INSN.
3036 Sets IS_STMT to TRUE if the line should be marked as a possible
3037 breakpoint location. */
3038
3039 static bool
3040 notice_source_line (rtx_insn *insn, bool *is_stmt)
3041 {
3042 const char *filename;
3043 int linenum;
3044
3045 if (override_filename)
3046 {
3047 filename = override_filename;
3048 linenum = override_linenum;
3049 }
3050 else if (INSN_HAS_LOCATION (insn))
3051 {
3052 expanded_location xloc = insn_location (insn);
3053 filename = xloc.file;
3054 linenum = xloc.line;
3055 }
3056 else
3057 {
3058 filename = NULL;
3059 linenum = 0;
3060 }
3061
3062 if (filename == NULL)
3063 return false;
3064
3065 if (force_source_line
3066 || filename != last_filename
3067 || last_linenum != linenum)
3068 {
3069 force_source_line = false;
3070 last_filename = filename;
3071 last_linenum = linenum;
3072 last_discriminator = discriminator;
3073 *is_stmt = true;
3074 high_block_linenum = MAX (last_linenum, high_block_linenum);
3075 high_function_linenum = MAX (last_linenum, high_function_linenum);
3076 return true;
3077 }
3078
3079 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3080 {
3081 /* If the discriminator changed, but the line number did not,
3082 output the line table entry with is_stmt false so the
3083 debugger does not treat this as a breakpoint location. */
3084 last_discriminator = discriminator;
3085 *is_stmt = false;
3086 return true;
3087 }
3088
3089 return false;
3090 }
3091 \f
3092 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3093 directly to the desired hard register. */
3094
3095 void
3096 cleanup_subreg_operands (rtx_insn *insn)
3097 {
3098 int i;
3099 bool changed = false;
3100 extract_insn_cached (insn);
3101 for (i = 0; i < recog_data.n_operands; i++)
3102 {
3103 /* The following test cannot use recog_data.operand when testing
3104 for a SUBREG: the underlying object might have been changed
3105 already if we are inside a match_operator expression that
3106 matches the else clause. Instead we test the underlying
3107 expression directly. */
3108 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3109 {
3110 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3111 changed = true;
3112 }
3113 else if (GET_CODE (recog_data.operand[i]) == PLUS
3114 || GET_CODE (recog_data.operand[i]) == MULT
3115 || MEM_P (recog_data.operand[i]))
3116 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3117 }
3118
3119 for (i = 0; i < recog_data.n_dups; i++)
3120 {
3121 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3122 {
3123 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3124 changed = true;
3125 }
3126 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3127 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3128 || MEM_P (*recog_data.dup_loc[i]))
3129 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3130 }
3131 if (changed)
3132 df_insn_rescan (insn);
3133 }
3134
3135 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3136 the thing it is a subreg of. Do it anyway if FINAL_P. */
3137
3138 rtx
3139 alter_subreg (rtx *xp, bool final_p)
3140 {
3141 rtx x = *xp;
3142 rtx y = SUBREG_REG (x);
3143
3144 /* simplify_subreg does not remove subreg from volatile references.
3145 We are required to. */
3146 if (MEM_P (y))
3147 {
3148 int offset = SUBREG_BYTE (x);
3149
3150 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3151 contains 0 instead of the proper offset. See simplify_subreg. */
3152 if (offset == 0
3153 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3154 {
3155 int difference = GET_MODE_SIZE (GET_MODE (y))
3156 - GET_MODE_SIZE (GET_MODE (x));
3157 if (WORDS_BIG_ENDIAN)
3158 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3159 if (BYTES_BIG_ENDIAN)
3160 offset += difference % UNITS_PER_WORD;
3161 }
3162
3163 if (final_p)
3164 *xp = adjust_address (y, GET_MODE (x), offset);
3165 else
3166 *xp = adjust_address_nv (y, GET_MODE (x), offset);
3167 }
3168 else if (REG_P (y) && HARD_REGISTER_P (y))
3169 {
3170 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3171 SUBREG_BYTE (x));
3172
3173 if (new_rtx != 0)
3174 *xp = new_rtx;
3175 else if (final_p && REG_P (y))
3176 {
3177 /* Simplify_subreg can't handle some REG cases, but we have to. */
3178 unsigned int regno;
3179 HOST_WIDE_INT offset;
3180
3181 regno = subreg_regno (x);
3182 if (subreg_lowpart_p (x))
3183 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3184 else
3185 offset = SUBREG_BYTE (x);
3186 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3187 }
3188 }
3189
3190 return *xp;
3191 }
3192
3193 /* Do alter_subreg on all the SUBREGs contained in X. */
3194
3195 static rtx
3196 walk_alter_subreg (rtx *xp, bool *changed)
3197 {
3198 rtx x = *xp;
3199 switch (GET_CODE (x))
3200 {
3201 case PLUS:
3202 case MULT:
3203 case AND:
3204 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3205 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3206 break;
3207
3208 case MEM:
3209 case ZERO_EXTEND:
3210 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3211 break;
3212
3213 case SUBREG:
3214 *changed = true;
3215 return alter_subreg (xp, true);
3216
3217 default:
3218 break;
3219 }
3220
3221 return *xp;
3222 }
3223 \f
3224 #if HAVE_cc0
3225
3226 /* Given BODY, the body of a jump instruction, alter the jump condition
3227 as required by the bits that are set in cc_status.flags.
3228 Not all of the bits there can be handled at this level in all cases.
3229
3230 The value is normally 0.
3231 1 means that the condition has become always true.
3232 -1 means that the condition has become always false.
3233 2 means that COND has been altered. */
3234
3235 static int
3236 alter_cond (rtx cond)
3237 {
3238 int value = 0;
3239
3240 if (cc_status.flags & CC_REVERSED)
3241 {
3242 value = 2;
3243 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3244 }
3245
3246 if (cc_status.flags & CC_INVERTED)
3247 {
3248 value = 2;
3249 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3250 }
3251
3252 if (cc_status.flags & CC_NOT_POSITIVE)
3253 switch (GET_CODE (cond))
3254 {
3255 case LE:
3256 case LEU:
3257 case GEU:
3258 /* Jump becomes unconditional. */
3259 return 1;
3260
3261 case GT:
3262 case GTU:
3263 case LTU:
3264 /* Jump becomes no-op. */
3265 return -1;
3266
3267 case GE:
3268 PUT_CODE (cond, EQ);
3269 value = 2;
3270 break;
3271
3272 case LT:
3273 PUT_CODE (cond, NE);
3274 value = 2;
3275 break;
3276
3277 default:
3278 break;
3279 }
3280
3281 if (cc_status.flags & CC_NOT_NEGATIVE)
3282 switch (GET_CODE (cond))
3283 {
3284 case GE:
3285 case GEU:
3286 /* Jump becomes unconditional. */
3287 return 1;
3288
3289 case LT:
3290 case LTU:
3291 /* Jump becomes no-op. */
3292 return -1;
3293
3294 case LE:
3295 case LEU:
3296 PUT_CODE (cond, EQ);
3297 value = 2;
3298 break;
3299
3300 case GT:
3301 case GTU:
3302 PUT_CODE (cond, NE);
3303 value = 2;
3304 break;
3305
3306 default:
3307 break;
3308 }
3309
3310 if (cc_status.flags & CC_NO_OVERFLOW)
3311 switch (GET_CODE (cond))
3312 {
3313 case GEU:
3314 /* Jump becomes unconditional. */
3315 return 1;
3316
3317 case LEU:
3318 PUT_CODE (cond, EQ);
3319 value = 2;
3320 break;
3321
3322 case GTU:
3323 PUT_CODE (cond, NE);
3324 value = 2;
3325 break;
3326
3327 case LTU:
3328 /* Jump becomes no-op. */
3329 return -1;
3330
3331 default:
3332 break;
3333 }
3334
3335 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3336 switch (GET_CODE (cond))
3337 {
3338 default:
3339 gcc_unreachable ();
3340
3341 case NE:
3342 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3343 value = 2;
3344 break;
3345
3346 case EQ:
3347 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3348 value = 2;
3349 break;
3350 }
3351
3352 if (cc_status.flags & CC_NOT_SIGNED)
3353 /* The flags are valid if signed condition operators are converted
3354 to unsigned. */
3355 switch (GET_CODE (cond))
3356 {
3357 case LE:
3358 PUT_CODE (cond, LEU);
3359 value = 2;
3360 break;
3361
3362 case LT:
3363 PUT_CODE (cond, LTU);
3364 value = 2;
3365 break;
3366
3367 case GT:
3368 PUT_CODE (cond, GTU);
3369 value = 2;
3370 break;
3371
3372 case GE:
3373 PUT_CODE (cond, GEU);
3374 value = 2;
3375 break;
3376
3377 default:
3378 break;
3379 }
3380
3381 return value;
3382 }
3383 #endif
3384 \f
3385 /* Report inconsistency between the assembler template and the operands.
3386 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3387
3388 void
3389 output_operand_lossage (const char *cmsgid, ...)
3390 {
3391 char *fmt_string;
3392 char *new_message;
3393 const char *pfx_str;
3394 va_list ap;
3395
3396 va_start (ap, cmsgid);
3397
3398 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3399 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3400 new_message = xvasprintf (fmt_string, ap);
3401
3402 if (this_is_asm_operands)
3403 error_for_asm (this_is_asm_operands, "%s", new_message);
3404 else
3405 internal_error ("%s", new_message);
3406
3407 free (fmt_string);
3408 free (new_message);
3409 va_end (ap);
3410 }
3411 \f
3412 /* Output of assembler code from a template, and its subroutines. */
3413
3414 /* Annotate the assembly with a comment describing the pattern and
3415 alternative used. */
3416
3417 static void
3418 output_asm_name (void)
3419 {
3420 if (debug_insn)
3421 {
3422 int num = INSN_CODE (debug_insn);
3423 fprintf (asm_out_file, "\t%s %d\t%s",
3424 ASM_COMMENT_START, INSN_UID (debug_insn),
3425 insn_data[num].name);
3426 if (insn_data[num].n_alternatives > 1)
3427 fprintf (asm_out_file, "/%d", which_alternative + 1);
3428
3429 if (HAVE_ATTR_length)
3430 fprintf (asm_out_file, "\t[length = %d]",
3431 get_attr_length (debug_insn));
3432
3433 /* Clear this so only the first assembler insn
3434 of any rtl insn will get the special comment for -dp. */
3435 debug_insn = 0;
3436 }
3437 }
3438
3439 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3440 or its address, return that expr . Set *PADDRESSP to 1 if the expr
3441 corresponds to the address of the object and 0 if to the object. */
3442
3443 static tree
3444 get_mem_expr_from_op (rtx op, int *paddressp)
3445 {
3446 tree expr;
3447 int inner_addressp;
3448
3449 *paddressp = 0;
3450
3451 if (REG_P (op))
3452 return REG_EXPR (op);
3453 else if (!MEM_P (op))
3454 return 0;
3455
3456 if (MEM_EXPR (op) != 0)
3457 return MEM_EXPR (op);
3458
3459 /* Otherwise we have an address, so indicate it and look at the address. */
3460 *paddressp = 1;
3461 op = XEXP (op, 0);
3462
3463 /* First check if we have a decl for the address, then look at the right side
3464 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3465 But don't allow the address to itself be indirect. */
3466 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3467 return expr;
3468 else if (GET_CODE (op) == PLUS
3469 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3470 return expr;
3471
3472 while (UNARY_P (op)
3473 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3474 op = XEXP (op, 0);
3475
3476 expr = get_mem_expr_from_op (op, &inner_addressp);
3477 return inner_addressp ? 0 : expr;
3478 }
3479
3480 /* Output operand names for assembler instructions. OPERANDS is the
3481 operand vector, OPORDER is the order to write the operands, and NOPS
3482 is the number of operands to write. */
3483
3484 static void
3485 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3486 {
3487 int wrote = 0;
3488 int i;
3489
3490 for (i = 0; i < nops; i++)
3491 {
3492 int addressp;
3493 rtx op = operands[oporder[i]];
3494 tree expr = get_mem_expr_from_op (op, &addressp);
3495
3496 fprintf (asm_out_file, "%c%s",
3497 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3498 wrote = 1;
3499 if (expr)
3500 {
3501 fprintf (asm_out_file, "%s",
3502 addressp ? "*" : "");
3503 print_mem_expr (asm_out_file, expr);
3504 wrote = 1;
3505 }
3506 else if (REG_P (op) && ORIGINAL_REGNO (op)
3507 && ORIGINAL_REGNO (op) != REGNO (op))
3508 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3509 }
3510 }
3511
3512 #ifdef ASSEMBLER_DIALECT
3513 /* Helper function to parse assembler dialects in the asm string.
3514 This is called from output_asm_insn and asm_fprintf. */
3515 static const char *
3516 do_assembler_dialects (const char *p, int *dialect)
3517 {
3518 char c = *(p - 1);
3519
3520 switch (c)
3521 {
3522 case '{':
3523 {
3524 int i;
3525
3526 if (*dialect)
3527 output_operand_lossage ("nested assembly dialect alternatives");
3528 else
3529 *dialect = 1;
3530
3531 /* If we want the first dialect, do nothing. Otherwise, skip
3532 DIALECT_NUMBER of strings ending with '|'. */
3533 for (i = 0; i < dialect_number; i++)
3534 {
3535 while (*p && *p != '}')
3536 {
3537 if (*p == '|')
3538 {
3539 p++;
3540 break;
3541 }
3542
3543 /* Skip over any character after a percent sign. */
3544 if (*p == '%')
3545 p++;
3546 if (*p)
3547 p++;
3548 }
3549
3550 if (*p == '}')
3551 break;
3552 }
3553
3554 if (*p == '\0')
3555 output_operand_lossage ("unterminated assembly dialect alternative");
3556 }
3557 break;
3558
3559 case '|':
3560 if (*dialect)
3561 {
3562 /* Skip to close brace. */
3563 do
3564 {
3565 if (*p == '\0')
3566 {
3567 output_operand_lossage ("unterminated assembly dialect alternative");
3568 break;
3569 }
3570
3571 /* Skip over any character after a percent sign. */
3572 if (*p == '%' && p[1])
3573 {
3574 p += 2;
3575 continue;
3576 }
3577
3578 if (*p++ == '}')
3579 break;
3580 }
3581 while (1);
3582
3583 *dialect = 0;
3584 }
3585 else
3586 putc (c, asm_out_file);
3587 break;
3588
3589 case '}':
3590 if (! *dialect)
3591 putc (c, asm_out_file);
3592 *dialect = 0;
3593 break;
3594 default:
3595 gcc_unreachable ();
3596 }
3597
3598 return p;
3599 }
3600 #endif
3601
3602 /* Output text from TEMPLATE to the assembler output file,
3603 obeying %-directions to substitute operands taken from
3604 the vector OPERANDS.
3605
3606 %N (for N a digit) means print operand N in usual manner.
3607 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3608 and print the label name with no punctuation.
3609 %cN means require operand N to be a constant
3610 and print the constant expression with no punctuation.
3611 %aN means expect operand N to be a memory address
3612 (not a memory reference!) and print a reference
3613 to that address.
3614 %nN means expect operand N to be a constant
3615 and print a constant expression for minus the value
3616 of the operand, with no other punctuation. */
3617
3618 void
3619 output_asm_insn (const char *templ, rtx *operands)
3620 {
3621 const char *p;
3622 int c;
3623 #ifdef ASSEMBLER_DIALECT
3624 int dialect = 0;
3625 #endif
3626 int oporder[MAX_RECOG_OPERANDS];
3627 char opoutput[MAX_RECOG_OPERANDS];
3628 int ops = 0;
3629
3630 /* An insn may return a null string template
3631 in a case where no assembler code is needed. */
3632 if (*templ == 0)
3633 return;
3634
3635 memset (opoutput, 0, sizeof opoutput);
3636 p = templ;
3637 putc ('\t', asm_out_file);
3638
3639 #ifdef ASM_OUTPUT_OPCODE
3640 ASM_OUTPUT_OPCODE (asm_out_file, p);
3641 #endif
3642
3643 while ((c = *p++))
3644 switch (c)
3645 {
3646 case '\n':
3647 if (flag_verbose_asm)
3648 output_asm_operand_names (operands, oporder, ops);
3649 if (flag_print_asm_name)
3650 output_asm_name ();
3651
3652 ops = 0;
3653 memset (opoutput, 0, sizeof opoutput);
3654
3655 putc (c, asm_out_file);
3656 #ifdef ASM_OUTPUT_OPCODE
3657 while ((c = *p) == '\t')
3658 {
3659 putc (c, asm_out_file);
3660 p++;
3661 }
3662 ASM_OUTPUT_OPCODE (asm_out_file, p);
3663 #endif
3664 break;
3665
3666 #ifdef ASSEMBLER_DIALECT
3667 case '{':
3668 case '}':
3669 case '|':
3670 p = do_assembler_dialects (p, &dialect);
3671 break;
3672 #endif
3673
3674 case '%':
3675 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3676 if ASSEMBLER_DIALECT defined and these characters have a special
3677 meaning as dialect delimiters.*/
3678 if (*p == '%'
3679 #ifdef ASSEMBLER_DIALECT
3680 || *p == '{' || *p == '}' || *p == '|'
3681 #endif
3682 )
3683 {
3684 putc (*p, asm_out_file);
3685 p++;
3686 }
3687 /* %= outputs a number which is unique to each insn in the entire
3688 compilation. This is useful for making local labels that are
3689 referred to more than once in a given insn. */
3690 else if (*p == '=')
3691 {
3692 p++;
3693 fprintf (asm_out_file, "%d", insn_counter);
3694 }
3695 /* % followed by a letter and some digits
3696 outputs an operand in a special way depending on the letter.
3697 Letters `acln' are implemented directly.
3698 Other letters are passed to `output_operand' so that
3699 the TARGET_PRINT_OPERAND hook can define them. */
3700 else if (ISALPHA (*p))
3701 {
3702 int letter = *p++;
3703 unsigned long opnum;
3704 char *endptr;
3705
3706 opnum = strtoul (p, &endptr, 10);
3707
3708 if (endptr == p)
3709 output_operand_lossage ("operand number missing "
3710 "after %%-letter");
3711 else if (this_is_asm_operands && opnum >= insn_noperands)
3712 output_operand_lossage ("operand number out of range");
3713 else if (letter == 'l')
3714 output_asm_label (operands[opnum]);
3715 else if (letter == 'a')
3716 output_address (operands[opnum]);
3717 else if (letter == 'c')
3718 {
3719 if (CONSTANT_ADDRESS_P (operands[opnum]))
3720 output_addr_const (asm_out_file, operands[opnum]);
3721 else
3722 output_operand (operands[opnum], 'c');
3723 }
3724 else if (letter == 'n')
3725 {
3726 if (CONST_INT_P (operands[opnum]))
3727 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3728 - INTVAL (operands[opnum]));
3729 else
3730 {
3731 putc ('-', asm_out_file);
3732 output_addr_const (asm_out_file, operands[opnum]);
3733 }
3734 }
3735 else
3736 output_operand (operands[opnum], letter);
3737
3738 if (!opoutput[opnum])
3739 oporder[ops++] = opnum;
3740 opoutput[opnum] = 1;
3741
3742 p = endptr;
3743 c = *p;
3744 }
3745 /* % followed by a digit outputs an operand the default way. */
3746 else if (ISDIGIT (*p))
3747 {
3748 unsigned long opnum;
3749 char *endptr;
3750
3751 opnum = strtoul (p, &endptr, 10);
3752 if (this_is_asm_operands && opnum >= insn_noperands)
3753 output_operand_lossage ("operand number out of range");
3754 else
3755 output_operand (operands[opnum], 0);
3756
3757 if (!opoutput[opnum])
3758 oporder[ops++] = opnum;
3759 opoutput[opnum] = 1;
3760
3761 p = endptr;
3762 c = *p;
3763 }
3764 /* % followed by punctuation: output something for that
3765 punctuation character alone, with no operand. The
3766 TARGET_PRINT_OPERAND hook decides what is actually done. */
3767 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3768 output_operand (NULL_RTX, *p++);
3769 else
3770 output_operand_lossage ("invalid %%-code");
3771 break;
3772
3773 default:
3774 putc (c, asm_out_file);
3775 }
3776
3777 /* Write out the variable names for operands, if we know them. */
3778 if (flag_verbose_asm)
3779 output_asm_operand_names (operands, oporder, ops);
3780 if (flag_print_asm_name)
3781 output_asm_name ();
3782
3783 putc ('\n', asm_out_file);
3784 }
3785 \f
3786 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3787
3788 void
3789 output_asm_label (rtx x)
3790 {
3791 char buf[256];
3792
3793 if (GET_CODE (x) == LABEL_REF)
3794 x = LABEL_REF_LABEL (x);
3795 if (LABEL_P (x)
3796 || (NOTE_P (x)
3797 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3798 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3799 else
3800 output_operand_lossage ("'%%l' operand isn't a label");
3801
3802 assemble_name (asm_out_file, buf);
3803 }
3804
3805 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3806
3807 void
3808 mark_symbol_refs_as_used (rtx x)
3809 {
3810 subrtx_iterator::array_type array;
3811 FOR_EACH_SUBRTX (iter, array, x, ALL)
3812 {
3813 const_rtx x = *iter;
3814 if (GET_CODE (x) == SYMBOL_REF)
3815 if (tree t = SYMBOL_REF_DECL (x))
3816 assemble_external (t);
3817 }
3818 }
3819
3820 /* Print operand X using machine-dependent assembler syntax.
3821 CODE is a non-digit that preceded the operand-number in the % spec,
3822 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3823 between the % and the digits.
3824 When CODE is a non-letter, X is 0.
3825
3826 The meanings of the letters are machine-dependent and controlled
3827 by TARGET_PRINT_OPERAND. */
3828
3829 void
3830 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3831 {
3832 if (x && GET_CODE (x) == SUBREG)
3833 x = alter_subreg (&x, true);
3834
3835 /* X must not be a pseudo reg. */
3836 if (!targetm.no_register_allocation)
3837 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3838
3839 targetm.asm_out.print_operand (asm_out_file, x, code);
3840
3841 if (x == NULL_RTX)
3842 return;
3843
3844 mark_symbol_refs_as_used (x);
3845 }
3846
3847 /* Print a memory reference operand for address X using
3848 machine-dependent assembler syntax. */
3849
3850 void
3851 output_address (rtx x)
3852 {
3853 bool changed = false;
3854 walk_alter_subreg (&x, &changed);
3855 targetm.asm_out.print_operand_address (asm_out_file, x);
3856 }
3857 \f
3858 /* Print an integer constant expression in assembler syntax.
3859 Addition and subtraction are the only arithmetic
3860 that may appear in these expressions. */
3861
3862 void
3863 output_addr_const (FILE *file, rtx x)
3864 {
3865 char buf[256];
3866
3867 restart:
3868 switch (GET_CODE (x))
3869 {
3870 case PC:
3871 putc ('.', file);
3872 break;
3873
3874 case SYMBOL_REF:
3875 if (SYMBOL_REF_DECL (x))
3876 assemble_external (SYMBOL_REF_DECL (x));
3877 #ifdef ASM_OUTPUT_SYMBOL_REF
3878 ASM_OUTPUT_SYMBOL_REF (file, x);
3879 #else
3880 assemble_name (file, XSTR (x, 0));
3881 #endif
3882 break;
3883
3884 case LABEL_REF:
3885 x = LABEL_REF_LABEL (x);
3886 /* Fall through. */
3887 case CODE_LABEL:
3888 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3889 #ifdef ASM_OUTPUT_LABEL_REF
3890 ASM_OUTPUT_LABEL_REF (file, buf);
3891 #else
3892 assemble_name (file, buf);
3893 #endif
3894 break;
3895
3896 case CONST_INT:
3897 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3898 break;
3899
3900 case CONST:
3901 /* This used to output parentheses around the expression,
3902 but that does not work on the 386 (either ATT or BSD assembler). */
3903 output_addr_const (file, XEXP (x, 0));
3904 break;
3905
3906 case CONST_WIDE_INT:
3907 /* We do not know the mode here so we have to use a round about
3908 way to build a wide-int to get it printed properly. */
3909 {
3910 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3911 CONST_WIDE_INT_NUNITS (x),
3912 CONST_WIDE_INT_NUNITS (x)
3913 * HOST_BITS_PER_WIDE_INT,
3914 false);
3915 print_decs (w, file);
3916 }
3917 break;
3918
3919 case CONST_DOUBLE:
3920 if (CONST_DOUBLE_AS_INT_P (x))
3921 {
3922 /* We can use %d if the number is one word and positive. */
3923 if (CONST_DOUBLE_HIGH (x))
3924 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3925 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3926 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3927 else if (CONST_DOUBLE_LOW (x) < 0)
3928 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3929 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3930 else
3931 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3932 }
3933 else
3934 /* We can't handle floating point constants;
3935 PRINT_OPERAND must handle them. */
3936 output_operand_lossage ("floating constant misused");
3937 break;
3938
3939 case CONST_FIXED:
3940 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3941 break;
3942
3943 case PLUS:
3944 /* Some assemblers need integer constants to appear last (eg masm). */
3945 if (CONST_INT_P (XEXP (x, 0)))
3946 {
3947 output_addr_const (file, XEXP (x, 1));
3948 if (INTVAL (XEXP (x, 0)) >= 0)
3949 fprintf (file, "+");
3950 output_addr_const (file, XEXP (x, 0));
3951 }
3952 else
3953 {
3954 output_addr_const (file, XEXP (x, 0));
3955 if (!CONST_INT_P (XEXP (x, 1))
3956 || INTVAL (XEXP (x, 1)) >= 0)
3957 fprintf (file, "+");
3958 output_addr_const (file, XEXP (x, 1));
3959 }
3960 break;
3961
3962 case MINUS:
3963 /* Avoid outputting things like x-x or x+5-x,
3964 since some assemblers can't handle that. */
3965 x = simplify_subtraction (x);
3966 if (GET_CODE (x) != MINUS)
3967 goto restart;
3968
3969 output_addr_const (file, XEXP (x, 0));
3970 fprintf (file, "-");
3971 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3972 || GET_CODE (XEXP (x, 1)) == PC
3973 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3974 output_addr_const (file, XEXP (x, 1));
3975 else
3976 {
3977 fputs (targetm.asm_out.open_paren, file);
3978 output_addr_const (file, XEXP (x, 1));
3979 fputs (targetm.asm_out.close_paren, file);
3980 }
3981 break;
3982
3983 case ZERO_EXTEND:
3984 case SIGN_EXTEND:
3985 case SUBREG:
3986 case TRUNCATE:
3987 output_addr_const (file, XEXP (x, 0));
3988 break;
3989
3990 default:
3991 if (targetm.asm_out.output_addr_const_extra (file, x))
3992 break;
3993
3994 output_operand_lossage ("invalid expression as operand");
3995 }
3996 }
3997 \f
3998 /* Output a quoted string. */
3999
4000 void
4001 output_quoted_string (FILE *asm_file, const char *string)
4002 {
4003 #ifdef OUTPUT_QUOTED_STRING
4004 OUTPUT_QUOTED_STRING (asm_file, string);
4005 #else
4006 char c;
4007
4008 putc ('\"', asm_file);
4009 while ((c = *string++) != 0)
4010 {
4011 if (ISPRINT (c))
4012 {
4013 if (c == '\"' || c == '\\')
4014 putc ('\\', asm_file);
4015 putc (c, asm_file);
4016 }
4017 else
4018 fprintf (asm_file, "\\%03o", (unsigned char) c);
4019 }
4020 putc ('\"', asm_file);
4021 #endif
4022 }
4023 \f
4024 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
4025
4026 void
4027 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4028 {
4029 char buf[2 + CHAR_BIT * sizeof (value) / 4];
4030 if (value == 0)
4031 putc ('0', f);
4032 else
4033 {
4034 char *p = buf + sizeof (buf);
4035 do
4036 *--p = "0123456789abcdef"[value % 16];
4037 while ((value /= 16) != 0);
4038 *--p = 'x';
4039 *--p = '0';
4040 fwrite (p, 1, buf + sizeof (buf) - p, f);
4041 }
4042 }
4043
4044 /* Internal function that prints an unsigned long in decimal in reverse.
4045 The output string IS NOT null-terminated. */
4046
4047 static int
4048 sprint_ul_rev (char *s, unsigned long value)
4049 {
4050 int i = 0;
4051 do
4052 {
4053 s[i] = "0123456789"[value % 10];
4054 value /= 10;
4055 i++;
4056 /* alternate version, without modulo */
4057 /* oldval = value; */
4058 /* value /= 10; */
4059 /* s[i] = "0123456789" [oldval - 10*value]; */
4060 /* i++ */
4061 }
4062 while (value != 0);
4063 return i;
4064 }
4065
4066 /* Write an unsigned long as decimal to a file, fast. */
4067
4068 void
4069 fprint_ul (FILE *f, unsigned long value)
4070 {
4071 /* python says: len(str(2**64)) == 20 */
4072 char s[20];
4073 int i;
4074
4075 i = sprint_ul_rev (s, value);
4076
4077 /* It's probably too small to bother with string reversal and fputs. */
4078 do
4079 {
4080 i--;
4081 putc (s[i], f);
4082 }
4083 while (i != 0);
4084 }
4085
4086 /* Write an unsigned long as decimal to a string, fast.
4087 s must be wide enough to not overflow, at least 21 chars.
4088 Returns the length of the string (without terminating '\0'). */
4089
4090 int
4091 sprint_ul (char *s, unsigned long value)
4092 {
4093 int len = sprint_ul_rev (s, value);
4094 s[len] = '\0';
4095
4096 std::reverse (s, s + len);
4097 return len;
4098 }
4099
4100 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4101 %R prints the value of REGISTER_PREFIX.
4102 %L prints the value of LOCAL_LABEL_PREFIX.
4103 %U prints the value of USER_LABEL_PREFIX.
4104 %I prints the value of IMMEDIATE_PREFIX.
4105 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4106 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4107
4108 We handle alternate assembler dialects here, just like output_asm_insn. */
4109
4110 void
4111 asm_fprintf (FILE *file, const char *p, ...)
4112 {
4113 char buf[10];
4114 char *q, c;
4115 #ifdef ASSEMBLER_DIALECT
4116 int dialect = 0;
4117 #endif
4118 va_list argptr;
4119
4120 va_start (argptr, p);
4121
4122 buf[0] = '%';
4123
4124 while ((c = *p++))
4125 switch (c)
4126 {
4127 #ifdef ASSEMBLER_DIALECT
4128 case '{':
4129 case '}':
4130 case '|':
4131 p = do_assembler_dialects (p, &dialect);
4132 break;
4133 #endif
4134
4135 case '%':
4136 c = *p++;
4137 q = &buf[1];
4138 while (strchr ("-+ #0", c))
4139 {
4140 *q++ = c;
4141 c = *p++;
4142 }
4143 while (ISDIGIT (c) || c == '.')
4144 {
4145 *q++ = c;
4146 c = *p++;
4147 }
4148 switch (c)
4149 {
4150 case '%':
4151 putc ('%', file);
4152 break;
4153
4154 case 'd': case 'i': case 'u':
4155 case 'x': case 'X': case 'o':
4156 case 'c':
4157 *q++ = c;
4158 *q = 0;
4159 fprintf (file, buf, va_arg (argptr, int));
4160 break;
4161
4162 case 'w':
4163 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4164 'o' cases, but we do not check for those cases. It
4165 means that the value is a HOST_WIDE_INT, which may be
4166 either `long' or `long long'. */
4167 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4168 q += strlen (HOST_WIDE_INT_PRINT);
4169 *q++ = *p++;
4170 *q = 0;
4171 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4172 break;
4173
4174 case 'l':
4175 *q++ = c;
4176 #ifdef HAVE_LONG_LONG
4177 if (*p == 'l')
4178 {
4179 *q++ = *p++;
4180 *q++ = *p++;
4181 *q = 0;
4182 fprintf (file, buf, va_arg (argptr, long long));
4183 }
4184 else
4185 #endif
4186 {
4187 *q++ = *p++;
4188 *q = 0;
4189 fprintf (file, buf, va_arg (argptr, long));
4190 }
4191
4192 break;
4193
4194 case 's':
4195 *q++ = c;
4196 *q = 0;
4197 fprintf (file, buf, va_arg (argptr, char *));
4198 break;
4199
4200 case 'O':
4201 #ifdef ASM_OUTPUT_OPCODE
4202 ASM_OUTPUT_OPCODE (asm_out_file, p);
4203 #endif
4204 break;
4205
4206 case 'R':
4207 #ifdef REGISTER_PREFIX
4208 fprintf (file, "%s", REGISTER_PREFIX);
4209 #endif
4210 break;
4211
4212 case 'I':
4213 #ifdef IMMEDIATE_PREFIX
4214 fprintf (file, "%s", IMMEDIATE_PREFIX);
4215 #endif
4216 break;
4217
4218 case 'L':
4219 #ifdef LOCAL_LABEL_PREFIX
4220 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4221 #endif
4222 break;
4223
4224 case 'U':
4225 fputs (user_label_prefix, file);
4226 break;
4227
4228 #ifdef ASM_FPRINTF_EXTENSIONS
4229 /* Uppercase letters are reserved for general use by asm_fprintf
4230 and so are not available to target specific code. In order to
4231 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4232 they are defined here. As they get turned into real extensions
4233 to asm_fprintf they should be removed from this list. */
4234 case 'A': case 'B': case 'C': case 'D': case 'E':
4235 case 'F': case 'G': case 'H': case 'J': case 'K':
4236 case 'M': case 'N': case 'P': case 'Q': case 'S':
4237 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4238 break;
4239
4240 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4241 #endif
4242 default:
4243 gcc_unreachable ();
4244 }
4245 break;
4246
4247 default:
4248 putc (c, file);
4249 }
4250 va_end (argptr);
4251 }
4252 \f
4253 /* Return nonzero if this function has no function calls. */
4254
4255 int
4256 leaf_function_p (void)
4257 {
4258 rtx_insn *insn;
4259
4260 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4261 functions even if they call mcount. */
4262 if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4263 return 0;
4264
4265 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4266 {
4267 if (CALL_P (insn)
4268 && ! SIBLING_CALL_P (insn))
4269 return 0;
4270 if (NONJUMP_INSN_P (insn)
4271 && GET_CODE (PATTERN (insn)) == SEQUENCE
4272 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4273 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4274 return 0;
4275 }
4276
4277 return 1;
4278 }
4279
4280 /* Return 1 if branch is a forward branch.
4281 Uses insn_shuid array, so it works only in the final pass. May be used by
4282 output templates to customary add branch prediction hints.
4283 */
4284 int
4285 final_forward_branch_p (rtx_insn *insn)
4286 {
4287 int insn_id, label_id;
4288
4289 gcc_assert (uid_shuid);
4290 insn_id = INSN_SHUID (insn);
4291 label_id = INSN_SHUID (JUMP_LABEL (insn));
4292 /* We've hit some insns that does not have id information available. */
4293 gcc_assert (insn_id && label_id);
4294 return insn_id < label_id;
4295 }
4296
4297 /* On some machines, a function with no call insns
4298 can run faster if it doesn't create its own register window.
4299 When output, the leaf function should use only the "output"
4300 registers. Ordinarily, the function would be compiled to use
4301 the "input" registers to find its arguments; it is a candidate
4302 for leaf treatment if it uses only the "input" registers.
4303 Leaf function treatment means renumbering so the function
4304 uses the "output" registers instead. */
4305
4306 #ifdef LEAF_REGISTERS
4307
4308 /* Return 1 if this function uses only the registers that can be
4309 safely renumbered. */
4310
4311 int
4312 only_leaf_regs_used (void)
4313 {
4314 int i;
4315 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4316
4317 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4318 if ((df_regs_ever_live_p (i) || global_regs[i])
4319 && ! permitted_reg_in_leaf_functions[i])
4320 return 0;
4321
4322 if (crtl->uses_pic_offset_table
4323 && pic_offset_table_rtx != 0
4324 && REG_P (pic_offset_table_rtx)
4325 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4326 return 0;
4327
4328 return 1;
4329 }
4330
4331 /* Scan all instructions and renumber all registers into those
4332 available in leaf functions. */
4333
4334 static void
4335 leaf_renumber_regs (rtx_insn *first)
4336 {
4337 rtx_insn *insn;
4338
4339 /* Renumber only the actual patterns.
4340 The reg-notes can contain frame pointer refs,
4341 and renumbering them could crash, and should not be needed. */
4342 for (insn = first; insn; insn = NEXT_INSN (insn))
4343 if (INSN_P (insn))
4344 leaf_renumber_regs_insn (PATTERN (insn));
4345 }
4346
4347 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4348 available in leaf functions. */
4349
4350 void
4351 leaf_renumber_regs_insn (rtx in_rtx)
4352 {
4353 int i, j;
4354 const char *format_ptr;
4355
4356 if (in_rtx == 0)
4357 return;
4358
4359 /* Renumber all input-registers into output-registers.
4360 renumbered_regs would be 1 for an output-register;
4361 they */
4362
4363 if (REG_P (in_rtx))
4364 {
4365 int newreg;
4366
4367 /* Don't renumber the same reg twice. */
4368 if (in_rtx->used)
4369 return;
4370
4371 newreg = REGNO (in_rtx);
4372 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4373 to reach here as part of a REG_NOTE. */
4374 if (newreg >= FIRST_PSEUDO_REGISTER)
4375 {
4376 in_rtx->used = 1;
4377 return;
4378 }
4379 newreg = LEAF_REG_REMAP (newreg);
4380 gcc_assert (newreg >= 0);
4381 df_set_regs_ever_live (REGNO (in_rtx), false);
4382 df_set_regs_ever_live (newreg, true);
4383 SET_REGNO (in_rtx, newreg);
4384 in_rtx->used = 1;
4385 return;
4386 }
4387
4388 if (INSN_P (in_rtx))
4389 {
4390 /* Inside a SEQUENCE, we find insns.
4391 Renumber just the patterns of these insns,
4392 just as we do for the top-level insns. */
4393 leaf_renumber_regs_insn (PATTERN (in_rtx));
4394 return;
4395 }
4396
4397 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4398
4399 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4400 switch (*format_ptr++)
4401 {
4402 case 'e':
4403 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4404 break;
4405
4406 case 'E':
4407 if (NULL != XVEC (in_rtx, i))
4408 {
4409 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4410 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4411 }
4412 break;
4413
4414 case 'S':
4415 case 's':
4416 case '0':
4417 case 'i':
4418 case 'w':
4419 case 'n':
4420 case 'u':
4421 break;
4422
4423 default:
4424 gcc_unreachable ();
4425 }
4426 }
4427 #endif
4428 \f
4429 /* Turn the RTL into assembly. */
4430 static unsigned int
4431 rest_of_handle_final (void)
4432 {
4433 const char *fnname = get_fnname_from_decl (current_function_decl);
4434
4435 assemble_start_function (current_function_decl, fnname);
4436 final_start_function (get_insns (), asm_out_file, optimize);
4437 final (get_insns (), asm_out_file, optimize);
4438 if (flag_ipa_ra)
4439 collect_fn_hard_reg_usage ();
4440 final_end_function ();
4441
4442 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4443 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4444 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4445 output_function_exception_table (fnname);
4446
4447 assemble_end_function (current_function_decl, fnname);
4448
4449 user_defined_section_attribute = false;
4450
4451 /* Free up reg info memory. */
4452 free_reg_info ();
4453
4454 if (! quiet_flag)
4455 fflush (asm_out_file);
4456
4457 /* Write DBX symbols if requested. */
4458
4459 /* Note that for those inline functions where we don't initially
4460 know for certain that we will be generating an out-of-line copy,
4461 the first invocation of this routine (rest_of_compilation) will
4462 skip over this code by doing a `goto exit_rest_of_compilation;'.
4463 Later on, wrapup_global_declarations will (indirectly) call
4464 rest_of_compilation again for those inline functions that need
4465 to have out-of-line copies generated. During that call, we
4466 *will* be routed past here. */
4467
4468 timevar_push (TV_SYMOUT);
4469 if (!DECL_IGNORED_P (current_function_decl))
4470 debug_hooks->function_decl (current_function_decl);
4471 timevar_pop (TV_SYMOUT);
4472
4473 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4474 DECL_INITIAL (current_function_decl) = error_mark_node;
4475
4476 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4477 && targetm.have_ctors_dtors)
4478 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4479 decl_init_priority_lookup
4480 (current_function_decl));
4481 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4482 && targetm.have_ctors_dtors)
4483 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4484 decl_fini_priority_lookup
4485 (current_function_decl));
4486 return 0;
4487 }
4488
4489 namespace {
4490
4491 const pass_data pass_data_final =
4492 {
4493 RTL_PASS, /* type */
4494 "final", /* name */
4495 OPTGROUP_NONE, /* optinfo_flags */
4496 TV_FINAL, /* tv_id */
4497 0, /* properties_required */
4498 0, /* properties_provided */
4499 0, /* properties_destroyed */
4500 0, /* todo_flags_start */
4501 0, /* todo_flags_finish */
4502 };
4503
4504 class pass_final : public rtl_opt_pass
4505 {
4506 public:
4507 pass_final (gcc::context *ctxt)
4508 : rtl_opt_pass (pass_data_final, ctxt)
4509 {}
4510
4511 /* opt_pass methods: */
4512 virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4513
4514 }; // class pass_final
4515
4516 } // anon namespace
4517
4518 rtl_opt_pass *
4519 make_pass_final (gcc::context *ctxt)
4520 {
4521 return new pass_final (ctxt);
4522 }
4523
4524
4525 static unsigned int
4526 rest_of_handle_shorten_branches (void)
4527 {
4528 /* Shorten branches. */
4529 shorten_branches (get_insns ());
4530 return 0;
4531 }
4532
4533 namespace {
4534
4535 const pass_data pass_data_shorten_branches =
4536 {
4537 RTL_PASS, /* type */
4538 "shorten", /* name */
4539 OPTGROUP_NONE, /* optinfo_flags */
4540 TV_SHORTEN_BRANCH, /* tv_id */
4541 0, /* properties_required */
4542 0, /* properties_provided */
4543 0, /* properties_destroyed */
4544 0, /* todo_flags_start */
4545 0, /* todo_flags_finish */
4546 };
4547
4548 class pass_shorten_branches : public rtl_opt_pass
4549 {
4550 public:
4551 pass_shorten_branches (gcc::context *ctxt)
4552 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4553 {}
4554
4555 /* opt_pass methods: */
4556 virtual unsigned int execute (function *)
4557 {
4558 return rest_of_handle_shorten_branches ();
4559 }
4560
4561 }; // class pass_shorten_branches
4562
4563 } // anon namespace
4564
4565 rtl_opt_pass *
4566 make_pass_shorten_branches (gcc::context *ctxt)
4567 {
4568 return new pass_shorten_branches (ctxt);
4569 }
4570
4571
4572 static unsigned int
4573 rest_of_clean_state (void)
4574 {
4575 rtx_insn *insn, *next;
4576 FILE *final_output = NULL;
4577 int save_unnumbered = flag_dump_unnumbered;
4578 int save_noaddr = flag_dump_noaddr;
4579
4580 if (flag_dump_final_insns)
4581 {
4582 final_output = fopen (flag_dump_final_insns, "a");
4583 if (!final_output)
4584 {
4585 error ("could not open final insn dump file %qs: %m",
4586 flag_dump_final_insns);
4587 flag_dump_final_insns = NULL;
4588 }
4589 else
4590 {
4591 flag_dump_noaddr = flag_dump_unnumbered = 1;
4592 if (flag_compare_debug_opt || flag_compare_debug)
4593 dump_flags |= TDF_NOUID;
4594 dump_function_header (final_output, current_function_decl,
4595 dump_flags);
4596 final_insns_dump_p = true;
4597
4598 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4599 if (LABEL_P (insn))
4600 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4601 else
4602 {
4603 if (NOTE_P (insn))
4604 set_block_for_insn (insn, NULL);
4605 INSN_UID (insn) = 0;
4606 }
4607 }
4608 }
4609
4610 /* It is very important to decompose the RTL instruction chain here:
4611 debug information keeps pointing into CODE_LABEL insns inside the function
4612 body. If these remain pointing to the other insns, we end up preserving
4613 whole RTL chain and attached detailed debug info in memory. */
4614 for (insn = get_insns (); insn; insn = next)
4615 {
4616 next = NEXT_INSN (insn);
4617 SET_NEXT_INSN (insn) = NULL;
4618 SET_PREV_INSN (insn) = NULL;
4619
4620 if (final_output
4621 && (!NOTE_P (insn) ||
4622 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4623 && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4624 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4625 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4626 && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4627 print_rtl_single (final_output, insn);
4628 }
4629
4630 if (final_output)
4631 {
4632 flag_dump_noaddr = save_noaddr;
4633 flag_dump_unnumbered = save_unnumbered;
4634 final_insns_dump_p = false;
4635
4636 if (fclose (final_output))
4637 {
4638 error ("could not close final insn dump file %qs: %m",
4639 flag_dump_final_insns);
4640 flag_dump_final_insns = NULL;
4641 }
4642 }
4643
4644 /* In case the function was not output,
4645 don't leave any temporary anonymous types
4646 queued up for sdb output. */
4647 #ifdef SDB_DEBUGGING_INFO
4648 if (write_symbols == SDB_DEBUG)
4649 sdbout_types (NULL_TREE);
4650 #endif
4651
4652 flag_rerun_cse_after_global_opts = 0;
4653 reload_completed = 0;
4654 epilogue_completed = 0;
4655 #ifdef STACK_REGS
4656 regstack_completed = 0;
4657 #endif
4658
4659 /* Clear out the insn_length contents now that they are no
4660 longer valid. */
4661 init_insn_lengths ();
4662
4663 /* Show no temporary slots allocated. */
4664 init_temp_slots ();
4665
4666 free_bb_for_insn ();
4667
4668 delete_tree_ssa (cfun);
4669
4670 /* We can reduce stack alignment on call site only when we are sure that
4671 the function body just produced will be actually used in the final
4672 executable. */
4673 if (decl_binds_to_current_def_p (current_function_decl))
4674 {
4675 unsigned int pref = crtl->preferred_stack_boundary;
4676 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4677 pref = crtl->stack_alignment_needed;
4678 cgraph_node::rtl_info (current_function_decl)
4679 ->preferred_incoming_stack_boundary = pref;
4680 }
4681
4682 /* Make sure volatile mem refs aren't considered valid operands for
4683 arithmetic insns. We must call this here if this is a nested inline
4684 function, since the above code leaves us in the init_recog state,
4685 and the function context push/pop code does not save/restore volatile_ok.
4686
4687 ??? Maybe it isn't necessary for expand_start_function to call this
4688 anymore if we do it here? */
4689
4690 init_recog_no_volatile ();
4691
4692 /* We're done with this function. Free up memory if we can. */
4693 free_after_parsing (cfun);
4694 free_after_compilation (cfun);
4695 return 0;
4696 }
4697
4698 namespace {
4699
4700 const pass_data pass_data_clean_state =
4701 {
4702 RTL_PASS, /* type */
4703 "*clean_state", /* name */
4704 OPTGROUP_NONE, /* optinfo_flags */
4705 TV_FINAL, /* tv_id */
4706 0, /* properties_required */
4707 0, /* properties_provided */
4708 PROP_rtl, /* properties_destroyed */
4709 0, /* todo_flags_start */
4710 0, /* todo_flags_finish */
4711 };
4712
4713 class pass_clean_state : public rtl_opt_pass
4714 {
4715 public:
4716 pass_clean_state (gcc::context *ctxt)
4717 : rtl_opt_pass (pass_data_clean_state, ctxt)
4718 {}
4719
4720 /* opt_pass methods: */
4721 virtual unsigned int execute (function *)
4722 {
4723 return rest_of_clean_state ();
4724 }
4725
4726 }; // class pass_clean_state
4727
4728 } // anon namespace
4729
4730 rtl_opt_pass *
4731 make_pass_clean_state (gcc::context *ctxt)
4732 {
4733 return new pass_clean_state (ctxt);
4734 }
4735
4736 /* Return true if INSN is a call to the current function. */
4737
4738 static bool
4739 self_recursive_call_p (rtx_insn *insn)
4740 {
4741 tree fndecl = get_call_fndecl (insn);
4742 return (fndecl == current_function_decl
4743 && decl_binds_to_current_def_p (fndecl));
4744 }
4745
4746 /* Collect hard register usage for the current function. */
4747
4748 static void
4749 collect_fn_hard_reg_usage (void)
4750 {
4751 rtx_insn *insn;
4752 #ifdef STACK_REGS
4753 int i;
4754 #endif
4755 struct cgraph_rtl_info *node;
4756 HARD_REG_SET function_used_regs;
4757
4758 /* ??? To be removed when all the ports have been fixed. */
4759 if (!targetm.call_fusage_contains_non_callee_clobbers)
4760 return;
4761
4762 CLEAR_HARD_REG_SET (function_used_regs);
4763
4764 for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4765 {
4766 HARD_REG_SET insn_used_regs;
4767
4768 if (!NONDEBUG_INSN_P (insn))
4769 continue;
4770
4771 if (CALL_P (insn)
4772 && !self_recursive_call_p (insn))
4773 {
4774 if (!get_call_reg_set_usage (insn, &insn_used_regs,
4775 call_used_reg_set))
4776 return;
4777
4778 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4779 }
4780
4781 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4782 IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4783 }
4784
4785 /* Be conservative - mark fixed and global registers as used. */
4786 IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);
4787
4788 #ifdef STACK_REGS
4789 /* Handle STACK_REGS conservatively, since the df-framework does not
4790 provide accurate information for them. */
4791
4792 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4793 SET_HARD_REG_BIT (function_used_regs, i);
4794 #endif
4795
4796 /* The information we have gathered is only interesting if it exposes a
4797 register from the call_used_regs that is not used in this function. */
4798 if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
4799 return;
4800
4801 node = cgraph_node::rtl_info (current_function_decl);
4802 gcc_assert (node != NULL);
4803
4804 COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
4805 node->function_used_regs_valid = 1;
4806 }
4807
4808 /* Get the declaration of the function called by INSN. */
4809
4810 static tree
4811 get_call_fndecl (rtx_insn *insn)
4812 {
4813 rtx note, datum;
4814
4815 note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
4816 if (note == NULL_RTX)
4817 return NULL_TREE;
4818
4819 datum = XEXP (note, 0);
4820 if (datum != NULL_RTX)
4821 return SYMBOL_REF_DECL (datum);
4822
4823 return NULL_TREE;
4824 }
4825
4826 /* Return the cgraph_rtl_info of the function called by INSN. Returns NULL for
4827 call targets that can be overwritten. */
4828
4829 static struct cgraph_rtl_info *
4830 get_call_cgraph_rtl_info (rtx_insn *insn)
4831 {
4832 tree fndecl;
4833
4834 if (insn == NULL_RTX)
4835 return NULL;
4836
4837 fndecl = get_call_fndecl (insn);
4838 if (fndecl == NULL_TREE
4839 || !decl_binds_to_current_def_p (fndecl))
4840 return NULL;
4841
4842 return cgraph_node::rtl_info (fndecl);
4843 }
4844
4845 /* Find hard registers used by function call instruction INSN, and return them
4846 in REG_SET. Return DEFAULT_SET in REG_SET if not found. */
4847
4848 bool
4849 get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
4850 HARD_REG_SET default_set)
4851 {
4852 if (flag_ipa_ra)
4853 {
4854 struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
4855 if (node != NULL
4856 && node->function_used_regs_valid)
4857 {
4858 COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
4859 AND_HARD_REG_SET (*reg_set, default_set);
4860 return true;
4861 }
4862 }
4863
4864 COPY_HARD_REG_SET (*reg_set, default_set);
4865 return false;
4866 }