24833815276530d744d83e8a20a7ba61b20717f3
[gcc.git] / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
22
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
28
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
33
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
36
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
40
41 The code for the function prologue and epilogue are generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
44
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "memmodel.h"
56 #include "tm_p.h"
57 #include "insn-config.h"
58 #include "regs.h"
59 #include "emit-rtl.h"
60 #include "recog.h"
61 #include "cgraph.h"
62 #include "tree-pretty-print.h" /* for dump_function_header */
63 #include "varasm.h"
64 #include "insn-attr.h"
65 #include "conditions.h"
66 #include "flags.h"
67 #include "output.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "toplev.h" /* exact_log2, floor_log2 */
71 #include "reload.h"
72 #include "intl.h"
73 #include "cfgrtl.h"
74 #include "debug.h"
75 #include "tree-pass.h"
76 #include "tree-ssa.h"
77 #include "cfgloop.h"
78 #include "params.h"
79 #include "asan.h"
80 #include "rtl-iter.h"
81 #include "print-rtl.h"
82
83 #ifdef XCOFF_DEBUGGING_INFO
84 #include "xcoffout.h" /* Needed for external data declarations. */
85 #endif
86
87 #include "dwarf2out.h"
88
89 #ifdef DBX_DEBUGGING_INFO
90 #include "dbxout.h"
91 #endif
92
93 #include "sdbout.h"
94
95 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
96 So define a null default for it to save conditionalization later. */
97 #ifndef CC_STATUS_INIT
98 #define CC_STATUS_INIT
99 #endif
100
101 /* Is the given character a logical line separator for the assembler? */
102 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
103 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
104 #endif
105
106 #ifndef JUMP_TABLES_IN_TEXT_SECTION
107 #define JUMP_TABLES_IN_TEXT_SECTION 0
108 #endif
109
110 /* Bitflags used by final_scan_insn. */
111 #define SEEN_NOTE 1
112 #define SEEN_EMITTED 2
113
114 /* Last insn processed by final_scan_insn. */
115 static rtx_insn *debug_insn;
116 rtx_insn *current_output_insn;
117
118 /* Line number of last NOTE. */
119 static int last_linenum;
120
121 /* Last discriminator written to assembly. */
122 static int last_discriminator;
123
124 /* Discriminator of current block. */
125 static int discriminator;
126
127 /* Highest line number in current block. */
128 static int high_block_linenum;
129
130 /* Likewise for function. */
131 static int high_function_linenum;
132
133 /* Filename of last NOTE. */
134 static const char *last_filename;
135
136 /* Override filename and line number. */
137 static const char *override_filename;
138 static int override_linenum;
139
140 /* Whether to force emission of a line note before the next insn. */
141 static bool force_source_line = false;
142
143 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
144
145 /* Nonzero while outputting an `asm' with operands.
146 This means that inconsistencies are the user's fault, so don't die.
147 The precise value is the insn being output, to pass to error_for_asm. */
148 const rtx_insn *this_is_asm_operands;
149
150 /* Number of operands of this insn, for an `asm' with operands. */
151 static unsigned int insn_noperands;
152
153 /* Compare optimization flag. */
154
155 static rtx last_ignored_compare = 0;
156
157 /* Assign a unique number to each insn that is output.
158 This can be used to generate unique local labels. */
159
160 static int insn_counter = 0;
161
162 /* This variable contains machine-dependent flags (defined in tm.h)
163 set and examined by output routines
164 that describe how to interpret the condition codes properly. */
165
166 CC_STATUS cc_status;
167
168 /* During output of an insn, this contains a copy of cc_status
169 from before the insn. */
170
171 CC_STATUS cc_prev_status;
172
173 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
174
175 static int block_depth;
176
177 /* Nonzero if have enabled APP processing of our assembler output. */
178
179 static int app_on;
180
181 /* If we are outputting an insn sequence, this contains the sequence rtx.
182 Zero otherwise. */
183
184 rtx_sequence *final_sequence;
185
186 #ifdef ASSEMBLER_DIALECT
187
188 /* Number of the assembler dialect to use, starting at 0. */
189 static int dialect_number;
190 #endif
191
192 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
193 rtx current_insn_predicate;
194
195 /* True if printing into -fdump-final-insns= dump. */
196 bool final_insns_dump_p;
197
198 /* True if profile_function should be called, but hasn't been called yet. */
199 static bool need_profile_function;
200
201 static int asm_insn_count (rtx);
202 static void profile_function (FILE *);
203 static void profile_after_prologue (FILE *);
204 static bool notice_source_line (rtx_insn *, bool *);
205 static rtx walk_alter_subreg (rtx *, bool *);
206 static void output_asm_name (void);
207 static void output_alternate_entry_point (FILE *, rtx_insn *);
208 static tree get_mem_expr_from_op (rtx, int *);
209 static void output_asm_operand_names (rtx *, int *, int);
210 #ifdef LEAF_REGISTERS
211 static void leaf_renumber_regs (rtx_insn *);
212 #endif
213 #if HAVE_cc0
214 static int alter_cond (rtx);
215 #endif
216 #ifndef ADDR_VEC_ALIGN
217 static int final_addr_vec_align (rtx_insn *);
218 #endif
219 static int align_fuzz (rtx, rtx, int, unsigned);
220 static void collect_fn_hard_reg_usage (void);
221 static tree get_call_fndecl (rtx_insn *);
222 \f
223 /* Initialize data in final at the beginning of a compilation. */
224
225 void
226 init_final (const char *filename ATTRIBUTE_UNUSED)
227 {
228 app_on = 0;
229 final_sequence = 0;
230
231 #ifdef ASSEMBLER_DIALECT
232 dialect_number = ASSEMBLER_DIALECT;
233 #endif
234 }
235
/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */
void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: targets that emit explicit prologue/epilogue
     text override this hook.  */
}
245
/* Default hook called when the output switches between hot and cold
   text sections for DECL; NEW_IS_COLD says which section we entered.
   Does nothing by default.  */
void
default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
					 tree decl ATTRIBUTE_UNUSED,
					 bool new_is_cold ATTRIBUTE_UNUSED)
{
}
252
/* Default target hook that outputs nothing to a stream.  */
void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}
258
259 /* Enable APP processing of subsequent output.
260 Used before the output from an `asm' statement. */
261
262 void
263 app_enable (void)
264 {
265 if (! app_on)
266 {
267 fputs (ASM_APP_ON, asm_out_file);
268 app_on = 1;
269 }
270 }
271
272 /* Disable APP processing of subsequent output.
273 Called from varasm.c before most kinds of output. */
274
275 void
276 app_disable (void)
277 {
278 if (app_on)
279 {
280 fputs (ASM_APP_OFF, asm_out_file);
281 app_on = 0;
282 }
283 }
284 \f
285 /* Return the number of slots filled in the current
286 delayed branch sequence (we don't count the insn needing the
287 delay slot). Zero if not in a delayed branch sequence. */
288
289 int
290 dbr_sequence_length (void)
291 {
292 if (final_sequence != 0)
293 return XVECLEN (final_sequence, 0) - 1;
294 else
295 return 0;
296 }
297 \f
298 /* The next two pages contain routines used to compute the length of an insn
299 and to shorten branches. */
300
301 /* Arrays for insn lengths, and addresses. The latter is referenced by
302 `insn_current_length'. */
303
304 static int *insn_lengths;
305
306 vec<int> insn_addresses_;
307
308 /* Max uid for which the above arrays are valid. */
309 static int insn_lengths_max_uid;
310
311 /* Address of insn being processed. Used by `insn_current_length'. */
312 int insn_current_address;
313
314 /* Address of insn being processed in previous iteration. */
315 int insn_last_address;
316
317 /* known invariant alignment of insn being processed. */
318 int insn_current_align;
319
320 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
321 gives the next following alignment insn that increases the known
322 alignment, or NULL_RTX if there is no such insn.
323 For any alignment obtained this way, we can again index uid_align with
324 its uid to obtain the next following align that in turn increases the
325 alignment, till we reach NULL_RTX; the sequence obtained this way
326 for each insn we'll call the alignment chain of this insn in the following
327 comments. */
328
329 struct label_alignment
330 {
331 short alignment;
332 short max_skip;
333 };
334
335 static rtx *uid_align;
336 static int *uid_shuid;
337 static struct label_alignment *label_align;
338
339 /* Indicate that branch shortening hasn't yet been done. */
340
341 void
342 init_insn_lengths (void)
343 {
344 if (uid_shuid)
345 {
346 free (uid_shuid);
347 uid_shuid = 0;
348 }
349 if (insn_lengths)
350 {
351 free (insn_lengths);
352 insn_lengths = 0;
353 insn_lengths_max_uid = 0;
354 }
355 if (HAVE_ATTR_length)
356 INSN_ADDRESSES_FREE ();
357 if (uid_align)
358 {
359 free (uid_align);
360 uid_align = 0;
361 }
362 }
363
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
   length.  */
static int
get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
{
  rtx body;
  int i;
  int length = 0;

  /* Without a length attribute, every insn is length 0.  */
  if (!HAVE_ATTR_length)
    return 0;

  /* If shorten_branches has run and recorded this insn, the stored
     length is authoritative.  */
  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];
  else
    switch (GET_CODE (insn))
      {
      case NOTE:
      case BARRIER:
      case CODE_LABEL:
      case DEBUG_INSN:
	/* These emit no machine instructions.  */
	return 0;

      case CALL_INSN:
      case JUMP_INSN:
	length = fallback_fn (insn);
	break;

      case INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	  return 0;

	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	  /* Inline asm: estimate as (number of asm statements) times the
	     per-statement fallback length.  */
	  length = asm_insn_count (body) * fallback_fn (insn);
	else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
	  /* A delay-slot SEQUENCE: sum the lengths of its members.  */
	  for (i = 0; i < seq->len (); i++)
	    length += get_attr_length_1 (seq->insn (i), fallback_fn);
	else
	  length = fallback_fn (insn);
	break;

      default:
	break;
      }

#ifdef ADJUST_INSN_LENGTH
  /* Give the target a chance to tweak the computed length.  */
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
}
416
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length (the
   pessimistic estimate from insn_default_length).  */
int
get_attr_length (rtx_insn *insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}
424
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its minimum length (the
   optimistic estimate from insn_min_length).  */
int
get_attr_min_length (rtx_insn *insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}
432 \f
433 /* Code to handle alignment inside shorten_branches. */
434
435 /* Here is an explanation how the algorithm in align_fuzz can give
436 proper results:
437
438 Call a sequence of instructions beginning with alignment point X
439 and continuing until the next alignment point `block X'. When `X'
440 is used in an expression, it means the alignment value of the
441 alignment point.
442
443 Call the distance between the start of the first insn of block X, and
444 the end of the last insn of block X `IX', for the `inner size of X'.
445 This is clearly the sum of the instruction lengths.
446
447 Likewise with the next alignment-delimited block following X, which we
448 shall call block Y.
449
450 Call the distance between the start of the first insn of block X, and
451 the start of the first insn of block Y `OX', for the `outer size of X'.
452
453 The estimated padding is then OX - IX.
454
455 OX can be safely estimated as
456
457 if (X >= Y)
458 OX = round_up(IX, Y)
459 else
460 OX = round_up(IX, X) + Y - X
461
462 Clearly est(IX) >= real(IX), because that only depends on the
463 instruction lengths, and those being overestimated is a given.
464
465 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
466 we needn't worry about that when thinking about OX.
467
468 When X >= Y, the alignment provided by Y adds no uncertainty factor
469 for branch ranges starting before X, so we can just round what we have.
470 But when X < Y, we don't know anything about the, so to speak,
471 `middle bits', so we have to assume the worst when aligning up from an
472 address mod X to one mod Y, which is Y - X. */
473
474 #ifndef LABEL_ALIGN
475 #define LABEL_ALIGN(LABEL) align_labels_log
476 #endif
477
478 #ifndef LOOP_ALIGN
479 #define LOOP_ALIGN(LABEL) align_loops_log
480 #endif
481
482 #ifndef LABEL_ALIGN_AFTER_BARRIER
483 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
484 #endif
485
486 #ifndef JUMP_ALIGN
487 #define JUMP_ALIGN(LABEL) align_jumps_log
488 #endif
489
/* Default hook: maximum bytes to skip when aligning a label that
   follows a barrier.  Zero means never pad with a skip.  */
int
default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return 0;
}
495
/* Default hook: maximum bytes to skip when aligning a loop head,
   taken from the -falign-loops option machinery.  */
int
default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_loops_max_skip;
}
501
/* Default hook: maximum bytes to skip when aligning an ordinary label,
   taken from the -falign-labels option machinery.  */
int
default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_labels_max_skip;
}
507
/* Default hook: maximum bytes to skip when aligning a jump target,
   taken from the -falign-jumps option machinery.  */
int
default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return align_jumps_max_skip;
}
513
514 #ifndef ADDR_VEC_ALIGN
515 static int
516 final_addr_vec_align (rtx_insn *addr_vec)
517 {
518 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
519
520 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
521 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
522 return exact_log2 (align);
523
524 }
525
526 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
527 #endif
528
529 #ifndef INSN_LENGTH_ALIGNMENT
530 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
531 #endif
532
533 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
534
535 static int min_labelno, max_labelno;
536
537 #define LABEL_TO_ALIGNMENT(LABEL) \
538 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
539
540 #define LABEL_TO_MAX_SKIP(LABEL) \
541 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
542
543 /* For the benefit of port specific code do this also as a function. */
544
545 int
546 label_to_alignment (rtx label)
547 {
548 if (CODE_LABEL_NUMBER (label) <= max_labelno)
549 return LABEL_TO_ALIGNMENT (label);
550 return 0;
551 }
552
553 int
554 label_to_max_skip (rtx label)
555 {
556 if (CODE_LABEL_NUMBER (label) <= max_labelno)
557 return LABEL_TO_MAX_SKIP (label);
558 return 0;
559 }
560
561 /* The differences in addresses
562 between a branch and its target might grow or shrink depending on
563 the alignment the start insn of the range (the branch for a forward
564 branch or the label for a backward branch) starts out on; if these
565 differences are used naively, they can even oscillate infinitely.
566 We therefore want to compute a 'worst case' address difference that
567 is independent of the alignment the start insn of the range end
568 up on, and that is at least as large as the actual difference.
569 The function align_fuzz calculates the amount we have to add to the
570 naively computed difference, by traversing the part of the alignment
571 chain of the start insn of the range that is in front of the end insn
572 of the range, and considering for each alignment the maximum amount
573 that it might contribute to a size increase.
574
575 For casesi tables, we also want to know worst case minimum amounts of
576 address difference, in case a machine description wants to introduce
577 some common offset that is added to all offsets in a table.
578 For this purpose, align_fuzz with a growth argument of 0 computes the
579 appropriate adjustment. */
580
/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */

static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  /* Walk START's alignment chain (uid_align links each insn to the
     next alignment point that increases the known alignment) until we
     pass END.  */
  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      /* Address of the alignment point itself, i.e. before its own
	 (possibly padded) length was added.  */
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      known_align_log = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << known_align_log;
      /* An alignment point weaker than what is already known adds no
	 uncertainty.  */
      if (new_align < known_align)
	continue;
      /* With GROWTH == ~0 this accumulates the worst-case extra
	 padding, with GROWTH == 0 the worst-case shrink, contributed
	 by stepping up from KNOWN_ALIGN to NEW_ALIGN at this point.  */
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}
615
/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */

int
insn_current_reference_address (rtx_insn *branch)
{
  rtx dest;
  int seq_uid;

  /* Addresses have not been computed yet; no meaningful answer.  */
  if (! INSN_ADDRESSES_SET_P ())
    return 0;

  /* If BRANCH sits inside a delay-slot SEQUENCE, this recovers the
     containing SEQUENCE insn; otherwise it is BRANCH itself.  */
  rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
  seq_uid = INSN_UID (seq);
  if (!JUMP_P (branch))
    /* This can happen for example on the PA; the objective is to know the
       offset to address something in front of the start of the function.
       Thus, we can treat it like a backward branch.
       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
       any alignment we'd encounter, so we skip the call to align_fuzz.  */
    return insn_current_address;
  dest = JUMP_LABEL (branch);

  /* BRANCH has no proper alignment chain set, so use SEQ.
     BRANCH also has no INSN_SHUID.  */
  if (INSN_SHUID (seq) < INSN_SHUID (dest))
    {
      /* Forward branch.  */
      return (insn_last_address + insn_lengths[seq_uid]
	      - align_fuzz (seq, dest, length_unit_log, ~0));
    }
  else
    {
      /* Backward branch.  */
      return (insn_current_address
	      + align_fuzz (dest, seq, length_unit_log, ~0));
    }
}
663 \f
/* Compute branch alignments based on frequency information in the
   CFG.  Fills in label_align for every label heading a basic block;
   returns 0 (pass todo flags).  */

unsigned int
compute_alignments (void)
{
  int log, max_skip, max_log;
  basic_block bb;
  int freq_max = 0;
  int freq_threshold = 0;

  /* Discard any previously computed table before re-sizing it.  */
  if (label_align)
    {
      free (label_align);
      label_align = 0;
    }

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);

  /* If not optimizing or optimizing for size, don't assign any alignments.  */
  if (! optimize || optimize_function_for_size_p (cfun))
    return 0;

  if (dump_file)
    {
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, TDF_DETAILS);
      flow_loops_dump (dump_file, NULL, 1);
    }
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  /* Find the hottest block; the alignment threshold is a fraction of
     its frequency (PARAM_ALIGN_THRESHOLD).  */
  FOR_EACH_BB_FN (bb, cfun)
    if (bb->frequency > freq_max)
      freq_max = bb->frequency;
  freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);

  if (dump_file)
    fprintf (dump_file, "freq_max: %i\n",freq_max);
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *label = BB_HEAD (bb);
      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
      edge e;
      edge_iterator ei;

      /* Only blocks that start with a label and are worth optimizing
	 for speed are candidates.  */
      if (!LABEL_P (label)
	  || optimize_bb_for_size_p (bb))
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
		     bb->index, bb->frequency, bb->loop_father->num,
		     bb_loop_depth (bb));
	  continue;
	}
      max_log = LABEL_ALIGN (label);
      max_skip = targetm.asm_out.label_align_max_skip (label);

      /* Split the incoming frequency between fallthrough and branch
	 edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
	  else
	    branch_frequency += EDGE_FREQUENCY (e);
	}
      if (dump_file)
	{
	  fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
		   " %2i fall %4i branch %4i",
		   bb->index, bb->frequency, bb->loop_father->num,
		   bb_loop_depth (bb),
		   fallthru_frequency, branch_frequency);
	  if (!bb->loop_father->inner && bb->loop_father->num)
	    fprintf (dump_file, " inner_loop");
	  if (bb->loop_father->header == bb)
	    fprintf (dump_file, " loop_header");
	  fprintf (dump_file, "\n");
	}

      /* There are two purposes to align block with no fallthru incoming edge:
	 1) to avoid fetch stalls when branch destination is near cache boundary
	 2) to improve cache efficiency in case the previous block is not executed
	    (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than the predecessor and the predecessor is likely to not be executed
	 when function is called.  */

      if (!has_fallthru
	  && (branch_frequency > freq_threshold
	      || (bb->frequency > bb->prev_bb->frequency * 10
		  && (bb->prev_bb->frequency
		      <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
	{
	  log = JUMP_ALIGN (label);
	  if (dump_file)
	    fprintf (dump_file, " jump alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = targetm.asm_out.jump_align_max_skip (label);
	    }
	}
      /* In case block is frequent and reached mostly by non-fallthru edge,
	 align it.  It is most likely a first block of loop.  */
      if (has_fallthru
	  && !(single_succ_p (bb)
	       && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  && optimize_bb_for_speed_p (bb)
	  && branch_frequency + fallthru_frequency > freq_threshold
	  && (branch_frequency
	      > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
	{
	  log = LOOP_ALIGN (label);
	  if (dump_file)
	    fprintf (dump_file, " internal loop alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = targetm.asm_out.loop_align_max_skip (label);
	    }
	}
      LABEL_TO_ALIGNMENT (label) = max_log;
      LABEL_TO_MAX_SKIP (label) = max_skip;
    }

  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);
  return 0;
}
796
797 /* Grow the LABEL_ALIGN array after new labels are created. */
798
799 static void
800 grow_label_align (void)
801 {
802 int old = max_labelno;
803 int n_labels;
804 int n_old_labels;
805
806 max_labelno = max_label_num ();
807
808 n_labels = max_labelno - min_labelno + 1;
809 n_old_labels = old - min_labelno + 1;
810
811 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
812
813 /* Range of labels grows monotonically in the function. Failing here
814 means that the initialization of array got lost. */
815 gcc_assert (n_old_labels <= n_labels);
816
817 memset (label_align + n_old_labels, 0,
818 (n_labels - n_old_labels) * sizeof (struct label_alignment));
819 }
820
821 /* Update the already computed alignment information. LABEL_PAIRS is a vector
822 made up of pairs of labels for which the alignment information of the first
823 element will be copied from that of the second element. */
824
825 void
826 update_alignments (vec<rtx> &label_pairs)
827 {
828 unsigned int i = 0;
829 rtx iter, label = NULL_RTX;
830
831 if (max_labelno != max_label_num ())
832 grow_label_align ();
833
834 FOR_EACH_VEC_ELT (label_pairs, i, iter)
835 if (i & 1)
836 {
837 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
838 LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
839 }
840 else
841 label = iter;
842 }
843
namespace {

/* Metadata for the "alignments" RTL pass; see tree-pass.h for field
   meanings.  */
const pass_data pass_data_compute_alignments =
{
  RTL_PASS, /* type */
  "alignments", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that simply runs compute_alignments on the current
   function.  */
class pass_compute_alignments : public rtl_opt_pass
{
public:
  pass_compute_alignments (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_compute_alignments, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return compute_alignments (); }

}; // class pass_compute_alignments

} // anon namespace
872
/* Factory for the "alignments" pass; the pass manager owns and frees
   the returned object.  */
rtl_opt_pass *
make_pass_compute_alignments (gcc::context *ctxt)
{
  return new pass_compute_alignments (ctxt);
}
878
879 \f
880 /* Make a pass over all insns and compute their actual lengths by shortening
881 any branches of variable length if possible. */
882
883 /* shorten_branches might be called multiple times: for example, the SH
884 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
885 In order to do this, it needs proper length information, which it obtains
886 by calling shorten_branches. This cannot be collapsed with
887 shorten_branches itself into a single pass unless we also want to integrate
888 reorg.c, since the branch splitting exposes new instructions with delay
889 slots. */
890
891 void
892 shorten_branches (rtx_insn *first)
893 {
894 rtx_insn *insn;
895 int max_uid;
896 int i;
897 int max_log;
898 int max_skip;
899 #define MAX_CODE_ALIGN 16
900 rtx_insn *seq;
901 int something_changed = 1;
902 char *varying_length;
903 rtx body;
904 int uid;
905 rtx align_tab[MAX_CODE_ALIGN];
906
907 /* Compute maximum UID and allocate label_align / uid_shuid. */
908 max_uid = get_max_uid ();
909
910 /* Free uid_shuid before reallocating it. */
911 free (uid_shuid);
912
913 uid_shuid = XNEWVEC (int, max_uid);
914
915 if (max_labelno != max_label_num ())
916 grow_label_align ();
917
918 /* Initialize label_align and set up uid_shuid to be strictly
919 monotonically rising with insn order. */
920 /* We use max_log here to keep track of the maximum alignment we want to
921 impose on the next CODE_LABEL (or the current one if we are processing
922 the CODE_LABEL itself). */
923
924 max_log = 0;
925 max_skip = 0;
926
927 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
928 {
929 int log;
930
931 INSN_SHUID (insn) = i++;
932 if (INSN_P (insn))
933 continue;
934
935 if (LABEL_P (insn))
936 {
937 rtx_insn *next;
938 bool next_is_jumptable;
939
940 /* Merge in alignments computed by compute_alignments. */
941 log = LABEL_TO_ALIGNMENT (insn);
942 if (max_log < log)
943 {
944 max_log = log;
945 max_skip = LABEL_TO_MAX_SKIP (insn);
946 }
947
948 next = next_nonnote_insn (insn);
949 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
950 if (!next_is_jumptable)
951 {
952 log = LABEL_ALIGN (insn);
953 if (max_log < log)
954 {
955 max_log = log;
956 max_skip = targetm.asm_out.label_align_max_skip (insn);
957 }
958 }
959 /* ADDR_VECs only take room if read-only data goes into the text
960 section. */
961 if ((JUMP_TABLES_IN_TEXT_SECTION
962 || readonly_data_section == text_section)
963 && next_is_jumptable)
964 {
965 log = ADDR_VEC_ALIGN (next);
966 if (max_log < log)
967 {
968 max_log = log;
969 max_skip = targetm.asm_out.label_align_max_skip (insn);
970 }
971 }
972 LABEL_TO_ALIGNMENT (insn) = max_log;
973 LABEL_TO_MAX_SKIP (insn) = max_skip;
974 max_log = 0;
975 max_skip = 0;
976 }
977 else if (BARRIER_P (insn))
978 {
979 rtx_insn *label;
980
981 for (label = insn; label && ! INSN_P (label);
982 label = NEXT_INSN (label))
983 if (LABEL_P (label))
984 {
985 log = LABEL_ALIGN_AFTER_BARRIER (insn);
986 if (max_log < log)
987 {
988 max_log = log;
989 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
990 }
991 break;
992 }
993 }
994 }
995 if (!HAVE_ATTR_length)
996 return;
997
998 /* Allocate the rest of the arrays. */
999 insn_lengths = XNEWVEC (int, max_uid);
1000 insn_lengths_max_uid = max_uid;
1001 /* Syntax errors can lead to labels being outside of the main insn stream.
1002 Initialize insn_addresses, so that we get reproducible results. */
1003 INSN_ADDRESSES_ALLOC (max_uid);
1004
1005 varying_length = XCNEWVEC (char, max_uid);
1006
1007 /* Initialize uid_align. We scan instructions
1008 from end to start, and keep in align_tab[n] the last seen insn
1009 that does an alignment of at least n+1, i.e. the successor
1010 in the alignment chain for an insn that does / has a known
1011 alignment of n. */
1012 uid_align = XCNEWVEC (rtx, max_uid);
1013
1014 for (i = MAX_CODE_ALIGN; --i >= 0;)
1015 align_tab[i] = NULL_RTX;
1016 seq = get_last_insn ();
1017 for (; seq; seq = PREV_INSN (seq))
1018 {
1019 int uid = INSN_UID (seq);
1020 int log;
1021 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1022 uid_align[uid] = align_tab[0];
1023 if (log)
1024 {
1025 /* Found an alignment label. */
1026 uid_align[uid] = align_tab[log];
1027 for (i = log - 1; i >= 0; i--)
1028 align_tab[i] = seq;
1029 }
1030 }
1031
1032 /* When optimizing, we start assuming minimum length, and keep increasing
1033 lengths as we find the need for this, till nothing changes.
1034 When not optimizing, we start assuming maximum lengths, and
1035 do a single pass to update the lengths. */
1036 bool increasing = optimize != 0;
1037
1038 #ifdef CASE_VECTOR_SHORTEN_MODE
1039 if (optimize)
1040 {
1041 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1042 label fields. */
1043
1044 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1045 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1046 int rel;
1047
1048 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1049 {
1050 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1051 int len, i, min, max, insn_shuid;
1052 int min_align;
1053 addr_diff_vec_flags flags;
1054
1055 if (! JUMP_TABLE_DATA_P (insn)
1056 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1057 continue;
1058 pat = PATTERN (insn);
1059 len = XVECLEN (pat, 1);
1060 gcc_assert (len > 0);
1061 min_align = MAX_CODE_ALIGN;
1062 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1063 {
1064 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1065 int shuid = INSN_SHUID (lab);
1066 if (shuid < min)
1067 {
1068 min = shuid;
1069 min_lab = lab;
1070 }
1071 if (shuid > max)
1072 {
1073 max = shuid;
1074 max_lab = lab;
1075 }
1076 if (min_align > LABEL_TO_ALIGNMENT (lab))
1077 min_align = LABEL_TO_ALIGNMENT (lab);
1078 }
1079 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1080 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1081 insn_shuid = INSN_SHUID (insn);
1082 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1083 memset (&flags, 0, sizeof (flags));
1084 flags.min_align = min_align;
1085 flags.base_after_vec = rel > insn_shuid;
1086 flags.min_after_vec = min > insn_shuid;
1087 flags.max_after_vec = max > insn_shuid;
1088 flags.min_after_base = min > rel;
1089 flags.max_after_base = max > rel;
1090 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1091
1092 if (increasing)
1093 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1094 }
1095 }
1096 #endif /* CASE_VECTOR_SHORTEN_MODE */
1097
1098 /* Compute initial lengths, addresses, and varying flags for each insn. */
1099 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1100
1101 for (insn_current_address = 0, insn = first;
1102 insn != 0;
1103 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1104 {
1105 uid = INSN_UID (insn);
1106
1107 insn_lengths[uid] = 0;
1108
1109 if (LABEL_P (insn))
1110 {
1111 int log = LABEL_TO_ALIGNMENT (insn);
1112 if (log)
1113 {
1114 int align = 1 << log;
1115 int new_address = (insn_current_address + align - 1) & -align;
1116 insn_lengths[uid] = new_address - insn_current_address;
1117 }
1118 }
1119
1120 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1121
1122 if (NOTE_P (insn) || BARRIER_P (insn)
1123 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1124 continue;
1125 if (insn->deleted ())
1126 continue;
1127
1128 body = PATTERN (insn);
1129 if (JUMP_TABLE_DATA_P (insn))
1130 {
1131 /* This only takes room if read-only data goes into the text
1132 section. */
1133 if (JUMP_TABLES_IN_TEXT_SECTION
1134 || readonly_data_section == text_section)
1135 insn_lengths[uid] = (XVECLEN (body,
1136 GET_CODE (body) == ADDR_DIFF_VEC)
1137 * GET_MODE_SIZE (GET_MODE (body)));
1138 /* Alignment is handled by ADDR_VEC_ALIGN. */
1139 }
1140 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1141 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1142 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1143 {
1144 int i;
1145 int const_delay_slots;
1146 if (DELAY_SLOTS)
1147 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1148 else
1149 const_delay_slots = 0;
1150
1151 int (*inner_length_fun) (rtx_insn *)
1152 = const_delay_slots ? length_fun : insn_default_length;
1153 /* Inside a delay slot sequence, we do not do any branch shortening
1154 if the shortening could change the number of delay slots
1155 of the branch. */
1156 for (i = 0; i < body_seq->len (); i++)
1157 {
1158 rtx_insn *inner_insn = body_seq->insn (i);
1159 int inner_uid = INSN_UID (inner_insn);
1160 int inner_length;
1161
1162 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1163 || asm_noperands (PATTERN (inner_insn)) >= 0)
1164 inner_length = (asm_insn_count (PATTERN (inner_insn))
1165 * insn_default_length (inner_insn));
1166 else
1167 inner_length = inner_length_fun (inner_insn);
1168
1169 insn_lengths[inner_uid] = inner_length;
1170 if (const_delay_slots)
1171 {
1172 if ((varying_length[inner_uid]
1173 = insn_variable_length_p (inner_insn)) != 0)
1174 varying_length[uid] = 1;
1175 INSN_ADDRESSES (inner_uid) = (insn_current_address
1176 + insn_lengths[uid]);
1177 }
1178 else
1179 varying_length[inner_uid] = 0;
1180 insn_lengths[uid] += inner_length;
1181 }
1182 }
1183 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1184 {
1185 insn_lengths[uid] = length_fun (insn);
1186 varying_length[uid] = insn_variable_length_p (insn);
1187 }
1188
1189 /* If needed, do any adjustment. */
1190 #ifdef ADJUST_INSN_LENGTH
1191 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1192 if (insn_lengths[uid] < 0)
1193 fatal_insn ("negative insn length", insn);
1194 #endif
1195 }
1196
1197 /* Now loop over all the insns finding varying length insns. For each,
1198 get the current insn length. If it has changed, reflect the change.
1199 When nothing changes for a full pass, we are done. */
1200
1201 while (something_changed)
1202 {
1203 something_changed = 0;
1204 insn_current_align = MAX_CODE_ALIGN - 1;
1205 for (insn_current_address = 0, insn = first;
1206 insn != 0;
1207 insn = NEXT_INSN (insn))
1208 {
1209 int new_length;
1210 #ifdef ADJUST_INSN_LENGTH
1211 int tmp_length;
1212 #endif
1213 int length_align;
1214
1215 uid = INSN_UID (insn);
1216
1217 if (LABEL_P (insn))
1218 {
1219 int log = LABEL_TO_ALIGNMENT (insn);
1220
1221 #ifdef CASE_VECTOR_SHORTEN_MODE
1222 /* If the mode of a following jump table was changed, we
1223 may need to update the alignment of this label. */
1224 rtx_insn *next;
1225 bool next_is_jumptable;
1226
1227 next = next_nonnote_insn (insn);
1228 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1229 if ((JUMP_TABLES_IN_TEXT_SECTION
1230 || readonly_data_section == text_section)
1231 && next_is_jumptable)
1232 {
1233 int newlog = ADDR_VEC_ALIGN (next);
1234 if (newlog != log)
1235 {
1236 log = newlog;
1237 LABEL_TO_ALIGNMENT (insn) = log;
1238 something_changed = 1;
1239 }
1240 }
1241 #endif
1242
1243 if (log > insn_current_align)
1244 {
1245 int align = 1 << log;
1246 int new_address= (insn_current_address + align - 1) & -align;
1247 insn_lengths[uid] = new_address - insn_current_address;
1248 insn_current_align = log;
1249 insn_current_address = new_address;
1250 }
1251 else
1252 insn_lengths[uid] = 0;
1253 INSN_ADDRESSES (uid) = insn_current_address;
1254 continue;
1255 }
1256
1257 length_align = INSN_LENGTH_ALIGNMENT (insn);
1258 if (length_align < insn_current_align)
1259 insn_current_align = length_align;
1260
1261 insn_last_address = INSN_ADDRESSES (uid);
1262 INSN_ADDRESSES (uid) = insn_current_address;
1263
1264 #ifdef CASE_VECTOR_SHORTEN_MODE
1265 if (optimize
1266 && JUMP_TABLE_DATA_P (insn)
1267 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1268 {
1269 rtx body = PATTERN (insn);
1270 int old_length = insn_lengths[uid];
1271 rtx_insn *rel_lab =
1272 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1273 rtx min_lab = XEXP (XEXP (body, 2), 0);
1274 rtx max_lab = XEXP (XEXP (body, 3), 0);
1275 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1276 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1277 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1278 rtx_insn *prev;
1279 int rel_align = 0;
1280 addr_diff_vec_flags flags;
1281 machine_mode vec_mode;
1282
1283 /* Avoid automatic aggregate initialization. */
1284 flags = ADDR_DIFF_VEC_FLAGS (body);
1285
1286 /* Try to find a known alignment for rel_lab. */
1287 for (prev = rel_lab;
1288 prev
1289 && ! insn_lengths[INSN_UID (prev)]
1290 && ! (varying_length[INSN_UID (prev)] & 1);
1291 prev = PREV_INSN (prev))
1292 if (varying_length[INSN_UID (prev)] & 2)
1293 {
1294 rel_align = LABEL_TO_ALIGNMENT (prev);
1295 break;
1296 }
1297
1298 /* See the comment on addr_diff_vec_flags in rtl.h for the
1299 meaning of the flags values. base: REL_LAB vec: INSN */
1300 /* Anything after INSN has still addresses from the last
1301 pass; adjust these so that they reflect our current
1302 estimate for this pass. */
1303 if (flags.base_after_vec)
1304 rel_addr += insn_current_address - insn_last_address;
1305 if (flags.min_after_vec)
1306 min_addr += insn_current_address - insn_last_address;
1307 if (flags.max_after_vec)
1308 max_addr += insn_current_address - insn_last_address;
1309 /* We want to know the worst case, i.e. lowest possible value
1310 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1311 its offset is positive, and we have to be wary of code shrink;
1312 otherwise, it is negative, and we have to be vary of code
1313 size increase. */
1314 if (flags.min_after_base)
1315 {
1316 /* If INSN is between REL_LAB and MIN_LAB, the size
1317 changes we are about to make can change the alignment
1318 within the observed offset, therefore we have to break
1319 it up into two parts that are independent. */
1320 if (! flags.base_after_vec && flags.min_after_vec)
1321 {
1322 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1323 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1324 }
1325 else
1326 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1327 }
1328 else
1329 {
1330 if (flags.base_after_vec && ! flags.min_after_vec)
1331 {
1332 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1333 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1334 }
1335 else
1336 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1337 }
1338 /* Likewise, determine the highest lowest possible value
1339 for the offset of MAX_LAB. */
1340 if (flags.max_after_base)
1341 {
1342 if (! flags.base_after_vec && flags.max_after_vec)
1343 {
1344 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1345 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1346 }
1347 else
1348 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1349 }
1350 else
1351 {
1352 if (flags.base_after_vec && ! flags.max_after_vec)
1353 {
1354 max_addr += align_fuzz (max_lab, insn, 0, 0);
1355 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1356 }
1357 else
1358 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1359 }
1360 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1361 max_addr - rel_addr, body);
1362 if (!increasing
1363 || (GET_MODE_SIZE (vec_mode)
1364 >= GET_MODE_SIZE (GET_MODE (body))))
1365 PUT_MODE (body, vec_mode);
1366 if (JUMP_TABLES_IN_TEXT_SECTION
1367 || readonly_data_section == text_section)
1368 {
1369 insn_lengths[uid]
1370 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1371 insn_current_address += insn_lengths[uid];
1372 if (insn_lengths[uid] != old_length)
1373 something_changed = 1;
1374 }
1375
1376 continue;
1377 }
1378 #endif /* CASE_VECTOR_SHORTEN_MODE */
1379
1380 if (! (varying_length[uid]))
1381 {
1382 if (NONJUMP_INSN_P (insn)
1383 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1384 {
1385 int i;
1386
1387 body = PATTERN (insn);
1388 for (i = 0; i < XVECLEN (body, 0); i++)
1389 {
1390 rtx inner_insn = XVECEXP (body, 0, i);
1391 int inner_uid = INSN_UID (inner_insn);
1392
1393 INSN_ADDRESSES (inner_uid) = insn_current_address;
1394
1395 insn_current_address += insn_lengths[inner_uid];
1396 }
1397 }
1398 else
1399 insn_current_address += insn_lengths[uid];
1400
1401 continue;
1402 }
1403
1404 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1405 {
1406 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1407 int i;
1408
1409 body = PATTERN (insn);
1410 new_length = 0;
1411 for (i = 0; i < seqn->len (); i++)
1412 {
1413 rtx_insn *inner_insn = seqn->insn (i);
1414 int inner_uid = INSN_UID (inner_insn);
1415 int inner_length;
1416
1417 INSN_ADDRESSES (inner_uid) = insn_current_address;
1418
1419 /* insn_current_length returns 0 for insns with a
1420 non-varying length. */
1421 if (! varying_length[inner_uid])
1422 inner_length = insn_lengths[inner_uid];
1423 else
1424 inner_length = insn_current_length (inner_insn);
1425
1426 if (inner_length != insn_lengths[inner_uid])
1427 {
1428 if (!increasing || inner_length > insn_lengths[inner_uid])
1429 {
1430 insn_lengths[inner_uid] = inner_length;
1431 something_changed = 1;
1432 }
1433 else
1434 inner_length = insn_lengths[inner_uid];
1435 }
1436 insn_current_address += inner_length;
1437 new_length += inner_length;
1438 }
1439 }
1440 else
1441 {
1442 new_length = insn_current_length (insn);
1443 insn_current_address += new_length;
1444 }
1445
1446 #ifdef ADJUST_INSN_LENGTH
1447 /* If needed, do any adjustment. */
1448 tmp_length = new_length;
1449 ADJUST_INSN_LENGTH (insn, new_length);
1450 insn_current_address += (new_length - tmp_length);
1451 #endif
1452
1453 if (new_length != insn_lengths[uid]
1454 && (!increasing || new_length > insn_lengths[uid]))
1455 {
1456 insn_lengths[uid] = new_length;
1457 something_changed = 1;
1458 }
1459 else
1460 insn_current_address += insn_lengths[uid] - new_length;
1461 }
1462 /* For a non-optimizing compile, do only a single pass. */
1463 if (!increasing)
1464 break;
1465 }
1466 crtl->max_insn_address = insn_current_address;
1467 free (varying_length);
1468 }
1469
1470 /* Given the body of an INSN known to be generated by an ASM statement, return
1471 the number of machine instructions likely to be generated for this insn.
1472 This is used to compute its length. */
1473
1474 static int
1475 asm_insn_count (rtx body)
1476 {
1477 const char *templ;
1478
1479 if (GET_CODE (body) == ASM_INPUT)
1480 templ = XSTR (body, 0);
1481 else
1482 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1483
1484 return asm_str_count (templ);
1485 }
1486
1487 /* Return the number of machine instructions likely to be generated for the
1488 inline-asm template. */
1489 int
1490 asm_str_count (const char *templ)
1491 {
1492 int count = 1;
1493
1494 if (!*templ)
1495 return 0;
1496
1497 for (; *templ; templ++)
1498 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1499 || *templ == '\n')
1500 count++;
1501
1502 return count;
1503 }
1504 \f
1505 /* ??? This is probably the wrong place for these. */
1506 /* Structure recording the mapping from source file and directory
1507 names at compile time to those to be embedded in debug
1508 information. */
struct debug_prefix_map
{
  const char *old_prefix;	/* Compile-time path prefix to replace.  */
  const char *new_prefix;	/* Replacement prefix for debug info.  */
  size_t old_len;		/* Length of OLD_PREFIX, excluding NUL.  */
  size_t new_len;		/* Length of NEW_PREFIX, excluding NUL.  */
  struct debug_prefix_map *next; /* Next map in the singly-linked list.  */
};
1517
1518 /* Linked list of such structures. */
1519 static debug_prefix_map *debug_prefix_maps;
1520
1521
1522 /* Record a debug file prefix mapping. ARG is the argument to
1523 -fdebug-prefix-map and must be of the form OLD=NEW. */
1524
1525 void
1526 add_debug_prefix_map (const char *arg)
1527 {
1528 debug_prefix_map *map;
1529 const char *p;
1530
1531 p = strchr (arg, '=');
1532 if (!p)
1533 {
1534 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1535 return;
1536 }
1537 map = XNEW (debug_prefix_map);
1538 map->old_prefix = xstrndup (arg, p - arg);
1539 map->old_len = p - arg;
1540 p++;
1541 map->new_prefix = xstrdup (p);
1542 map->new_len = strlen (p);
1543 map->next = debug_prefix_maps;
1544 debug_prefix_maps = map;
1545 }
1546
1547 /* Perform user-specified mapping of debug filename prefixes. Return
1548 the new name corresponding to FILENAME. */
1549
1550 const char *
1551 remap_debug_filename (const char *filename)
1552 {
1553 debug_prefix_map *map;
1554 char *s;
1555 const char *name;
1556 size_t name_len;
1557
1558 for (map = debug_prefix_maps; map; map = map->next)
1559 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1560 break;
1561 if (!map)
1562 return filename;
1563 name = filename + map->old_len;
1564 name_len = strlen (name) + 1;
1565 s = (char *) alloca (name_len + map->new_len);
1566 memcpy (s, map->new_prefix, map->new_len);
1567 memcpy (s + map->new_len, name, name_len);
1568 return ggc_strdup (s);
1569 }
1570 \f
1571 /* Return true if DWARF2 debug info can be emitted for DECL. */
1572
1573 static bool
1574 dwarf2_debug_info_emitted_p (tree decl)
1575 {
1576 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1577 return false;
1578
1579 if (DECL_IGNORED_P (decl))
1580 return false;
1581
1582 return true;
1583 }
1584
1585 /* Return scope resulting from combination of S1 and S2. */
1586 static tree
1587 choose_inner_scope (tree s1, tree s2)
1588 {
1589 if (!s1)
1590 return s2;
1591 if (!s2)
1592 return s1;
1593 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1594 return s1;
1595 return s2;
1596 }
1597
1598 /* Emit lexical block notes needed to change scope from S1 to S2. */
1599
1600 static void
1601 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1602 {
1603 rtx_insn *insn = orig_insn;
1604 tree com = NULL_TREE;
1605 tree ts1 = s1, ts2 = s2;
1606 tree s;
1607
1608 while (ts1 != ts2)
1609 {
1610 gcc_assert (ts1 && ts2);
1611 if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1612 ts1 = BLOCK_SUPERCONTEXT (ts1);
1613 else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1614 ts2 = BLOCK_SUPERCONTEXT (ts2);
1615 else
1616 {
1617 ts1 = BLOCK_SUPERCONTEXT (ts1);
1618 ts2 = BLOCK_SUPERCONTEXT (ts2);
1619 }
1620 }
1621 com = ts1;
1622
1623 /* Close scopes. */
1624 s = s1;
1625 while (s != com)
1626 {
1627 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1628 NOTE_BLOCK (note) = s;
1629 s = BLOCK_SUPERCONTEXT (s);
1630 }
1631
1632 /* Open scopes. */
1633 s = s2;
1634 while (s != com)
1635 {
1636 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1637 NOTE_BLOCK (insn) = s;
1638 s = BLOCK_SUPERCONTEXT (s);
1639 }
1640 }
1641
1642 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1643 on the scope tree and the newly reordered instructions. */
1644
static void
reemit_insn_block_notes (void)
{
  /* Start in the function's outermost scope.  */
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx_insn *insn;
  rtx_note *note;

  insn = get_insns ();
  for (; insn; insn = NEXT_INSN (insn))
    {
      tree this_block;

      /* Prevent lexical blocks from straddling section boundaries:
	 close every currently-open scope just before the section
	 switch and reopen it just after.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
	{
	  for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
	       s = BLOCK_SUPERCONTEXT (s))
	    {
	      /* NOTE(review): this inner NOTE shadows the outer one
		 declared at function scope; both END and BEG notes for
		 scope S are emitted here.  */
	      rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
	      NOTE_BLOCK (note) = s;
	      note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
	      NOTE_BLOCK (note) = s;
	    }
	}

      /* Only real (active) insns can change the current scope.  */
      if (!active_insn_p (insn))
	continue;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	{
	  int i;

	  this_block = NULL;
	  for (i = 0; i < body->len (); i++)
	    this_block = choose_inner_scope (this_block,
					     insn_scope (body->insn (i)));
	}
      /* Insns without a scope: skip those with no location at all,
	 otherwise fall back to the function's top-level block.  */
      if (! this_block)
	{
	  if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
	    continue;
	  else
	    this_block = DECL_INITIAL (cfun->decl);
	}

      /* Emit whatever BLOCK_END/BLOCK_BEG notes bridge the old scope
	 to the new one.  */
      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after; use a scratch
     DELETED note as an anchor to close the remaining scopes, then
     drop the anchor.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
1711
1712 static const char *some_local_dynamic_name;
1713
1714 /* Locate some local-dynamic symbol still in use by this function
1715 so that we can print its name in local-dynamic base patterns.
1716 Return null if there are no local-dynamic references. */
1717
const char *
get_some_local_dynamic_name ()
{
  subrtx_iterator::array_type array;
  rtx_insn *insn;

  /* Return the cached name if a previous call already found one.  */
  if (some_local_dynamic_name)
    return some_local_dynamic_name;

  /* Scan every sub-rtx of every non-debug insn for a SYMBOL_REF whose
     TLS model is local-dynamic.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
	{
	  const_rtx x = *iter;
	  if (GET_CODE (x) == SYMBOL_REF)
	    {
	      /* Cache the first match so later calls are O(1).  */
	      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
		return some_local_dynamic_name = XSTR (x, 0);
	      /* Constant-pool entries may themselves contain TLS
		 references; descend into their constants too.  */
	      if (CONSTANT_POOL_ADDRESS_P (x))
		iter.substitute (get_pool_constant (x));
	    }
	}

  return 0;
}
1743
1744 /* Output assembler code for the start of a function,
1745 and initialize some of the variables in this file
1746 for the new function. The label for the function and associated
1747 assembler pseudo-ops have already been output in `assemble_start_function'.
1748
1749 FIRST is the first insn of the rtl for the function being compiled.
1750 FILE is the file to write assembler code to.
1751 OPTIMIZE_P is nonzero if we should eliminate redundant
1752 test and compare insns. */
1753
void
final_start_function (rtx_insn *first, FILE *file,
		      int optimize_p ATTRIBUTE_UNUSED)
{
  block_depth = 0;

  this_is_asm_operands = 0;

  need_profile_function = false;

  /* Seed the source-line tracking state from the prologue location.  */
  last_filename = LOCATION_FILE (prologue_location);
  last_linenum = LOCATION_LINE (prologue_location);
  last_discriminator = discriminator = 0;

  high_block_linenum = high_function_linenum = last_linenum;

  if (flag_sanitize & SANITIZE_ADDRESS)
    asan_function_start ();

  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->begin_prologue (last_linenum, last_filename);

  if (!dwarf2_debug_info_emitted_p (current_function_decl))
    dwarf2out_begin_prologue (0, NULL);

#ifdef LEAF_REG_REMAP
  if (crtl->uses_only_leaf_regs)
    leaf_renumber_regs (first);
#endif

  /* The Sun386i and perhaps other machines don't work right
     if the profiling code comes after the prologue.  */
  if (targetm.profile_before_prologue () && crtl->profile)
    {
      if (targetm.asm_out.function_prologue == default_function_pro_epilogue
	  && targetm.have_prologue ())
	{
	  rtx_insn *insn;
	  /* Scan past harmless leading notes.  If the first significant
	     thing is a basic-block or function-begin note, defer the
	     profiling code (set need_profile_function); any non-note or
	     other note kind means it is safe to emit it immediately.  */
	  for (insn = first; insn; insn = NEXT_INSN (insn))
	    if (!NOTE_P (insn))
	      {
		insn = NULL;
		break;
	      }
	    else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
		     || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	      break;
	    else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
		     || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
	      continue;
	    else
	      {
		insn = NULL;
		break;
	      }

	  if (insn)
	    need_profile_function = true;
	  else
	    profile_function (file);
	}
      else
	profile_function (file);
    }

  /* If debugging, assign block numbers to all of the blocks in this
     function.  */
  if (write_symbols)
    {
      reemit_insn_block_notes ();
      number_blocks (current_function_decl);
      /* We never actually put out begin/end notes for the top-level
	 block in the function.  But, conceptually, that block is
	 always needed.  */
      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
    }

  /* Warn when the function's frame exceeds -Wframe-larger-than=.  */
  if (warn_frame_larger_than
      && get_frame_size () > frame_larger_than_size)
    {
      /* Issue a warning */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wd bytes is larger than %wd bytes",
	       get_frame_size (), frame_larger_than_size);
    }

  /* First output the function prologue: code to set up the stack frame.  */
  targetm.asm_out.function_prologue (file, get_frame_size ());

  /* If the machine represents the prologue as RTL, the profiling code must
     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
  if (! targetm.have_prologue ())
    profile_after_prologue (file);
}
1848
1849 static void
1850 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1851 {
1852 if (!targetm.profile_before_prologue () && crtl->profile)
1853 profile_function (file);
1854 }
1855
static void
profile_function (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
#ifdef ASM_OUTPUT_REG_PUSH
  rtx sval = NULL, chain = NULL;

  /* Registers that must survive the profiler call: the aggregate
     return-value pointer and the static chain, if this function uses
     them.  */
  if (cfun->returns_struct)
    sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
					   true);
  if (cfun->static_chain_decl)
    chain = targetm.calls.static_chain (current_function_decl, true);
#endif /* ASM_OUTPUT_REG_PUSH */

  /* Unless the target supplies its own counters, emit a zero-initialized
     per-function profile counter (label "LP<n>") in the data section.  */
  if (! NO_PROFILE_COUNTERS)
    {
      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
      switch_to_section (data_section);
      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  switch_to_section (current_function_section ());

  /* Save the live registers around the profiler call...  */
#ifdef ASM_OUTPUT_REG_PUSH
  if (sval && REG_P (sval))
    ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
  if (chain && REG_P (chain))
    ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
#endif

  FUNCTION_PROFILER (file, current_function_funcdef_no);

  /* ... and restore them in the reverse order of the pushes.  */
#ifdef ASM_OUTPUT_REG_PUSH
  if (chain && REG_P (chain))
    ASM_OUTPUT_REG_POP (file, REGNO (chain));
  if (sval && REG_P (sval))
    ASM_OUTPUT_REG_POP (file, REGNO (sval));
#endif
}
1899
1900 /* Output assembler code for the end of a function.
1901 For clarity, args are same as those of `final_start_function'
1902 even though not all of them are needed. */
1903
void
final_end_function (void)
{
  /* Leave any verbatim-asm ("app") mode before emitting the epilogue.  */
  app_disable ();

  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->end_function (high_function_linenum);

  /* Finally, output the function epilogue:
     code to restore the stack frame and return to the caller.  */
  targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());

  /* And debug output.  */
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->end_epilogue (last_linenum, last_filename);

  /* Emit frame-unwind epilogue info when the debug format itself did
     not, but frame info is still wanted.  */
  if (!dwarf2_debug_info_emitted_p (current_function_decl)
      && dwarf2out_do_frame ())
    dwarf2out_end_epilogue (last_linenum, last_filename);

  /* Reset the cached local-dynamic TLS name for the next function.  */
  some_local_dynamic_name = 0;
}
1926 \f
1927
1928 /* Dumper helper for basic block information. FILE is the assembly
1929 output file, and INSN is the instruction being emitted. */
1930
1931 static void
1932 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1933 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1934 {
1935 basic_block bb;
1936
1937 if (!flag_debug_asm)
1938 return;
1939
1940 if (INSN_UID (insn) < bb_map_size
1941 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1942 {
1943 edge e;
1944 edge_iterator ei;
1945
1946 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1947 if (bb->frequency)
1948 fprintf (file, " freq:%d", bb->frequency);
1949 if (bb->count)
1950 fprintf (file, " count:%" PRId64,
1951 bb->count);
1952 fprintf (file, " seq:%d", (*bb_seqn)++);
1953 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1954 FOR_EACH_EDGE (e, ei, bb->preds)
1955 {
1956 dump_edge_info (file, e, TDF_DETAILS, 0);
1957 }
1958 fprintf (file, "\n");
1959 }
1960 if (INSN_UID (insn) < bb_map_size
1961 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1962 {
1963 edge e;
1964 edge_iterator ei;
1965
1966 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1967 FOR_EACH_EDGE (e, ei, bb->succs)
1968 {
1969 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1970 }
1971 fprintf (file, "\n");
1972 }
1973 }
1974
1975 /* Output assembler code for some insns: all or part of a function.
1976 For description of args, see `final_start_function', above. */
1977
void
final (rtx_insn *first, FILE *file, int optimize_p)
{
  rtx_insn *insn, *next;
  int seen = 0;

  /* Used for -dA dump.  */
  basic_block *start_to_bb = NULL;
  basic_block *end_to_bb = NULL;
  int bb_map_size = 0;
  int bb_seqn = 0;

  last_ignored_compare = 0;

  if (HAVE_cc0)
    for (insn = first; insn; insn = NEXT_INSN (insn))
      {
	/* If CC tracking across branches is enabled, record the insn which
	   jumps to each branch only reached from one place.  */
	if (optimize_p && JUMP_P (insn))
	  {
	    rtx lab = JUMP_LABEL (insn);
	    if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
	      {
		LABEL_REFS (lab) = insn;
	      }
	  }
      }

  init_recog ();

  CC_STATUS_INIT;

  /* For -dA output, map insn UIDs to the basic blocks they start and
     end, so block boundaries can be annotated as insns are emitted.  */
  if (flag_debug_asm)
    {
      basic_block bb;

      bb_map_size = get_max_uid () + 1;
      start_to_bb = XCNEWVEC (basic_block, bb_map_size);
      end_to_bb = XCNEWVEC (basic_block, bb_map_size);

      /* There is no cfg for a thunk.  */
      if (!cfun->is_thunk)
	FOR_EACH_BB_REVERSE_FN (bb, cfun)
	  {
	    start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
	    end_to_bb[INSN_UID (BB_END (bb))] = bb;
	  }
    }

  /* Output the insns.  */
  for (insn = first; insn;)
    {
      if (HAVE_ATTR_length)
	{
	  if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
	    {
	      /* This can be triggered by bugs elsewhere in the compiler if
		 new insns are created after init_insn_lengths is called.  */
	      gcc_assert (NOTE_P (insn));
	      insn_current_address = -1;
	    }
	  else
	    insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
	}

      dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
			     bb_map_size, &bb_seqn);
      /* final_scan_insn returns the next insn to process, possibly
	 skipping over a whole delay-slot SEQUENCE.  */
      insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
    }

  if (flag_debug_asm)
    {
      free (start_to_bb);
      free (end_to_bb);
    }

  /* Remove CFI notes, to avoid compare-debug failures.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && (NOTE_KIND (insn) == NOTE_INSN_CFI
	      || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
	delete_insn (insn);
    }
}
2065 \f
/* Return the assembler template for the insn pattern with code CODE.
   INSN is required only when the template is computed by a C function
   (INSN_OUTPUT_FORMAT_FUNCTION); it may be null otherwise.  */
const char *
get_insn_template (int code, rtx insn)
{
  switch (insn_data[code].output_format)
    {
    case INSN_OUTPUT_FORMAT_SINGLE:
      /* One fixed template string for all alternatives.  */
      return insn_data[code].output.single;
    case INSN_OUTPUT_FORMAT_MULTI:
      /* One template per constraint alternative; which_alternative
	 was set by constrain_operands during recognition.  */
      return insn_data[code].output.multi[which_alternative];
    case INSN_OUTPUT_FORMAT_FUNCTION:
      /* Template computed at output time from the recognized operands.  */
      gcc_assert (insn);
      return (*insn_data[code].output.function) (recog_data.operand,
						 as_a <rtx_insn *> (insn));

    default:
      gcc_unreachable ();
    }
}
2084
2085 /* Emit the appropriate declaration for an alternate-entry-point
2086 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2087 LABEL_KIND != LABEL_NORMAL.
2088
2089 The case fall-through in this function is intentional. */
static void
output_alternate_entry_point (FILE *file, rtx_insn *insn)
{
  const char *name = LABEL_NAME (insn);

  /* Each case falls through so that stronger linkage kinds also get
     the directives of the weaker ones (type directive and label).  */
  switch (LABEL_KIND (insn))
    {
    case LABEL_WEAK_ENTRY:
#ifdef ASM_WEAKEN_LABEL
      ASM_WEAKEN_LABEL (file, name);
      gcc_fallthrough ();
#endif
    case LABEL_GLOBAL_ENTRY:
      targetm.asm_out.globalize_label (file, name);
      gcc_fallthrough ();
    case LABEL_STATIC_ENTRY:
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
#endif
      ASM_OUTPUT_LABEL (file, name);
      break;

    case LABEL_NORMAL:
    default:
      /* Normal labels are not alternate entry points; reaching here
	 is a caller bug.  */
      gcc_unreachable ();
    }
}
2117
2118 /* Given a CALL_INSN, find and return the nested CALL. */
2119 static rtx
2120 call_from_call_insn (rtx_call_insn *insn)
2121 {
2122 rtx x;
2123 gcc_assert (CALL_P (insn));
2124 x = PATTERN (insn);
2125
2126 while (GET_CODE (x) != CALL)
2127 {
2128 switch (GET_CODE (x))
2129 {
2130 default:
2131 gcc_unreachable ();
2132 case COND_EXEC:
2133 x = COND_EXEC_CODE (x);
2134 break;
2135 case PARALLEL:
2136 x = XVECEXP (x, 0, 0);
2137 break;
2138 case SET:
2139 x = XEXP (x, 1);
2140 break;
2141 }
2142 }
2143 return x;
2144 }
2145
2146 /* Print a comment into the asm showing FILENAME, LINENUM, and the
2147 corresponding source line, if available. */
2148
2149 static void
2150 asm_show_source (const char *filename, int linenum)
2151 {
2152 if (!filename)
2153 return;
2154
2155 int line_size;
2156 const char *line = location_get_source_line (filename, linenum, &line_size);
2157 if (!line)
2158 return;
2159
2160 fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2161 /* "line" is not 0-terminated, so we must use line_size. */
2162 fwrite (line, 1, line_size, asm_out_file);
2163 fputc ('\n', asm_out_file);
2164 }
2165
2166 /* The final scan for one insn, INSN.
2167 Args are same as in `final', except that INSN
2168 is the insn being scanned.
2169 Value returned is the next insn to be scanned.
2170
2171 NOPEEPHOLES is the flag to disallow peephole processing (currently
2172 used for within delayed branch sequence output).
2173
2174 SEEN is used to track the end of the prologue, for emitting
2175 debug information. We force the emission of a line note after
2176 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2177
rtx_insn *
final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
{
#if HAVE_cc0
  rtx set;
#endif
  rtx_insn *next;

  insn_counter++;

  /* Ignore deleted insns.  These can occur when we split insns (due to a
     template of "#") while not optimizing.  */
  if (insn->deleted ())
    return NEXT_INSN (insn);

  /* Dispatch on the kind of insn: notes, barriers and labels are handled
     specially; everything else falls into the default arm and is emitted
     as a machine instruction (or jump table / inline asm).  */
  switch (GET_CODE (insn))
    {
    case NOTE:
      switch (NOTE_KIND (insn))
	{
	case NOTE_INSN_DELETED:
	case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	  break;

	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	  in_cold_section_p = !in_cold_section_p;

	  if (dwarf2out_do_frame ())
	    dwarf2out_switch_text_section ();
	  else if (!DECL_IGNORED_P (current_function_decl))
	    debug_hooks->switch_text_section ();

	  switch_to_section (current_function_section ());
	  targetm.asm_out.function_switched_text_sections (asm_out_file,
							   current_function_decl,
							   in_cold_section_p);
	  /* Emit a label for the split cold section.  Form label name by
	     suffixing "cold" to the original function's name.  */
	  if (in_cold_section_p)
	    {
	      cold_function_name
		= clone_function_name (current_function_decl, "cold");
#ifdef ASM_DECLARE_COLD_FUNCTION_NAME
	      ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
					      IDENTIFIER_POINTER
						(cold_function_name),
					      current_function_decl);
#else
	      ASM_OUTPUT_LABEL (asm_out_file,
				IDENTIFIER_POINTER (cold_function_name));
#endif
	    }
	  break;

	case NOTE_INSN_BASIC_BLOCK:
	  /* The profiling call may have been delayed until the first
	     real basic block; emit it now if so.  */
	  if (need_profile_function)
	    {
	      profile_function (asm_out_file);
	      need_profile_function = false;
	    }

	  if (targetm.asm_out.unwind_emit)
	    targetm.asm_out.unwind_emit (asm_out_file, insn);

	  discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;

	  break;

	case NOTE_INSN_EH_REGION_BEG:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_EH_REGION_END:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_PROLOGUE_END:
	  targetm.asm_out.function_end_prologue (file);
	  profile_after_prologue (file);

	  /* If a source-line note was already seen but not yet emitted,
	     force one out now so debug info marks the end of the
	     prologue (see the SEEN comment in the function header).  */
	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_NOTE;

	  break;

	case NOTE_INSN_EPILOGUE_BEG:
	  if (!DECL_IGNORED_P (current_function_decl))
	    (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
	  targetm.asm_out.function_begin_epilogue (file);
	  break;

	case NOTE_INSN_CFI:
	  dwarf2out_emit_cfi (NOTE_CFI (insn));
	  break;

	case NOTE_INSN_CFI_LABEL:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
				  NOTE_LABEL_NUMBER (insn));
	  break;

	case NOTE_INSN_FUNCTION_BEG:
	  if (need_profile_function)
	    {
	      profile_function (asm_out_file);
	      need_profile_function = false;
	    }

	  app_disable ();
	  if (!DECL_IGNORED_P (current_function_decl))
	    debug_hooks->end_prologue (last_linenum, last_filename);

	  /* Same SEEN_NOTE/SEEN_EMITTED dance as NOTE_INSN_PROLOGUE_END:
	     whichever of the two notes comes second forces a line note.  */
	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_NOTE;

	  break;

	case NOTE_INSN_BLOCK_BEG:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      app_disable ();
	      ++block_depth;
	      high_block_linenum = last_linenum;

	      /* Output debugging info about the symbol-block beginning.  */
	      if (!DECL_IGNORED_P (current_function_decl))
		debug_hooks->begin_block (last_linenum, n);

	      /* Mark this block as output.  */
	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
	      BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
	    }
	  if (write_symbols == DBX_DEBUG
	      || write_symbols == SDB_DEBUG)
	    {
	      location_t *locus_ptr
		= block_nonartificial_location (NOTE_BLOCK (insn));

	      if (locus_ptr != NULL)
		{
		  override_filename = LOCATION_FILE (*locus_ptr);
		  override_linenum = LOCATION_LINE (*locus_ptr);
		}
	    }
	  break;

	case NOTE_INSN_BLOCK_END:
	  if (debug_info_level == DINFO_LEVEL_NORMAL
	      || debug_info_level == DINFO_LEVEL_VERBOSE
	      || write_symbols == DWARF2_DEBUG
	      || write_symbols == VMS_AND_DWARF2_DEBUG
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      app_disable ();

	      /* End of a symbol-block.  */
	      --block_depth;
	      gcc_assert (block_depth >= 0);

	      if (!DECL_IGNORED_P (current_function_decl))
		debug_hooks->end_block (high_block_linenum, n);
	      gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
			  == in_cold_section_p);
	    }
	  if (write_symbols == DBX_DEBUG
	      || write_symbols == SDB_DEBUG)
	    {
	      tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
	      location_t *locus_ptr
		= block_nonartificial_location (outer_block);

	      /* Restore the enclosing block's location override, or clear
		 it when the outer block has no non-artificial location.  */
	      if (locus_ptr != NULL)
		{
		  override_filename = LOCATION_FILE (*locus_ptr);
		  override_linenum = LOCATION_LINE (*locus_ptr);
		}
	      else
		{
		  override_filename = NULL;
		  override_linenum = 0;
		}
	    }
	  break;

	case NOTE_INSN_DELETED_LABEL:
	  /* Emit the label.  We may have deleted the CODE_LABEL because
	     the label could be proved to be unreachable, though still
	     referenced (in the form of having its address taken.  */
	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
	  break;

	case NOTE_INSN_DELETED_DEBUG_LABEL:
	  /* Similarly, but need to use different namespace for it.  */
	  if (CODE_LABEL_NUMBER (insn) != -1)
	    ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
	  break;

	case NOTE_INSN_VAR_LOCATION:
	case NOTE_INSN_CALL_ARG_LOCATION:
	  if (!DECL_IGNORED_P (current_function_decl))
	    debug_hooks->var_location (insn);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      break;

    case BARRIER:
      /* Barriers produce no assembler output.  */
      break;

    case CODE_LABEL:
      /* The target port might emit labels in the output function for
	 some insn, e.g. sh.c output_branchy_insn.  */
      if (CODE_LABEL_NUMBER (insn) <= max_labelno)
	{
	  int align = LABEL_TO_ALIGNMENT (insn);
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
	  int max_skip = LABEL_TO_MAX_SKIP (insn);
#endif

	  if (align && NEXT_INSN (insn))
	    {
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
#else
#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
	      ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
#else
	      ASM_OUTPUT_ALIGN (file, align);
#endif
#endif
	    }
	}
      /* Control may arrive here from several places, so the cc state
	 is unknown past a label.  */
      CC_STATUS_INIT;

      if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
	debug_hooks->label (as_a <rtx_code_label *> (insn));

      app_disable ();

      next = next_nonnote_insn (insn);
      /* If this label is followed by a jump-table, make sure we put
	 the label in the read-only section.  Also possibly write the
	 label and jump table together.  */
      if (next != 0 && JUMP_TABLE_DATA_P (next))
	{
#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
	  /* In this case, the case vector is being moved by the
	     target, so don't output the label at all.  Leave that
	     to the back end macros.  */
#else
	  if (! JUMP_TABLES_IN_TEXT_SECTION)
	    {
	      int log_align;

	      switch_to_section (targetm.asm_out.function_rodata_section
				 (current_function_decl));

#ifdef ADDR_VEC_ALIGN
	      log_align = ADDR_VEC_ALIGN (next);
#else
	      log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
	      ASM_OUTPUT_ALIGN (file, log_align);
	    }
	  else
	    switch_to_section (current_function_section ());

#ifdef ASM_OUTPUT_CASE_LABEL
	  ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
				 next);
#else
	  targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
#endif
#endif
	  break;
	}
      if (LABEL_ALT_ENTRY_P (insn))
	output_alternate_entry_point (file, insn);
      else
	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
      break;

    default:
      {
	rtx body = PATTERN (insn);
	int insn_code_number;
	const char *templ;
	bool is_stmt;

	/* Reset this early so it is correct for ASM statements.  */
	current_insn_predicate = NULL_RTX;

	/* An INSN, JUMP_INSN or CALL_INSN.
	   First check for special kinds that recog doesn't recognize.  */

	if (GET_CODE (body) == USE /* These are just declarations.  */
	    || GET_CODE (body) == CLOBBER)
	  break;

#if HAVE_cc0
	{
	  /* If there is a REG_CC_SETTER note on this insn, it means that
	     the setting of the condition code was done in the delay slot
	     of the insn that branched here.  So recover the cc status
	     from the insn that set it.  */

	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
	  if (note)
	    {
	      rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
	      NOTICE_UPDATE_CC (PATTERN (other), other);
	      cc_prev_status = cc_status;
	    }
	}
#endif

	/* Detect insns that are really jump-tables
	   and output them as such.  */

	if (JUMP_TABLE_DATA_P (insn))
	  {
#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
	    int vlen, idx;
#endif

	    if (! JUMP_TABLES_IN_TEXT_SECTION)
	      switch_to_section (targetm.asm_out.function_rodata_section
				 (current_function_decl));
	    else
	      switch_to_section (current_function_section ());

	    app_disable ();

#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
	    /* The target emits the whole vector at once...  */
	    if (GET_CODE (body) == ADDR_VEC)
	      {
#ifdef ASM_OUTPUT_ADDR_VEC
		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
#else
		gcc_unreachable ();
#endif
	      }
	    else
	      {
#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
#else
		gcc_unreachable ();
#endif
	      }
#else
	    /* ...otherwise emit it element by element.  For an
	       ADDR_DIFF_VEC the label vector is operand 1 (operand 0
	       is the base label); for an ADDR_VEC it is operand 0.  */
	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
	    for (idx = 0; idx < vlen; idx++)
	      {
		if (GET_CODE (body) == ADDR_VEC)
		  {
#ifdef ASM_OUTPUT_ADDR_VEC_ELT
		    ASM_OUTPUT_ADDR_VEC_ELT
		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
#else
		    gcc_unreachable ();
#endif
		  }
		else
		  {
#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
		    ASM_OUTPUT_ADDR_DIFF_ELT
		      (file,
		       body,
		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
#else
		    gcc_unreachable ();
#endif
		  }
	      }
#ifdef ASM_OUTPUT_CASE_END
	    ASM_OUTPUT_CASE_END (file,
				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
				 insn);
#endif
#endif

	    switch_to_section (current_function_section ());

	    break;
	  }
	/* Output this line note if it is the first or the last line
	   note in a row.  */
	if (!DECL_IGNORED_P (current_function_decl)
	    && notice_source_line (insn, &is_stmt))
	  {
	    if (flag_verbose_asm)
	      asm_show_source (last_filename, last_linenum);
	    (*debug_hooks->source_line) (last_linenum, last_filename,
					 last_discriminator, is_stmt);
	  }

	if (GET_CODE (body) == PARALLEL
	    && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	  body = XVECEXP (body, 0, 0);

	if (GET_CODE (body) == ASM_INPUT)
	  {
	    const char *string = XSTR (body, 0);

	    /* There's no telling what that did to the condition codes.  */
	    CC_STATUS_INIT;

	    if (string[0])
	      {
		expanded_location loc;

		app_enable ();
		loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
		if (*loc.file && loc.line)
		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
			   ASM_COMMENT_START, loc.line, loc.file);
		fprintf (asm_out_file, "\t%s\n", string);
#if HAVE_AS_LINE_ZERO
		if (*loc.file && loc.line)
		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
#endif
	      }
	    break;
	  }

	/* Detect `asm' construct with operands.  */
	if (asm_noperands (body) >= 0)
	  {
	    unsigned int noperands = asm_noperands (body);
	    rtx *ops = XALLOCAVEC (rtx, noperands);
	    const char *string;
	    location_t loc;
	    expanded_location expanded;

	    /* There's no telling what that did to the condition codes.  */
	    CC_STATUS_INIT;

	    /* Get out the operand values.  */
	    string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
	    /* Inhibit dying on what would otherwise be compiler bugs.  */
	    insn_noperands = noperands;
	    this_is_asm_operands = insn;
	    expanded = expand_location (loc);

#ifdef FINAL_PRESCAN_INSN
	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
#endif

	    /* Output the insn using them.  */
	    if (string[0])
	      {
		app_enable ();
		if (expanded.file && expanded.line)
		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
			   ASM_COMMENT_START, expanded.line, expanded.file);
		output_asm_insn (string, ops);
#if HAVE_AS_LINE_ZERO
		if (expanded.file && expanded.line)
		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
#endif
	      }

	    if (targetm.asm_out.final_postscan_insn)
	      targetm.asm_out.final_postscan_insn (file, insn, ops,
						   insn_noperands);

	    this_is_asm_operands = 0;
	    break;
	  }

	app_disable ();

	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
	  {
	    /* A delayed-branch sequence */
	    int i;

	    final_sequence = seq;

	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
	       force the restoration of a comparison that was previously
	       thought unnecessary.  If that happens, cancel this sequence
	       and cause that insn to be restored.  */

	    next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
	    if (next != seq->insn (1))
	      {
		final_sequence = 0;
		return next;
	      }

	    for (i = 1; i < seq->len (); i++)
	      {
		rtx_insn *insn = seq->insn (i);
		rtx_insn *next = NEXT_INSN (insn);
		/* We loop in case any instruction in a delay slot gets
		   split.  */
		do
		  insn = final_scan_insn (insn, file, 0, 1, seen);
		while (insn != next);
	      }
#ifdef DBR_OUTPUT_SEQEND
	    DBR_OUTPUT_SEQEND (file);
#endif
	    final_sequence = 0;

	    /* If the insn requiring the delay slot was a CALL_INSN, the
	       insns in the delay slot are actually executed before the
	       called function.  Hence we don't preserve any CC-setting
	       actions in these insns and the CC must be marked as being
	       clobbered by the function.  */
	    if (CALL_P (seq->insn (0)))
	      {
		CC_STATUS_INIT;
	      }
	    break;
	  }

	/* We have a real machine instruction as rtl.  */

	body = PATTERN (insn);

#if HAVE_cc0
	set = single_set (insn);

	/* Check for redundant test and compare instructions
	   (when the condition codes are already set up as desired).
	   This is done only when optimizing; if not optimizing,
	   it should be possible for the user to alter a variable
	   with the debugger in between statements
	   and the next statement should reexamine the variable
	   to compute the condition codes.  */

	if (optimize_p)
	  {
	    if (set
		&& GET_CODE (SET_DEST (set)) == CC0
		&& insn != last_ignored_compare)
	      {
		rtx src1, src2;
		if (GET_CODE (SET_SRC (set)) == SUBREG)
		  SET_SRC (set) = alter_subreg (&SET_SRC (set), true);

		src1 = SET_SRC (set);
		src2 = NULL_RTX;
		if (GET_CODE (SET_SRC (set)) == COMPARE)
		  {
		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
		      XEXP (SET_SRC (set), 0)
			= alter_subreg (&XEXP (SET_SRC (set), 0), true);
		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
		      XEXP (SET_SRC (set), 1)
			= alter_subreg (&XEXP (SET_SRC (set), 1), true);
		    if (XEXP (SET_SRC (set), 1)
			== CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
		      src2 = XEXP (SET_SRC (set), 0);
		  }
		if ((cc_status.value1 != 0
		     && rtx_equal_p (src1, cc_status.value1))
		    || (cc_status.value2 != 0
			&& rtx_equal_p (src1, cc_status.value2))
		    || (src2 != 0 && cc_status.value1 != 0
			&& rtx_equal_p (src2, cc_status.value1))
		    || (src2 != 0 && cc_status.value2 != 0
			&& rtx_equal_p (src2, cc_status.value2)))
		  {
		    /* Don't delete insn if it has an addressing side-effect.  */
		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
			/* or if anything in it is volatile.  */
			&& ! volatile_refs_p (PATTERN (insn)))
		      {
			/* We don't really delete the insn; just ignore it.  */
			last_ignored_compare = insn;
			break;
		      }
		  }
	      }
	  }

	/* If this is a conditional branch, maybe modify it
	   if the cc's are in a nonstandard state
	   so that it accomplishes the same thing that it would
	   do straightforwardly if the cc's were set up normally.  */

	if (cc_status.flags != 0
	    && JUMP_P (insn)
	    && GET_CODE (body) == SET
	    && SET_DEST (body) == pc_rtx
	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
	  {
	    /* This function may alter the contents of its argument
	       and clear some of the cc_status.flags bits.
	       It may also return 1 meaning condition now always true
	       or -1 meaning condition now always false
	       or 2 meaning condition nontrivial but altered.  */
	    int result = alter_cond (XEXP (SET_SRC (body), 0));
	    /* If condition now has fixed value, replace the IF_THEN_ELSE
	       with its then-operand or its else-operand.  */
	    if (result == 1)
	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
	    if (result == -1)
	      SET_SRC (body) = XEXP (SET_SRC (body), 2);

	    /* The jump is now either unconditional or a no-op.
	       If it has become a no-op, don't try to output it.
	       (It would not be recognized.)  */
	    if (SET_SRC (body) == pc_rtx)
	      {
	        delete_insn (insn);
		break;
	      }
	    else if (ANY_RETURN_P (SET_SRC (body)))
	      /* Replace (set (pc) (return)) with (return).  */
	      PATTERN (insn) = body = SET_SRC (body);

	    /* Rerecognize the instruction if it has changed.  */
	    if (result != 0)
	      INSN_CODE (insn) = -1;
	  }

	/* If this is a conditional trap, maybe modify it if the cc's
	   are in a nonstandard state so that it accomplishes the same
	   thing that it would do straightforwardly if the cc's were
	   set up normally.  */
	if (cc_status.flags != 0
	    && NONJUMP_INSN_P (insn)
	    && GET_CODE (body) == TRAP_IF
	    && COMPARISON_P (TRAP_CONDITION (body))
	    && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
	  {
	    /* This function may alter the contents of its argument
	       and clear some of the cc_status.flags bits.
	       It may also return 1 meaning condition now always true
	       or -1 meaning condition now always false
	       or 2 meaning condition nontrivial but altered.  */
	    int result = alter_cond (TRAP_CONDITION (body));

	    /* If TRAP_CONDITION has become always false, delete the
	       instruction.  */
	    if (result == -1)
	      {
		delete_insn (insn);
		break;
	      }

	    /* If TRAP_CONDITION has become always true, replace
	       TRAP_CONDITION with const_true_rtx.  */
	    if (result == 1)
	      TRAP_CONDITION (body) = const_true_rtx;

	    /* Rerecognize the instruction if it has changed.  */
	    if (result != 0)
	      INSN_CODE (insn) = -1;
	  }

	/* Make same adjustments to instructions that examine the
	   condition codes without jumping and instructions that
	   handle conditional moves (if this machine has either one).  */

	if (cc_status.flags != 0
	    && set != 0)
	  {
	    rtx cond_rtx, then_rtx, else_rtx;

	    if (!JUMP_P (insn)
		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
	      {
		cond_rtx = XEXP (SET_SRC (set), 0);
		then_rtx = XEXP (SET_SRC (set), 1);
		else_rtx = XEXP (SET_SRC (set), 2);
	      }
	    else
	      {
		cond_rtx = SET_SRC (set);
		then_rtx = const_true_rtx;
		else_rtx = const0_rtx;
	      }

	    if (COMPARISON_P (cond_rtx)
		&& XEXP (cond_rtx, 0) == cc0_rtx)
	      {
		int result;
		result = alter_cond (cond_rtx);
		if (result == 1)
		  validate_change (insn, &SET_SRC (set), then_rtx, 0);
		else if (result == -1)
		  validate_change (insn, &SET_SRC (set), else_rtx, 0);
		else if (result == 2)
		  INSN_CODE (insn) = -1;
		if (SET_DEST (set) == SET_SRC (set))
		  delete_insn (insn);
	      }
	  }

#endif

	/* Do machine-specific peephole optimizations if desired.  */

	if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
	  {
	    rtx_insn *next = peephole (insn);
	    /* When peepholing, if there were notes within the peephole,
	       emit them before the peephole.  */
	    if (next != 0 && next != NEXT_INSN (insn))
	      {
		rtx_insn *note, *prev = PREV_INSN (insn);

		for (note = NEXT_INSN (insn); note != next;
		     note = NEXT_INSN (note))
		  final_scan_insn (note, file, optimize_p, nopeepholes, seen);

		/* Put the notes in the proper position for a later
		   rescan.  For example, the SH target can do this
		   when generating a far jump in a delayed branch
		   sequence.  */
		note = NEXT_INSN (insn);
		SET_PREV_INSN (note) = prev;
		SET_NEXT_INSN (prev) = note;
		SET_NEXT_INSN (PREV_INSN (next)) = insn;
		SET_PREV_INSN (insn) = PREV_INSN (next);
		SET_NEXT_INSN (insn) = next;
		SET_PREV_INSN (next) = insn;
	      }

	    /* PEEPHOLE might have changed this.  */
	    body = PATTERN (insn);
	  }

	/* Try to recognize the instruction.
	   If successful, verify that the operands satisfy the
	   constraints for the instruction.  Crash if they don't,
	   since `reload' should have changed them so that they do.  */

	insn_code_number = recog_memoized (insn);
	cleanup_subreg_operands (insn);

	/* Dump the insn in the assembly for debugging (-dAP).
	   If the final dump is requested as slim RTL, dump slim
	   RTL to the assembly file also.  */
	if (flag_dump_rtl_in_asm)
	  {
	    print_rtx_head = ASM_COMMENT_START;
	    if (! (dump_flags & TDF_SLIM))
	      print_rtl_single (asm_out_file, insn);
	    else
	      dump_insn_slim (asm_out_file, insn);
	    print_rtx_head = "";
	  }

	if (! constrain_operands_cached (insn, 1))
	  fatal_insn_not_found (insn);

	/* Some target machines need to prescan each insn before
	   it is output.  */

#ifdef FINAL_PRESCAN_INSN
	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
#endif

	if (targetm.have_conditional_execution ()
	    && GET_CODE (PATTERN (insn)) == COND_EXEC)
	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));

#if HAVE_cc0
	cc_prev_status = cc_status;

	/* Update `cc_status' for this instruction.
	   The instruction's output routine may change it further.
	   If the output routine for a jump insn needs to depend
	   on the cc status, it should look at cc_prev_status.  */

	NOTICE_UPDATE_CC (body, insn);
#endif

	/* Record the insn being output; cleared again once emission
	   of this insn is complete.  */
	current_output_insn = debug_insn = insn;

	/* Find the proper template for this insn.  */
	templ = get_insn_template (insn_code_number, insn);

	/* If the C code returns 0, it means that it is a jump insn
	   which follows a deleted test insn, and that test insn
	   needs to be reinserted.  */
	if (templ == 0)
	  {
	    rtx_insn *prev;

	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);

	    /* We have already processed the notes between the setter and
	       the user.  Make sure we don't process them again, this is
	       particularly important if one of the notes is a block
	       scope note or an EH note.  */
	    for (prev = insn;
		 prev != last_ignored_compare;
		 prev = PREV_INSN (prev))
	      {
		if (NOTE_P (prev))
		  delete_insn (prev);	/* Use delete_note.  */
	      }

	    return prev;
	  }

	/* If the template is the string "#", it means that this insn must
	   be split.  */
	if (templ[0] == '#' && templ[1] == '\0')
	  {
	    rtx_insn *new_rtx = try_split (body, insn, 0);

	    /* If we didn't split the insn, go away.  */
	    if (new_rtx == insn && PATTERN (new_rtx) == body)
	      fatal_insn ("could not split insn", insn);

	    /* If we have a length attribute, this instruction should have
	       been split in shorten_branches, to ensure that we would have
	       valid length info for the splitees.  */
	    gcc_assert (!HAVE_ATTR_length);

	    return new_rtx;
	  }

	/* ??? This will put the directives in the wrong place if
	   get_insn_template outputs assembly directly.  However calling it
	   before get_insn_template breaks if the insns is split.  */
	if (targetm.asm_out.unwind_emit_before_insn
	    && targetm.asm_out.unwind_emit)
	  targetm.asm_out.unwind_emit (asm_out_file, insn);

	rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
	if (call_insn != NULL)
	  {
	    /* For a direct call through a SYMBOL_REF, make sure the
	       callee decl is marked as referenced externally.  */
	    rtx x = call_from_call_insn (call_insn);
	    x = XEXP (x, 0);
	    if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	      {
		tree t;
		x = XEXP (x, 0);
		t = SYMBOL_REF_DECL (x);
		if (t)
		  assemble_external (t);
	      }
	  }

	/* Output assembler code from the template.  */
	output_asm_insn (templ, recog_data.operand);

	/* Some target machines need to postscan each insn after
	   it is output.  */
	if (targetm.asm_out.final_postscan_insn)
	  targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
					       recog_data.n_operands);

	if (!targetm.asm_out.unwind_emit_before_insn
	    && targetm.asm_out.unwind_emit)
	  targetm.asm_out.unwind_emit (asm_out_file, insn);

	/* Let the debug info back-end know about this call.  We do this only
	   after the instruction has been emitted because labels that may be
	   created to reference the call instruction must appear after it.  */
	if (call_insn != NULL && !DECL_IGNORED_P (current_function_decl))
	  debug_hooks->var_location (insn);

	current_output_insn = debug_insn = 0;
      }
    }
  return NEXT_INSN (insn);
}
3072 \f
3073 /* Return whether a source line note needs to be emitted before INSN.
3074 Sets IS_STMT to TRUE if the line should be marked as a possible
3075 breakpoint location. */
3076
3077 static bool
3078 notice_source_line (rtx_insn *insn, bool *is_stmt)
3079 {
3080 const char *filename;
3081 int linenum;
3082
3083 if (override_filename)
3084 {
3085 filename = override_filename;
3086 linenum = override_linenum;
3087 }
3088 else if (INSN_HAS_LOCATION (insn))
3089 {
3090 expanded_location xloc = insn_location (insn);
3091 filename = xloc.file;
3092 linenum = xloc.line;
3093 }
3094 else
3095 {
3096 filename = NULL;
3097 linenum = 0;
3098 }
3099
3100 if (filename == NULL)
3101 return false;
3102
3103 if (force_source_line
3104 || filename != last_filename
3105 || last_linenum != linenum)
3106 {
3107 force_source_line = false;
3108 last_filename = filename;
3109 last_linenum = linenum;
3110 last_discriminator = discriminator;
3111 *is_stmt = true;
3112 high_block_linenum = MAX (last_linenum, high_block_linenum);
3113 high_function_linenum = MAX (last_linenum, high_function_linenum);
3114 return true;
3115 }
3116
3117 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3118 {
3119 /* If the discriminator changed, but the line number did not,
3120 output the line table entry with is_stmt false so the
3121 debugger does not treat this as a breakpoint location. */
3122 last_discriminator = discriminator;
3123 *is_stmt = false;
3124 return true;
3125 }
3126
3127 return false;
3128 }
3129 \f
3130 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3131 directly to the desired hard register. */
3132
3133 void
3134 cleanup_subreg_operands (rtx_insn *insn)
3135 {
3136 int i;
3137 bool changed = false;
3138 extract_insn_cached (insn);
3139 for (i = 0; i < recog_data.n_operands; i++)
3140 {
3141 /* The following test cannot use recog_data.operand when testing
3142 for a SUBREG: the underlying object might have been changed
3143 already if we are inside a match_operator expression that
3144 matches the else clause. Instead we test the underlying
3145 expression directly. */
3146 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3147 {
3148 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3149 changed = true;
3150 }
3151 else if (GET_CODE (recog_data.operand[i]) == PLUS
3152 || GET_CODE (recog_data.operand[i]) == MULT
3153 || MEM_P (recog_data.operand[i]))
3154 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3155 }
3156
3157 for (i = 0; i < recog_data.n_dups; i++)
3158 {
3159 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3160 {
3161 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3162 changed = true;
3163 }
3164 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3165 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3166 || MEM_P (*recog_data.dup_loc[i]))
3167 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3168 }
3169 if (changed)
3170 df_insn_rescan (insn);
3171 }
3172
3173 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3174 the thing it is a subreg of. Do it anyway if FINAL_P. */
3175
rtx
alter_subreg (rtx *xp, bool final_p)
{
  rtx x = *xp;
  rtx y = SUBREG_REG (x);

  /* simplify_subreg does not remove subreg from volatile references.
     We are required to.  */
  if (MEM_P (y))
    {
      int offset = SUBREG_BYTE (x);

      /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
	 contains 0 instead of the proper offset.  See simplify_subreg.  */
      if (offset == 0
	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
	{
	  /* DIFFERENCE is negative here (inner mode is smaller than the
	     outer, paradoxical, mode); the adjustments below move the
	     offset back so the outer access covers the inner bytes.  */
	  int difference = GET_MODE_SIZE (GET_MODE (y))
			   - GET_MODE_SIZE (GET_MODE (x));
	  if (WORDS_BIG_ENDIAN)
	    offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += difference % UNITS_PER_WORD;
	}

      /* adjust_address validates the new memory address; the _nv
	 variant does not, for use before addresses are final.  */
      if (final_p)
	*xp = adjust_address (y, GET_MODE (x), offset);
      else
	*xp = adjust_address_nv (y, GET_MODE (x), offset);
    }
  else if (REG_P (y) && HARD_REGISTER_P (y))
    {
      rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
				     SUBREG_BYTE (x));

      if (new_rtx != 0)
	*xp = new_rtx;
      else if (final_p && REG_P (y))
	{
	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
	  unsigned int regno;
	  HOST_WIDE_INT offset;

	  regno = subreg_regno (x);
	  if (subreg_lowpart_p (x))
	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
	  else
	    offset = SUBREG_BYTE (x);
	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
	}
    }

  return *xp;
}
3230
3231 /* Do alter_subreg on all the SUBREGs contained in X. */
3232
3233 static rtx
3234 walk_alter_subreg (rtx *xp, bool *changed)
3235 {
3236 rtx x = *xp;
3237 switch (GET_CODE (x))
3238 {
3239 case PLUS:
3240 case MULT:
3241 case AND:
3242 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3243 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3244 break;
3245
3246 case MEM:
3247 case ZERO_EXTEND:
3248 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3249 break;
3250
3251 case SUBREG:
3252 *changed = true;
3253 return alter_subreg (xp, true);
3254
3255 default:
3256 break;
3257 }
3258
3259 return *xp;
3260 }
3261 \f
#if HAVE_cc0

/* Given BODY, the body of a jump instruction, alter the jump condition
   as required by the bits that are set in cc_status.flags.
   Not all of the bits there can be handled at this level in all cases.

   The value is normally 0.
   1 means that the condition has become always true.
   -1 means that the condition has become always false.
   2 means that COND has been altered.  */

static int
alter_cond (rtx cond)
{
  int value = 0;

  /* CC_REVERSED: swap the sense of the comparison, e.g. GT <-> LT.  */
  if (cc_status.flags & CC_REVERSED)
    {
      value = 2;
      PUT_CODE (cond, swap_condition (GET_CODE (cond)));
    }

  /* CC_INVERTED: logically negate the comparison, e.g. GT <-> LE.  */
  if (cc_status.flags & CC_INVERTED)
    {
      value = 2;
      PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
    }

  /* With CC_NOT_POSITIVE set, some comparisons are decided outright
     and others collapse to a test against zero.  */
  if (cc_status.flags & CC_NOT_POSITIVE)
    switch (GET_CODE (cond))
      {
      case LE:
      case LEU:
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case GT:
      case GTU:
      case LTU:
	/* Jump becomes no-op.  */
	return -1;

      case GE:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case LT:
	PUT_CODE (cond, NE);
	value = 2;
	break;

      default:
	break;
      }

  if (cc_status.flags & CC_NOT_NEGATIVE)
    switch (GET_CODE (cond))
      {
      case GE:
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case LT:
      case LTU:
	/* Jump becomes no-op.  */
	return -1;

      case LE:
      case LEU:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case GT:
      case GTU:
	PUT_CODE (cond, NE);
	value = 2;
	break;

      default:
	break;
      }

  if (cc_status.flags & CC_NO_OVERFLOW)
    switch (GET_CODE (cond))
      {
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case LEU:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case GTU:
	PUT_CODE (cond, NE);
	value = 2;
	break;

      case LTU:
	/* Jump becomes no-op.  */
	return -1;

      default:
	break;
      }

  /* Zero test encoded in the sign flag: rewrite EQ/NE as a sign
     comparison.  Any other code here would be a front-end/target bug,
     hence the gcc_unreachable.  */
  if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
    switch (GET_CODE (cond))
      {
      default:
	gcc_unreachable ();

      case NE:
	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
	value = 2;
	break;

      case EQ:
	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
	value = 2;
	break;
      }

  if (cc_status.flags & CC_NOT_SIGNED)
    /* The flags are valid if signed condition operators are converted
       to unsigned.  */
    switch (GET_CODE (cond))
      {
      case LE:
	PUT_CODE (cond, LEU);
	value = 2;
	break;

      case LT:
	PUT_CODE (cond, LTU);
	value = 2;
	break;

      case GT:
	PUT_CODE (cond, GTU);
	value = 2;
	break;

      case GE:
	PUT_CODE (cond, GEU);
	value = 2;
	break;

      default:
	break;
      }

  return value;
}
#endif
3422 \f
3423 /* Report inconsistency between the assembler template and the operands.
3424 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3425
3426 void
3427 output_operand_lossage (const char *cmsgid, ...)
3428 {
3429 char *fmt_string;
3430 char *new_message;
3431 const char *pfx_str;
3432 va_list ap;
3433
3434 va_start (ap, cmsgid);
3435
3436 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3437 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3438 new_message = xvasprintf (fmt_string, ap);
3439
3440 if (this_is_asm_operands)
3441 error_for_asm (this_is_asm_operands, "%s", new_message);
3442 else
3443 internal_error ("%s", new_message);
3444
3445 free (fmt_string);
3446 free (new_message);
3447 va_end (ap);
3448 }
3449 \f
/* Output of assembler code from a template, and its subroutines.  */

/* Annotate the assembly with a comment describing the pattern and
   alternative used.  Reads the global DEBUG_INSN; emits nothing when it
   is null, and clears it afterwards so the comment appears only once
   per rtl insn.  */

static void
output_asm_name (void)
{
  if (debug_insn)
    {
      int num = INSN_CODE (debug_insn);
      /* Comment shows the insn UID and the machine-description
	 pattern name.  */
      fprintf (asm_out_file, "\t%s %d\t%s",
	       ASM_COMMENT_START, INSN_UID (debug_insn),
	       insn_data[num].name);
      /* Report which constraint alternative was chosen, 1-based.  */
      if (insn_data[num].n_alternatives > 1)
	fprintf (asm_out_file, "/%d", which_alternative + 1);

      if (HAVE_ATTR_length)
	fprintf (asm_out_file, "\t[length = %d]",
		 get_attr_length (debug_insn));

      /* Clear this so only the first assembler insn
	 of any rtl insn will get the special comment for -dp.  */
      debug_insn = 0;
    }
}
3476
/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
   or its address, return that expr.  Set *PADDRESSP to 1 if the expr
   corresponds to the address of the object and 0 if to the object.
   Returns 0 when no expression can be found.  */

static tree
get_mem_expr_from_op (rtx op, int *paddressp)
{
  tree expr;
  int inner_addressp;

  *paddressp = 0;

  if (REG_P (op))
    return REG_EXPR (op);
  else if (!MEM_P (op))
    return 0;

  /* A MEM with a recorded expression: done.  */
  if (MEM_EXPR (op) != 0)
    return MEM_EXPR (op);

  /* Otherwise we have an address, so indicate it and look at the address.  */
  *paddressp = 1;
  op = XEXP (op, 0);

  /* First check if we have a decl for the address, then look at the right side
     if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
     But don't allow the address to itself be indirect.  */
  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
    return expr;
  else if (GET_CODE (op) == PLUS
	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
    return expr;

  /* Strip unary operators and binary arithmetic to reach something
     recognizable underneath.  */
  while (UNARY_P (op)
	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
    op = XEXP (op, 0);

  expr = get_mem_expr_from_op (op, &inner_addressp);
  return inner_addressp ? 0 : expr;
}
3517
3518 /* Output operand names for assembler instructions. OPERANDS is the
3519 operand vector, OPORDER is the order to write the operands, and NOPS
3520 is the number of operands to write. */
3521
3522 static void
3523 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3524 {
3525 int wrote = 0;
3526 int i;
3527
3528 for (i = 0; i < nops; i++)
3529 {
3530 int addressp;
3531 rtx op = operands[oporder[i]];
3532 tree expr = get_mem_expr_from_op (op, &addressp);
3533
3534 fprintf (asm_out_file, "%c%s",
3535 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3536 wrote = 1;
3537 if (expr)
3538 {
3539 fprintf (asm_out_file, "%s",
3540 addressp ? "*" : "");
3541 print_mem_expr (asm_out_file, expr);
3542 wrote = 1;
3543 }
3544 else if (REG_P (op) && ORIGINAL_REGNO (op)
3545 && ORIGINAL_REGNO (op) != REGNO (op))
3546 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3547 }
3548 }
3549
#ifdef ASSEMBLER_DIALECT
/* Helper function to parse assembler dialects in the asm string.
   This is called from output_asm_insn and asm_fprintf.  P points just
   past the '{', '|' or '}' that triggered the call; *DIALECT is the
   caller's in-alternative state flag.  Returns the updated scan
   position.  */
static const char *
do_assembler_dialects (const char *p, int *dialect)
{
  /* The character that got us here is the one before P.  */
  char c = *(p - 1);

  switch (c)
    {
    case '{':
      {
	int i;

	if (*dialect)
	  output_operand_lossage ("nested assembly dialect alternatives");
	else
	  *dialect = 1;

	/* If we want the first dialect, do nothing.  Otherwise, skip
	   DIALECT_NUMBER of strings ending with '|'.  */
	for (i = 0; i < dialect_number; i++)
	  {
	    while (*p && *p != '}')
	      {
		if (*p == '|')
		  {
		    p++;
		    break;
		  }

		/* Skip over any character after a percent sign.  */
		if (*p == '%')
		  p++;
		if (*p)
		  p++;
	      }

	    /* Fewer alternatives than DIALECT_NUMBER: use the last.  */
	    if (*p == '}')
	      break;
	  }

	if (*p == '\0')
	  output_operand_lossage ("unterminated assembly dialect alternative");
      }
      break;

    case '|':
      if (*dialect)
	{
	  /* Skip to close brace.  */
	  do
	    {
	      if (*p == '\0')
		{
		  output_operand_lossage ("unterminated assembly dialect alternative");
		  break;
		}

	      /* Skip over any character after a percent sign.  */
	      if (*p == '%' && p[1])
		{
		  p += 2;
		  continue;
		}

	      if (*p++ == '}')
		break;
	    }
	  while (1);

	  *dialect = 0;
	}
      else
	/* A '|' outside any alternative is ordinary output text.  */
	putc (c, asm_out_file);
      break;

    case '}':
      if (! *dialect)
	putc (c, asm_out_file);
      *dialect = 0;
      break;
    default:
      gcc_unreachable ();
    }

  return p;
}
#endif
3639
/* Output text from TEMPLATE to the assembler output file,
   obeying %-directions to substitute operands taken from
   the vector OPERANDS.

   %N (for N a digit) means print operand N in usual manner.
   %lN means require operand N to be a CODE_LABEL or LABEL_REF
   and print the label name with no punctuation.
   %cN means require operand N to be a constant
   and print the constant expression with no punctuation.
   %aN means expect operand N to be a memory address
   (not a memory reference!) and print a reference
   to that address.
   %nN means expect operand N to be a constant
   and print a constant expression for minus the value
   of the operand, with no other punctuation.  */

void
output_asm_insn (const char *templ, rtx *operands)
{
  const char *p;
  int c;
#ifdef ASSEMBLER_DIALECT
  int dialect = 0;
#endif
  /* Track which operands have been printed, and in what order, so the
     -fverbose-asm comment can name them afterwards.  */
  int oporder[MAX_RECOG_OPERANDS];
  char opoutput[MAX_RECOG_OPERANDS];
  int ops = 0;

  /* An insn may return a null string template
     in a case where no assembler code is needed.  */
  if (*templ == 0)
    return;

  memset (opoutput, 0, sizeof opoutput);
  p = templ;
  putc ('\t', asm_out_file);

#ifdef ASM_OUTPUT_OPCODE
  ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif

  while ((c = *p++))
    switch (c)
      {
      case '\n':
	/* End of one assembler line: flush the operand-name comment
	   before starting the next line.  */
	if (flag_verbose_asm)
	  output_asm_operand_names (operands, oporder, ops);
	if (flag_print_asm_name)
	  output_asm_name ();

	ops = 0;
	memset (opoutput, 0, sizeof opoutput);

	putc (c, asm_out_file);
#ifdef ASM_OUTPUT_OPCODE
	while ((c = *p) == '\t')
	  {
	    putc (c, asm_out_file);
	    p++;
	  }
	ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
	break;

#ifdef ASSEMBLER_DIALECT
      case '{':
      case '}':
      case '|':
	p = do_assembler_dialects (p, &dialect);
	break;
#endif

      case '%':
	/* %% outputs a single %.  %{, %} and %| print {, } and | respectively
	   if ASSEMBLER_DIALECT defined and these characters have a special
	   meaning as dialect delimiters.  */
	if (*p == '%'
#ifdef ASSEMBLER_DIALECT
	    || *p == '{' || *p == '}' || *p == '|'
#endif
	    )
	  {
	    putc (*p, asm_out_file);
	    p++;
	  }
	/* %= outputs a number which is unique to each insn in the entire
	   compilation.  This is useful for making local labels that are
	   referred to more than once in a given insn.  */
	else if (*p == '=')
	  {
	    p++;
	    fprintf (asm_out_file, "%d", insn_counter);
	  }
	/* % followed by a letter and some digits
	   outputs an operand in a special way depending on the letter.
	   Letters `acln' are implemented directly.
	   Other letters are passed to `output_operand' so that
	   the TARGET_PRINT_OPERAND hook can define them.  */
	else if (ISALPHA (*p))
	  {
	    int letter = *p++;
	    unsigned long opnum;
	    char *endptr;

	    opnum = strtoul (p, &endptr, 10);

	    /* NOTE(review): OPNUM is range-checked only for user asm
	       (this_is_asm_operands); for compiler-generated templates
	       a bad operand number would index out of range — presumed
	       impossible for valid machine descriptions.  */
	    if (endptr == p)
	      output_operand_lossage ("operand number missing "
				      "after %%-letter");
	    else if (this_is_asm_operands && opnum >= insn_noperands)
	      output_operand_lossage ("operand number out of range");
	    else if (letter == 'l')
	      output_asm_label (operands[opnum]);
	    else if (letter == 'a')
	      output_address (VOIDmode, operands[opnum]);
	    else if (letter == 'c')
	      {
		if (CONSTANT_ADDRESS_P (operands[opnum]))
		  output_addr_const (asm_out_file, operands[opnum]);
		else
		  output_operand (operands[opnum], 'c');
	      }
	    else if (letter == 'n')
	      {
		if (CONST_INT_P (operands[opnum]))
		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
			   - INTVAL (operands[opnum]));
		else
		  {
		    putc ('-', asm_out_file);
		    output_addr_const (asm_out_file, operands[opnum]);
		  }
	      }
	    else
	      output_operand (operands[opnum], letter);

	    /* Remember the first time each operand is printed, for the
	       verbose-asm comment.  */
	    if (!opoutput[opnum])
	      oporder[ops++] = opnum;
	    opoutput[opnum] = 1;

	    p = endptr;
	    c = *p;
	  }
	/* % followed by a digit outputs an operand the default way.  */
	else if (ISDIGIT (*p))
	  {
	    unsigned long opnum;
	    char *endptr;

	    opnum = strtoul (p, &endptr, 10);
	    if (this_is_asm_operands && opnum >= insn_noperands)
	      output_operand_lossage ("operand number out of range");
	    else
	      output_operand (operands[opnum], 0);

	    if (!opoutput[opnum])
	      oporder[ops++] = opnum;
	    opoutput[opnum] = 1;

	    p = endptr;
	    c = *p;
	  }
	/* % followed by punctuation: output something for that
	   punctuation character alone, with no operand.  The
	   TARGET_PRINT_OPERAND hook decides what is actually done.  */
	else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
	  output_operand (NULL_RTX, *p++);
	else
	  output_operand_lossage ("invalid %%-code");
	break;

      default:
	putc (c, asm_out_file);
      }

  /* Write out the variable names for operands, if we know them.  */
  if (flag_verbose_asm)
    output_asm_operand_names (operands, oporder, ops);
  if (flag_print_asm_name)
    output_asm_name ();

  putc ('\n', asm_out_file);
}
3823 \f
3824 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3825
3826 void
3827 output_asm_label (rtx x)
3828 {
3829 char buf[256];
3830
3831 if (GET_CODE (x) == LABEL_REF)
3832 x = label_ref_label (x);
3833 if (LABEL_P (x)
3834 || (NOTE_P (x)
3835 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3836 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3837 else
3838 output_operand_lossage ("'%%l' operand isn't a label");
3839
3840 assemble_name (asm_out_file, buf);
3841 }
3842
3843 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3844
3845 void
3846 mark_symbol_refs_as_used (rtx x)
3847 {
3848 subrtx_iterator::array_type array;
3849 FOR_EACH_SUBRTX (iter, array, x, ALL)
3850 {
3851 const_rtx x = *iter;
3852 if (GET_CODE (x) == SYMBOL_REF)
3853 if (tree t = SYMBOL_REF_DECL (x))
3854 assemble_external (t);
3855 }
3856 }
3857
/* Print operand X using machine-dependent assembler syntax.
   CODE is a non-digit that preceded the operand-number in the % spec,
   such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
   between the % and the digits.
   When CODE is a non-letter, X is 0.

   The meanings of the letters are machine-dependent and controlled
   by TARGET_PRINT_OPERAND.  */

void
output_operand (rtx x, int code ATTRIBUTE_UNUSED)
{
  /* Get rid of any SUBREG before handing X to the target printer.  */
  if (x && GET_CODE (x) == SUBREG)
    x = alter_subreg (&x, true);

  /* X must not be a pseudo reg.  */
  if (!targetm.no_register_allocation)
    gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);

  targetm.asm_out.print_operand (asm_out_file, x, code);

  if (x == NULL_RTX)
    return;

  /* Any symbols mentioned in the operand are now definitely used.  */
  mark_symbol_refs_as_used (x);
}
3884
3885 /* Print a memory reference operand for address X using
3886 machine-dependent assembler syntax. */
3887
3888 void
3889 output_address (machine_mode mode, rtx x)
3890 {
3891 bool changed = false;
3892 walk_alter_subreg (&x, &changed);
3893 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3894 }
3895 \f
/* Print an integer constant expression in assembler syntax.
   Addition and subtraction are the only arithmetic
   that may appear in these expressions.  */

void
output_addr_const (FILE *file, rtx x)
{
  char buf[256];

 restart:
  switch (GET_CODE (x))
    {
    case PC:
      /* '.' conventionally denotes the current location counter.  */
      putc ('.', file);
      break;

    case SYMBOL_REF:
      /* Record the symbol's decl as externally referenced first.  */
      if (SYMBOL_REF_DECL (x))
	assemble_external (SYMBOL_REF_DECL (x));
#ifdef ASM_OUTPUT_SYMBOL_REF
      ASM_OUTPUT_SYMBOL_REF (file, x);
#else
      assemble_name (file, XSTR (x, 0));
#endif
      break;

    case LABEL_REF:
      x = label_ref_label (x);
      /* Fall through.  */
    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
#ifdef ASM_OUTPUT_LABEL_REF
      ASM_OUTPUT_LABEL_REF (file, buf);
#else
      assemble_name (file, buf);
#endif
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST:
      /* This used to output parentheses around the expression,
	 but that does not work on the 386 (either ATT or BSD assembler).  */
      output_addr_const (file, XEXP (x, 0));
      break;

    case CONST_WIDE_INT:
      /* We do not know the mode here so we have to use a round about
	 way to build a wide-int to get it printed properly.  */
      {
	wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
					   CONST_WIDE_INT_NUNITS (x),
					   CONST_WIDE_INT_NUNITS (x)
					   * HOST_BITS_PER_WIDE_INT,
					   false);
	print_decs (w, file);
      }
      break;

    case CONST_DOUBLE:
      if (CONST_DOUBLE_AS_INT_P (x))
	{
	  /* We can use %d if the number is one word and positive.  */
	  if (CONST_DOUBLE_HIGH (x))
	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
	  else if (CONST_DOUBLE_LOW (x) < 0)
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
	  else
	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
	}
      else
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them.  */
	output_operand_lossage ("floating constant misused");
      break;

    case CONST_FIXED:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
      break;

    case PLUS:
      /* Some assemblers need integer constants to appear last (eg masm).  */
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  output_addr_const (file, XEXP (x, 1));
	  if (INTVAL (XEXP (x, 0)) >= 0)
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 0));
	}
      else
	{
	  output_addr_const (file, XEXP (x, 0));
	  /* Only emit '+' if a negative constant will not supply its
	     own '-' sign.  */
	  if (!CONST_INT_P (XEXP (x, 1))
	      || INTVAL (XEXP (x, 1)) >= 0)
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 1));
	}
      break;

    case MINUS:
      /* Avoid outputting things like x-x or x+5-x,
	 since some assemblers can't handle that.  */
      x = simplify_subtraction (x);
      if (GET_CODE (x) != MINUS)
	goto restart;

      output_addr_const (file, XEXP (x, 0));
      fprintf (file, "-");
      if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
	  || GET_CODE (XEXP (x, 1)) == PC
	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
	output_addr_const (file, XEXP (x, 1));
      else
	{
	  /* Parenthesize a compound subtrahend to keep the meaning
	     unambiguous for the assembler.  */
	  fputs (targetm.asm_out.open_paren, file);
	  output_addr_const (file, XEXP (x, 1));
	  fputs (targetm.asm_out.close_paren, file);
	}
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
    case SUBREG:
    case TRUNCATE:
      /* These codes are transparent here: print the inner operand.  */
      output_addr_const (file, XEXP (x, 0));
      break;

    default:
      /* Give the target a chance to print target-specific constant
	 constructs before diagnosing.  */
      if (targetm.asm_out.output_addr_const_extra (file, x))
	break;

      output_operand_lossage ("invalid expression as operand");
    }
}
4035 \f
/* Output a quoted string.  Printable characters are emitted as-is
   (with '"' and '\' backslash-escaped); everything else is written as
   a three-digit octal escape.  */

void
output_quoted_string (FILE *asm_file, const char *string)
{
#ifdef OUTPUT_QUOTED_STRING
  OUTPUT_QUOTED_STRING (asm_file, string);
#else
  putc ('\"', asm_file);
  for (const char *p = string; *p != 0; p++)
    {
      char ch = *p;
      if (!ISPRINT (ch))
	fprintf (asm_file, "\\%03o", (unsigned char) ch);
      else
	{
	  /* Escape embedded quotes and backslashes.  */
	  if (ch == '\"' || ch == '\\')
	    putc ('\\', asm_file);
	  putc (ch, asm_file);
	}
    }
  putc ('\"', asm_file);
#endif
}
4061 \f
4062 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
4063
4064 void
4065 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4066 {
4067 char buf[2 + CHAR_BIT * sizeof (value) / 4];
4068 if (value == 0)
4069 putc ('0', f);
4070 else
4071 {
4072 char *p = buf + sizeof (buf);
4073 do
4074 *--p = "0123456789abcdef"[value % 16];
4075 while ((value /= 16) != 0);
4076 *--p = 'x';
4077 *--p = '0';
4078 fwrite (p, 1, buf + sizeof (buf) - p, f);
4079 }
4080 }
4081
/* Internal function that prints an unsigned long in decimal in reverse.
   The output string IS NOT null-terminated.  S must have room for every
   digit (at most 20 for a 64-bit value).  Returns the number of
   characters written; always at least 1, since 0 prints as "0".  */

static int
sprint_ul_rev (char *s, unsigned long value)
{
  int i = 0;
  /* do/while so that VALUE == 0 still yields one digit.  */
  do
    {
      s[i] = "0123456789"[value % 10];
      value /= 10;
      i++;
    }
  while (value != 0);
  return i;
}
4103
/* Write an unsigned long as decimal to a file, fast.  */

void
fprint_ul (FILE *f, unsigned long value)
{
  /* 20 characters is enough for the digits of 2**64.  */
  char digits[20];
  int n = sprint_ul_rev (digits, value);

  /* The digits were produced least-significant first; emit them from
     the back of the buffer.  (Too short to bother reversing in place
     and using fputs.)  */
  while (n > 0)
    putc (digits[--n], f);
}
4123
/* Write an unsigned long as decimal to a string, fast.
   S must be wide enough to not overflow, at least 21 chars.
   Returns the length of the string (without terminating '\0').  */

int
sprint_ul (char *s, unsigned long value)
{
  int len = sprint_ul_rev (s, value);
  s[len] = '\0';

  /* sprint_ul_rev produced the digits backwards; reverse in place.  */
  for (int lo = 0, hi = len - 1; lo < hi; lo++, hi--)
    {
      char tmp = s[lo];
      s[lo] = s[hi];
      s[hi] = tmp;
    }
  return len;
}
4137
/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
   %R prints the value of REGISTER_PREFIX.
   %L prints the value of LOCAL_LABEL_PREFIX.
   %U prints the value of USER_LABEL_PREFIX.
   %I prints the value of IMMEDIATE_PREFIX.
   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.

   We handle alternate assembler dialects here, just like output_asm_insn.  */

void
asm_fprintf (FILE *file, const char *p, ...)
{
  /* BUF accumulates a real printf format string ("%" plus flags, width
     and conversion) which is then handed to fprintf.  */
  char buf[10];
  char *q, c;
#ifdef ASSEMBLER_DIALECT
  int dialect = 0;
#endif
  va_list argptr;

  va_start (argptr, p);

  buf[0] = '%';

  while ((c = *p++))
    switch (c)
      {
#ifdef ASSEMBLER_DIALECT
      case '{':
      case '}':
      case '|':
	p = do_assembler_dialects (p, &dialect);
	break;
#endif

      case '%':
	c = *p++;
	q = &buf[1];
	/* Copy printf flag characters into BUF.  */
	while (strchr ("-+ #0", c))
	  {
	    *q++ = c;
	    c = *p++;
	  }
	/* Copy field width / precision into BUF.  */
	while (ISDIGIT (c) || c == '.')
	  {
	    *q++ = c;
	    c = *p++;
	  }
	switch (c)
	  {
	  case '%':
	    putc ('%', file);
	    break;

	  case 'd': case 'i': case 'u':
	  case 'x': case 'X': case 'o':
	  case 'c':
	    *q++ = c;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, int));
	    break;

	  case 'w':
	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
	       'o' cases, but we do not check for those cases.  It
	       means that the value is a HOST_WIDE_INT, which may be
	       either `long' or `long long'.  */
	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
	    q += strlen (HOST_WIDE_INT_PRINT);
	    *q++ = *p++;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
	    break;

	  case 'l':
	    *q++ = c;
#ifdef HAVE_LONG_LONG
	    /* %ll prefix: consume both 'l's and the conversion char.  */
	    if (*p == 'l')
	      {
		*q++ = *p++;
		*q++ = *p++;
		*q = 0;
		fprintf (file, buf, va_arg (argptr, long long));
	      }
	    else
#endif
	      {
		*q++ = *p++;
		*q = 0;
		fprintf (file, buf, va_arg (argptr, long));
	      }

	    break;

	  case 's':
	    *q++ = c;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, char *));
	    break;

	  case 'O':
#ifdef ASM_OUTPUT_OPCODE
	    ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
	    break;

	  case 'R':
#ifdef REGISTER_PREFIX
	    fprintf (file, "%s", REGISTER_PREFIX);
#endif
	    break;

	  case 'I':
#ifdef IMMEDIATE_PREFIX
	    fprintf (file, "%s", IMMEDIATE_PREFIX);
#endif
	    break;

	  case 'L':
#ifdef LOCAL_LABEL_PREFIX
	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
#endif
	    break;

	  case 'U':
	    fputs (user_label_prefix, file);
	    break;

#ifdef ASM_FPRINTF_EXTENSIONS
	    /* Uppercase letters are reserved for general use by asm_fprintf
	       and so are not available to target specific code.  In order to
	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
	       they are defined here.  As they get turned into real extensions
	       to asm_fprintf they should be removed from this list.  */
	  case 'A': case 'B': case 'C': case 'D': case 'E':
	  case 'F': case 'G': case 'H': case 'J': case 'K':
	  case 'M': case 'N': case 'P': case 'Q': case 'S':
	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
	    break;

	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
#endif
	  default:
	    gcc_unreachable ();
	  }
	break;

      default:
	putc (c, file);
      }
  va_end (argptr);
}
4290 \f
4291 /* Return nonzero if this function has no function calls. */
4292
4293 int
4294 leaf_function_p (void)
4295 {
4296 rtx_insn *insn;
4297
4298 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4299 functions even if they call mcount. */
4300 if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4301 return 0;
4302
4303 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4304 {
4305 if (CALL_P (insn)
4306 && ! SIBLING_CALL_P (insn))
4307 return 0;
4308 if (NONJUMP_INSN_P (insn)
4309 && GET_CODE (PATTERN (insn)) == SEQUENCE
4310 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4311 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4312 return 0;
4313 }
4314
4315 return 1;
4316 }
4317
4318 /* Return 1 if branch is a forward branch.
4319 Uses insn_shuid array, so it works only in the final pass. May be used by
4320 output templates to customary add branch prediction hints.
4321 */
4322 int
4323 final_forward_branch_p (rtx_insn *insn)
4324 {
4325 int insn_id, label_id;
4326
4327 gcc_assert (uid_shuid);
4328 insn_id = INSN_SHUID (insn);
4329 label_id = INSN_SHUID (JUMP_LABEL (insn));
4330 /* We've hit some insns that does not have id information available. */
4331 gcc_assert (insn_id && label_id);
4332 return insn_id < label_id;
4333 }
4334
4335 /* On some machines, a function with no call insns
4336 can run faster if it doesn't create its own register window.
4337 When output, the leaf function should use only the "output"
4338 registers. Ordinarily, the function would be compiled to use
4339 the "input" registers to find its arguments; it is a candidate
4340 for leaf treatment if it uses only the "input" registers.
4341 Leaf function treatment means renumbering so the function
4342 uses the "output" registers instead. */
4343
4344 #ifdef LEAF_REGISTERS
4345
4346 /* Return 1 if this function uses only the registers that can be
4347 safely renumbered. */
4348
4349 int
4350 only_leaf_regs_used (void)
4351 {
4352 int i;
4353 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4354
4355 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4356 if ((df_regs_ever_live_p (i) || global_regs[i])
4357 && ! permitted_reg_in_leaf_functions[i])
4358 return 0;
4359
4360 if (crtl->uses_pic_offset_table
4361 && pic_offset_table_rtx != 0
4362 && REG_P (pic_offset_table_rtx)
4363 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4364 return 0;
4365
4366 return 1;
4367 }
4368
4369 /* Scan all instructions and renumber all registers into those
4370 available in leaf functions. */
4371
4372 static void
4373 leaf_renumber_regs (rtx_insn *first)
4374 {
4375 rtx_insn *insn;
4376
4377 /* Renumber only the actual patterns.
4378 The reg-notes can contain frame pointer refs,
4379 and renumbering them could crash, and should not be needed. */
4380 for (insn = first; insn; insn = NEXT_INSN (insn))
4381 if (INSN_P (insn))
4382 leaf_renumber_regs_insn (PATTERN (insn));
4383 }
4384
/* Scan IN_RTX and its subexpressions, and renumber all regs into those
   available in leaf functions.  Uses the rtx `used' bit to mark regs
   already processed, so each REG is renumbered at most once.  */

void
leaf_renumber_regs_insn (rtx in_rtx)
{
  int i, j;
  const char *format_ptr;

  if (in_rtx == 0)
    return;

  /* Renumber all input-registers into output-registers
     according to the target's LEAF_REG_REMAP mapping.  */

  if (REG_P (in_rtx))
    {
      int newreg;

      /* Don't renumber the same reg twice.  */
      if (in_rtx->used)
	return;

      newreg = REGNO (in_rtx);
      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
	 to reach here as part of a REG_NOTE.  */
      if (newreg >= FIRST_PSEUDO_REGISTER)
	{
	  in_rtx->used = 1;
	  return;
	}
      newreg = LEAF_REG_REMAP (newreg);
      gcc_assert (newreg >= 0);
      /* Keep the liveness bookkeeping consistent with the rename.  */
      df_set_regs_ever_live (REGNO (in_rtx), false);
      df_set_regs_ever_live (newreg, true);
      SET_REGNO (in_rtx, newreg);
      in_rtx->used = 1;
      return;
    }

  if (INSN_P (in_rtx))
    {
      /* Inside a SEQUENCE, we find insns.
	 Renumber just the patterns of these insns,
	 just as we do for the top-level insns.  */
      leaf_renumber_regs_insn (PATTERN (in_rtx));
      return;
    }

  /* Generic walk over the rtx: recurse into 'e' (expression) and 'E'
     (expression vector) operands; other operand kinds carry no regs.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	leaf_renumber_regs_insn (XEXP (in_rtx, i));
	break;

      case 'E':
	if (NULL != XVEC (in_rtx, i))
	  {
	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
	  }
	break;

      case 'S':
      case 's':
      case '0':
      case 'i':
      case 'w':
      case 'n':
      case 'u':
	break;

      default:
	gcc_unreachable ();
      }
}
4465 #endif
4466 \f
/* Turn the RTL into assembly.  Emits the function's entry code, body,
   exception table and exit code, then writes debug information and
   registers any static constructor/destructor.  Always returns 0.  */
static unsigned int
rest_of_handle_final (void)
{
  const char *fnname = get_fnname_from_decl (current_function_decl);

  assemble_start_function (current_function_decl, fnname);
  final_start_function (get_insns (), asm_out_file, optimize);
  final (get_insns (), asm_out_file, optimize);
  if (flag_ipa_ra)
    /* Record the hard registers this function uses, so that IPA
       register allocation can exploit them in callers.  */
    collect_fn_hard_reg_usage ();
  final_end_function ();

  /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
     directive that closes the procedure descriptor.  Similarly, for x64 SEH.
     Otherwise it's not strictly necessary, but it doesn't hurt either.  */
  output_function_exception_table (fnname);

  assemble_end_function (current_function_decl, fnname);

  /* Free up reg info memory.  */
  free_reg_info ();

  if (! quiet_flag)
    fflush (asm_out_file);

  /* Write DBX symbols if requested.  */

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->function_decl (current_function_decl);
  timevar_pop (TV_SYMOUT);

  /* Release the blocks that are linked to DECL_INITIAL() to free the memory.  */
  DECL_INITIAL (current_function_decl) = error_mark_node;

  /* If the target supports constructors/destructors directly, register
     this function with its declared init/fini priority.  */
  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
				 decl_init_priority_lookup
				   (current_function_decl));
  if (DECL_STATIC_DESTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
				decl_fini_priority_lookup
				  (current_function_decl));
  return 0;
}
4524
namespace {

/* Descriptor for the "final" pass.  All property/todo fields are zero:
   final only emits assembly and does not transform the IL.  */
const pass_data pass_data_final =
{
  RTL_PASS, /* type */
  "final", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_FINAL, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; the work is done by rest_of_handle_final.  */
class pass_final : public rtl_opt_pass
{
public:
  pass_final (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_final, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return rest_of_handle_final (); }

}; // class pass_final

} // anon namespace
4553
4554 rtl_opt_pass *
4555 make_pass_final (gcc::context *ctxt)
4556 {
4557 return new pass_final (ctxt);
4558 }
4559
4560
4561 static unsigned int
4562 rest_of_handle_shorten_branches (void)
4563 {
4564 /* Shorten branches. */
4565 shorten_branches (get_insns ());
4566 return 0;
4567 }
4568
namespace {

/* Descriptor for the branch-shortening pass; it computes insn lengths
   without requiring or destroying any IL properties.  */
const pass_data pass_data_shorten_branches =
{
  RTL_PASS, /* type */
  "shorten", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_SHORTEN_BRANCH, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; the work is done by rest_of_handle_shorten_branches.  */
class pass_shorten_branches : public rtl_opt_pass
{
public:
  pass_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_shorten_branches ();
    }

}; // class pass_shorten_branches

} // anon namespace
4600
4601 rtl_opt_pass *
4602 make_pass_shorten_branches (gcc::context *ctxt)
4603 {
4604 return new pass_shorten_branches (ctxt);
4605 }
4606
4607
/* Tear down per-function RTL state after final: optionally dump the
   final insns (-fdump-final-insns), unlink the insn chain so debug info
   does not keep the whole chain live, and reset global compilation
   flags for the next function.  Always returns 0.  */
static unsigned int
rest_of_clean_state (void)
{
  rtx_insn *insn, *next;
  FILE *final_output = NULL;
  /* Save the dump flags we temporarily override below.  */
  int save_unnumbered = flag_dump_unnumbered;
  int save_noaddr = flag_dump_noaddr;

  if (flag_dump_final_insns)
    {
      /* Append this function's insns to the final-insns dump file,
	 with pointer addresses and insn numbers suppressed so the
	 output is stable across runs (used by -fcompare-debug).  */
      final_output = fopen (flag_dump_final_insns, "a");
      if (!final_output)
	{
	  error ("could not open final insn dump file %qs: %m",
		 flag_dump_final_insns);
	  flag_dump_final_insns = NULL;
	}
      else
	{
	  flag_dump_noaddr = flag_dump_unnumbered = 1;
	  if (flag_compare_debug_opt || flag_compare_debug)
	    dump_flags |= TDF_NOUID;
	  dump_function_header (final_output, current_function_decl,
				dump_flags);
	  final_insns_dump_p = true;

	  /* Normalize insn UIDs: labels keep their label number, all
	     other insns get UID 0, so the dump does not depend on the
	     order insns were created in.  */
	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	    if (LABEL_P (insn))
	      INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
	    else
	      {
		if (NOTE_P (insn))
		  set_block_for_insn (insn, NULL);
		INSN_UID (insn) = 0;
	      }
	}
    }

  /* It is very important to decompose the RTL instruction chain here:
     debug information keeps pointing into CODE_LABEL insns inside the function
     body.  If these remain pointing to the other insns, we end up preserving
     whole RTL chain and attached detailed debug info in memory.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = NULL;
      SET_PREV_INSN (insn) = NULL;

      /* Dump each insn while unlinking, skipping notes that carry
	 debug-only information.  */
      if (final_output
	  && (!NOTE_P (insn) ||
	      (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
	       && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
	       && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
	       && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
	       && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
	print_rtl_single (final_output, insn);
    }

  if (final_output)
    {
      /* Restore the dump flags overridden above.  */
      flag_dump_noaddr = save_noaddr;
      flag_dump_unnumbered = save_unnumbered;
      final_insns_dump_p = false;

      if (fclose (final_output))
	{
	  error ("could not close final insn dump file %qs: %m",
		 flag_dump_final_insns);
	  flag_dump_final_insns = NULL;
	}
    }

  /* In case the function was not output,
     don't leave any temporary anonymous types
     queued up for sdb output.  */
  if (SDB_DEBUGGING_INFO && write_symbols == SDB_DEBUG)
    sdbout_types (NULL_TREE);

  /* Reset pass-completion state so RTL generation for the next
     function starts from scratch.  */
  flag_rerun_cse_after_global_opts = 0;
  reload_completed = 0;
  epilogue_completed = 0;
#ifdef STACK_REGS
  regstack_completed = 0;
#endif

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */
  init_temp_slots ();

  free_bb_for_insn ();

  if (cfun->gimple_df)
    delete_tree_ssa (cfun);

  /* We can reduce stack alignment on call site only when we are sure that
     the function body just produced will be actually used in the final
     executable.  */
  if (decl_binds_to_current_def_p (current_function_decl))
    {
      unsigned int pref = crtl->preferred_stack_boundary;
      if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
        pref = crtl->stack_alignment_needed;
      cgraph_node::rtl_info (current_function_decl)
        ->preferred_incoming_stack_boundary = pref;
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state,
     and the function context push/pop code does not save/restore volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);
  free_after_compilation (cfun);
  return 0;
}
4732
namespace {

/* Descriptor for the clean-state pass.  It destroys PROP_rtl: after it
   runs, the function no longer has an RTL body.  */
const pass_data pass_data_clean_state =
{
  RTL_PASS, /* type */
  "*clean_state", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_FINAL, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_rtl, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; the work is done by rest_of_clean_state.  */
class pass_clean_state : public rtl_opt_pass
{
public:
  pass_clean_state (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_clean_state, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_clean_state ();
    }

}; // class pass_clean_state

} // anon namespace
4764
4765 rtl_opt_pass *
4766 make_pass_clean_state (gcc::context *ctxt)
4767 {
4768 return new pass_clean_state (ctxt);
4769 }
4770
4771 /* Return true if INSN is a call to the current function. */
4772
4773 static bool
4774 self_recursive_call_p (rtx_insn *insn)
4775 {
4776 tree fndecl = get_call_fndecl (insn);
4777 return (fndecl == current_function_decl
4778 && decl_binds_to_current_def_p (fndecl));
4779 }
4780
/* Collect hard register usage for the current function, and store it in
   the function's cgraph_rtl_info so that callers compiled later can use
   a more precise clobber set for calls to this function (-fipa-ra).  */

static void
collect_fn_hard_reg_usage (void)
{
  rtx_insn *insn;
#ifdef STACK_REGS
  int i;
#endif
  struct cgraph_rtl_info *node;
  HARD_REG_SET function_used_regs;

  /* ??? To be removed when all the ports have been fixed.  */
  if (!targetm.call_fusage_contains_non_callee_clobbers)
    return;

  CLEAR_HARD_REG_SET (function_used_regs);

  for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
    {
      HARD_REG_SET insn_used_regs;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* Self-recursive calls are skipped: their register usage is
	 exactly the set being computed here.  */
      if (CALL_P (insn)
	  && !self_recursive_call_p (insn))
	{
	  /* If the callee's usage is unknown, no useful set can be
	     recorded for this function either; give up entirely.  */
	  if (!get_call_reg_set_usage (insn, &insn_used_regs,
				       call_used_reg_set))
	    return;

	  IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
	}

      find_all_hard_reg_sets (insn, &insn_used_regs, false);
      IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
    }

  /* Be conservative - mark fixed and global registers as used.  */
  IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);

#ifdef STACK_REGS
  /* Handle STACK_REGS conservatively, since the df-framework does not
     provide accurate information for them.  */

  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT (function_used_regs, i);
#endif

  /* The information we have gathered is only interesting if it exposes a
     register from the call_used_regs that is not used in this function.  */
  if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
    return;

  node = cgraph_node::rtl_info (current_function_decl);
  gcc_assert (node != NULL);

  COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
  node->function_used_regs_valid = 1;
}
4842
4843 /* Get the declaration of the function called by INSN. */
4844
4845 static tree
4846 get_call_fndecl (rtx_insn *insn)
4847 {
4848 rtx note, datum;
4849
4850 note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
4851 if (note == NULL_RTX)
4852 return NULL_TREE;
4853
4854 datum = XEXP (note, 0);
4855 if (datum != NULL_RTX)
4856 return SYMBOL_REF_DECL (datum);
4857
4858 return NULL_TREE;
4859 }
4860
4861 /* Return the cgraph_rtl_info of the function called by INSN. Returns NULL for
4862 call targets that can be overwritten. */
4863
4864 static struct cgraph_rtl_info *
4865 get_call_cgraph_rtl_info (rtx_insn *insn)
4866 {
4867 tree fndecl;
4868
4869 if (insn == NULL_RTX)
4870 return NULL;
4871
4872 fndecl = get_call_fndecl (insn);
4873 if (fndecl == NULL_TREE
4874 || !decl_binds_to_current_def_p (fndecl))
4875 return NULL;
4876
4877 return cgraph_node::rtl_info (fndecl);
4878 }
4879
4880 /* Find hard registers used by function call instruction INSN, and return them
4881 in REG_SET. Return DEFAULT_SET in REG_SET if not found. */
4882
4883 bool
4884 get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
4885 HARD_REG_SET default_set)
4886 {
4887 if (flag_ipa_ra)
4888 {
4889 struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
4890 if (node != NULL
4891 && node->function_used_regs_valid)
4892 {
4893 COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
4894 AND_HARD_REG_SET (*reg_set, default_set);
4895 return true;
4896 }
4897 }
4898
4899 COPY_HARD_REG_SET (*reg_set, default_set);
4900 return false;
4901 }