re PR debug/36728 ([stack]: gdb doesn't work with stack alignment)
[gcc.git] / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011, 2012
5 Free Software Foundation, Inc.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This is the final pass of the compiler.
24 It looks at the rtl code for a function and outputs assembler code.
25
26 Call `final_start_function' to output the assembler code for function entry,
27 `final' to output assembler code for some RTL code,
28 `final_end_function' to output assembler code for function exit.
29 If a function is compiled in several pieces, each piece is
30 output separately with `final'.
31
32 Some optimizations are also done at this level.
33 Move instructions that were made unnecessary by good register allocation
34 are detected and omitted from the output. (Though most of these
35 are removed by the last jump pass.)
36
37 Instructions to set the condition codes are omitted when it can be
38 seen that the condition codes already had the desired values.
39
40 In some cases it is sufficient if the inherited condition codes
41 have related values, but this may require the following insn
42 (the one that tests the condition codes) to be modified.
43
44 The code for the function prologue and epilogue are generated
45 directly in assembler by the target functions function_prologue and
46 function_epilogue. Those instructions never exist as rtl. */
47
48 #include "config.h"
49 #include "system.h"
50 #include "coretypes.h"
51 #include "tm.h"
52
53 #include "tree.h"
54 #include "rtl.h"
55 #include "tm_p.h"
56 #include "regs.h"
57 #include "insn-config.h"
58 #include "insn-attr.h"
59 #include "recog.h"
60 #include "conditions.h"
61 #include "flags.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "rtl-error.h"
67 #include "toplev.h" /* exact_log2, floor_log2 */
68 #include "reload.h"
69 #include "intl.h"
70 #include "basic-block.h"
71 #include "target.h"
72 #include "targhooks.h"
73 #include "debug.h"
74 #include "expr.h"
75 #include "tree-pass.h"
76 #include "tree-flow.h"
77 #include "cgraph.h"
78 #include "coverage.h"
79 #include "df.h"
80 #include "ggc.h"
81 #include "cfgloop.h"
82 #include "params.h"
83 #include "tree-pretty-print.h" /* for dump_function_header */
84
85 #ifdef XCOFF_DEBUGGING_INFO
86 #include "xcoffout.h" /* Needed for external data
87 declarations for e.g. AIX 4.x. */
88 #endif
89
90 #include "dwarf2out.h"
91
92 #ifdef DBX_DEBUGGING_INFO
93 #include "dbxout.h"
94 #endif
95
96 #ifdef SDB_DEBUGGING_INFO
97 #include "sdbout.h"
98 #endif
99
100 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
101 So define a null default for it to save conditionalization later. */
102 #ifndef CC_STATUS_INIT
103 #define CC_STATUS_INIT
104 #endif
105
106 /* Is the given character a logical line separator for the assembler? */
107 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
108 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
109 #endif
110
111 #ifndef JUMP_TABLES_IN_TEXT_SECTION
112 #define JUMP_TABLES_IN_TEXT_SECTION 0
113 #endif
114
115 /* Bitflags used by final_scan_insn. */
116 #define SEEN_BB 1
117 #define SEEN_NOTE 2
118 #define SEEN_EMITTED 4
119
120 /* Last insn processed by final_scan_insn. */
121 static rtx debug_insn;
122 rtx current_output_insn;
123
124 /* Line number of last NOTE. */
125 static int last_linenum;
126
127 /* Last discriminator written to assembly. */
128 static int last_discriminator;
129
130 /* Discriminator of current block. */
131 static int discriminator;
132
133 /* Highest line number in current block. */
134 static int high_block_linenum;
135
136 /* Likewise for function. */
137 static int high_function_linenum;
138
139 /* Filename of last NOTE. */
140 static const char *last_filename;
141
142 /* Override filename and line number. */
143 static const char *override_filename;
144 static int override_linenum;
145
146 /* Whether to force emission of a line note before the next insn. */
147 static bool force_source_line = false;
148
149 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
150
151 /* Nonzero while outputting an `asm' with operands.
152 This means that inconsistencies are the user's fault, so don't die.
153 The precise value is the insn being output, to pass to error_for_asm. */
154 rtx this_is_asm_operands;
155
156 /* Number of operands of this insn, for an `asm' with operands. */
157 static unsigned int insn_noperands;
158
159 /* Compare optimization flag. */
160
161 static rtx last_ignored_compare = 0;
162
163 /* Assign a unique number to each insn that is output.
164 This can be used to generate unique local labels. */
165
166 static int insn_counter = 0;
167
168 #ifdef HAVE_cc0
169 /* This variable contains machine-dependent flags (defined in tm.h)
170 set and examined by output routines
171 that describe how to interpret the condition codes properly. */
172
173 CC_STATUS cc_status;
174
175 /* During output of an insn, this contains a copy of cc_status
176 from before the insn. */
177
178 CC_STATUS cc_prev_status;
179 #endif
180
181 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
182
183 static int block_depth;
184
185 /* Nonzero if have enabled APP processing of our assembler output. */
186
187 static int app_on;
188
189 /* If we are outputting an insn sequence, this contains the sequence rtx.
190 Zero otherwise. */
191
192 rtx final_sequence;
193
194 #ifdef ASSEMBLER_DIALECT
195
196 /* Number of the assembler dialect to use, starting at 0. */
197 static int dialect_number;
198 #endif
199
200 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
201 rtx current_insn_predicate;
202
203 /* True if printing into -fdump-final-insns= dump. */
204 bool final_insns_dump_p;
205
206 static int asm_insn_count (rtx);
207 static void profile_function (FILE *);
208 static void profile_after_prologue (FILE *);
209 static bool notice_source_line (rtx, bool *);
210 static rtx walk_alter_subreg (rtx *, bool *);
211 static void output_asm_name (void);
212 static void output_alternate_entry_point (FILE *, rtx);
213 static tree get_mem_expr_from_op (rtx, int *);
214 static void output_asm_operand_names (rtx *, int *, int);
215 #ifdef LEAF_REGISTERS
216 static void leaf_renumber_regs (rtx);
217 #endif
218 #ifdef HAVE_cc0
219 static int alter_cond (rtx);
220 #endif
221 #ifndef ADDR_VEC_ALIGN
222 static int final_addr_vec_align (rtx);
223 #endif
224 static int align_fuzz (rtx, rtx, int, unsigned);
225 \f
226 /* Initialize data in final at the beginning of a compilation. */
227
228 void
229 init_final (const char *filename ATTRIBUTE_UNUSED)
230 {
231 app_on = 0;
232 final_sequence = 0;
233
234 #ifdef ASSEMBLER_DIALECT
235 dialect_number = ASSEMBLER_DIALECT;
236 #endif
237 }
238
/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  */
void
default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: by default no assembler is emitted here.  */
}
248
/* Default implementation of the function_switched_text_sections target
   hook: emits nothing.  */
void
default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
					 tree decl ATTRIBUTE_UNUSED,
					 bool new_is_cold ATTRIBUTE_UNUSED)
{
}
255
/* Default target hook that outputs nothing to a stream.  Used as a
   do-nothing implementation where a target does not need to emit any
   assembler text.  */
void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}
261
262 /* Enable APP processing of subsequent output.
263 Used before the output from an `asm' statement. */
264
265 void
266 app_enable (void)
267 {
268 if (! app_on)
269 {
270 fputs (ASM_APP_ON, asm_out_file);
271 app_on = 1;
272 }
273 }
274
275 /* Disable APP processing of subsequent output.
276 Called from varasm.c before most kinds of output. */
277
278 void
279 app_disable (void)
280 {
281 if (app_on)
282 {
283 fputs (ASM_APP_OFF, asm_out_file);
284 app_on = 0;
285 }
286 }
287 \f
288 /* Return the number of slots filled in the current
289 delayed branch sequence (we don't count the insn needing the
290 delay slot). Zero if not in a delayed branch sequence. */
291
#ifdef DELAY_SLOTS
/* Return the number of slots filled in the current delayed branch
   sequence, not counting the insn needing the delay slot; zero when
   not inside a delayed branch sequence.  */
int
dbr_sequence_length (void)
{
  return final_sequence != 0 ? XVECLEN (final_sequence, 0) - 1 : 0;
}
#endif
302 \f
303 /* The next two pages contain routines used to compute the length of an insn
304 and to shorten branches. */
305
306 /* Arrays for insn lengths, and addresses. The latter is referenced by
307 `insn_current_length'. */
308
309 static int *insn_lengths;
310
311 vec<int> insn_addresses_;
312
313 /* Max uid for which the above arrays are valid. */
314 static int insn_lengths_max_uid;
315
316 /* Address of insn being processed. Used by `insn_current_length'. */
317 int insn_current_address;
318
319 /* Address of insn being processed in previous iteration. */
320 int insn_last_address;
321
322 /* known invariant alignment of insn being processed. */
323 int insn_current_align;
324
325 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
326 gives the next following alignment insn that increases the known
327 alignment, or NULL_RTX if there is no such insn.
328 For any alignment obtained this way, we can again index uid_align with
329 its uid to obtain the next following align that in turn increases the
330 alignment, till we reach NULL_RTX; the sequence obtained this way
331 for each insn we'll call the alignment chain of this insn in the following
332 comments. */
333
/* Per-label alignment record, indexed by CODE_LABEL_NUMBER - min_labelno.
   ALIGNMENT is the log2 alignment to impose on the label; MAX_SKIP is the
   maximum number of bytes that may be skipped to achieve it.  */
struct label_alignment
{
  short alignment;
  short max_skip;
};

/* uid_align[INSN_UID (insn)] gives the next insn in INSN's alignment
   chain (see the big comment above).  */
static rtx *uid_align;
/* uid_shuid[INSN_UID (insn)] is a sequence number strictly increasing
   with insn order; filled in by shorten_branches.  */
static int *uid_shuid;
/* Table of label_alignment records; allocated by compute_alignments
   and grown by shorten_branches.  */
static struct label_alignment *label_align;
343
344 /* Indicate that branch shortening hasn't yet been done. */
345
void
init_insn_lengths (void)
{
  /* Release all branch-shortening data so a subsequent call to
     shorten_branches starts from scratch.  */
  if (uid_shuid)
    {
      free (uid_shuid);
      uid_shuid = 0;
    }
  if (insn_lengths)
    {
      free (insn_lengths);
      insn_lengths = 0;
      /* Marks the length array as invalid for get_attr_length_1.  */
      insn_lengths_max_uid = 0;
    }
  if (HAVE_ATTR_length)
    INSN_ADDRESSES_FREE ();
  if (uid_align)
    {
      free (uid_align);
      uid_align = 0;
    }
}
368
369 /* Obtain the current length of an insn. If branch shortening has been done,
370 get its actual length. Otherwise, use FALLBACK_FN to calculate the
371 length. */
static inline int
get_attr_length_1 (rtx insn, int (*fallback_fn) (rtx))
{
  rtx body;
  int i;
  int length = 0;

  /* Targets without a `length' insn attribute cannot estimate lengths.  */
  if (!HAVE_ATTR_length)
    return 0;

  /* If branch shortening has already recorded a length for this insn,
     that is authoritative.  */
  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];
  else
    switch (GET_CODE (insn))
      {
      case NOTE:
      case BARRIER:
      case CODE_LABEL:
      case DEBUG_INSN:
	/* These emit no machine code.  */
	return 0;

      case CALL_INSN:
	length = fallback_fn (insn);
	break;

      case JUMP_INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
	  {
	    /* Alignment is machine-dependent and should be handled by
	       ADDR_VEC_ALIGN.  */
	  }
	else
	  length = fallback_fn (insn);
	break;

      case INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	  return 0;

	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	  /* An asm statement: per-insn estimate times the number of
	     assembler instructions it contains.  */
	  length = asm_insn_count (body) * fallback_fn (insn);
	else if (GET_CODE (body) == SEQUENCE)
	  /* A SEQUENCE (e.g. a filled delay-slot group): sum the
	     lengths of its elements.  */
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
	else
	  length = fallback_fn (insn);
	break;

      default:
	break;
      }

#ifdef ADJUST_INSN_LENGTH
  /* Let the target apply a final machine-specific correction.  */
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
}
431
/* Obtain the current length of an insn.  If branch shortening has been
   done, get its actual length.  Otherwise, get its maximum length, i.e.
   the default value of the `length' attribute.  */
int
get_attr_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}
439
/* Obtain the current length of an insn.  If branch shortening has been
   done, get its actual length.  Otherwise, get its minimum length, i.e.
   the minimum value of the `length' attribute.  */
int
get_attr_min_length (rtx insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}
447 \f
448 /* Code to handle alignment inside shorten_branches. */
449
450 /* Here is an explanation how the algorithm in align_fuzz can give
451 proper results:
452
453 Call a sequence of instructions beginning with alignment point X
454 and continuing until the next alignment point `block X'. When `X'
455 is used in an expression, it means the alignment value of the
456 alignment point.
457
458 Call the distance between the start of the first insn of block X, and
459 the end of the last insn of block X `IX', for the `inner size of X'.
460 This is clearly the sum of the instruction lengths.
461
462 Likewise with the next alignment-delimited block following X, which we
463 shall call block Y.
464
465 Call the distance between the start of the first insn of block X, and
466 the start of the first insn of block Y `OX', for the `outer size of X'.
467
468 The estimated padding is then OX - IX.
469
470 OX can be safely estimated as
471
472 if (X >= Y)
473 OX = round_up(IX, Y)
474 else
475 OX = round_up(IX, X) + Y - X
476
477 Clearly est(IX) >= real(IX), because that only depends on the
478 instruction lengths, and those being overestimated is a given.
479
480 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
481 we needn't worry about that when thinking about OX.
482
483 When X >= Y, the alignment provided by Y adds no uncertainty factor
484 for branch ranges starting before X, so we can just round what we have.
485 But when X < Y, we don't know anything about the, so to speak,
486 `middle bits', so we have to assume the worst when aligning up from an
487 address mod X to one mod Y, which is Y - X. */
488
489 #ifndef LABEL_ALIGN
490 #define LABEL_ALIGN(LABEL) align_labels_log
491 #endif
492
493 #ifndef LOOP_ALIGN
494 #define LOOP_ALIGN(LABEL) align_loops_log
495 #endif
496
497 #ifndef LABEL_ALIGN_AFTER_BARRIER
498 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
499 #endif
500
501 #ifndef JUMP_ALIGN
502 #define JUMP_ALIGN(LABEL) align_jumps_log
503 #endif
504
/* Default for targetm.asm_out.label_align_after_barrier_max_skip:
   no bytes may be skipped when aligning a label after a barrier.  */
int
default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED)
{
  return 0;
}
510
/* Default for targetm.asm_out.loop_align_max_skip: use the global
   align_loops_max_skip value.  */
int
default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
{
  return align_loops_max_skip;
}
516
/* Default for targetm.asm_out.label_align_max_skip: use the global
   align_labels_max_skip value.  */
int
default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
{
  return align_labels_max_skip;
}
522
/* Default for targetm.asm_out.jump_align_max_skip: use the global
   align_jumps_max_skip value.  */
int
default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
{
  return align_jumps_max_skip;
}
528
529 #ifndef ADDR_VEC_ALIGN
530 static int
531 final_addr_vec_align (rtx addr_vec)
532 {
533 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
534
535 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
536 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
537 return exact_log2 (align);
538
539 }
540
541 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
542 #endif
543
544 #ifndef INSN_LENGTH_ALIGNMENT
545 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
546 #endif
547
548 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
549
550 static int min_labelno, max_labelno;
551
552 #define LABEL_TO_ALIGNMENT(LABEL) \
553 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
554
555 #define LABEL_TO_MAX_SKIP(LABEL) \
556 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
557
558 /* For the benefit of port specific code do this also as a function. */
559
560 int
561 label_to_alignment (rtx label)
562 {
563 if (CODE_LABEL_NUMBER (label) <= max_labelno)
564 return LABEL_TO_ALIGNMENT (label);
565 return 0;
566 }
567
568 int
569 label_to_max_skip (rtx label)
570 {
571 if (CODE_LABEL_NUMBER (label) <= max_labelno)
572 return LABEL_TO_MAX_SKIP (label);
573 return 0;
574 }
575
576 /* The differences in addresses
577 between a branch and its target might grow or shrink depending on
578 the alignment the start insn of the range (the branch for a forward
579 branch or the label for a backward branch) starts out on; if these
580 differences are used naively, they can even oscillate infinitely.
581 We therefore want to compute a 'worst case' address difference that
582 is independent of the alignment the start insn of the range end
583 up on, and that is at least as large as the actual difference.
584 The function align_fuzz calculates the amount we have to add to the
585 naively computed difference, by traversing the part of the alignment
586 chain of the start insn of the range that is in front of the end insn
587 of the range, and considering for each alignment the maximum amount
588 that it might contribute to a size increase.
589
590 For casesi tables, we also want to know worst case minimum amounts of
591 address difference, in case a machine description wants to introduce
592 some common offset that is added to all offsets in a table.
593 For this purpose, align_fuzz with a growth argument of 0 computes the
594 appropriate adjustment. */
595
596 /* Compute the maximum delta by which the difference of the addresses of
597 START and END might grow / shrink due to a different address for start
598 which changes the size of alignment insns between START and END.
599 KNOWN_ALIGN_LOG is the alignment known for START.
600 GROWTH should be ~0 if the objective is to compute potential code size
601 increase, and 0 if the objective is to compute potential shrink.
602 The return value is undefined for any other value of GROWTH. */
603
static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  /* Walk the alignment chain of START (via uid_align), stopping at the
     first alignment insn past END.  */
  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      /* Address of the alignment label before its own padding.  */
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      known_align_log = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << known_align_log;
      /* An alignment that does not exceed the already-known alignment
	 adds no uncertainty.  */
      if (new_align < known_align)
	continue;
      /* Per-alignment worst-case padding contribution: with GROWTH == ~0
	 this accumulates the maximum extra padding the label may add;
	 with GROWTH == 0 the maximum it may remove (see the algorithm
	 description earlier in this file).  */
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}
630
631 /* Compute a worst-case reference address of a branch so that it
632 can be safely used in the presence of aligned labels. Since the
633 size of the branch itself is unknown, the size of the branch is
634 not included in the range. I.e. for a forward branch, the reference
635 address is the end address of the branch as known from the previous
636 branch shortening pass, minus a value to account for possible size
637 increase due to alignment. For a backward branch, it is the start
638 address of the branch as known from the current pass, plus a value
639 to account for possible size increase due to alignment.
640 NB.: Therefore, the maximum offset allowed for backward branches needs
641 to exclude the branch size. */
642
int
insn_current_reference_address (rtx branch)
{
  rtx dest, seq;
  int seq_uid;

  /* Without recorded insn addresses there is nothing to compute.  */
  if (! INSN_ADDRESSES_SET_P ())
    return 0;

  /* SEQ is the insn occupying BRANCH's slot in the insn stream —
     BRANCH itself, or presumably the SEQUENCE containing it when
     BRANCH sits in a delay-slot group; it carries the address and
     length data used below.  */
  seq = NEXT_INSN (PREV_INSN (branch));
  seq_uid = INSN_UID (seq);
  if (!JUMP_P (branch))
    /* This can happen for example on the PA; the objective is to know the
       offset to address something in front of the start of the function.
       Thus, we can treat it like a backward branch.
       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
       any alignment we'd encounter, so we skip the call to align_fuzz.  */
    return insn_current_address;
  dest = JUMP_LABEL (branch);

  /* BRANCH has no proper alignment chain set, so use SEQ.
     BRANCH also has no INSN_SHUID.  */
  if (INSN_SHUID (seq) < INSN_SHUID (dest))
    {
      /* Forward branch.  End address from the previous pass, minus the
	 worst-case size increase due to alignment.  */
      return (insn_last_address + insn_lengths[seq_uid]
	      - align_fuzz (seq, dest, length_unit_log, ~0));
    }
  else
    {
      /* Backward branch.  Start address from the current pass, plus the
	 worst-case size increase due to alignment.  */
      return (insn_current_address
	      + align_fuzz (dest, seq, length_unit_log, ~0));
    }
}
678 \f
679 /* Compute branch alignments based on frequency information in the
680 CFG. */
681
unsigned int
compute_alignments (void)
{
  int log, max_skip, max_log;
  basic_block bb;
  int freq_max = 0;
  int freq_threshold = 0;

  /* Discard any alignment table left over from a previous function.  */
  if (label_align)
    {
      free (label_align);
      label_align = 0;
    }

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);

  /* If not optimizing or optimizing for size, don't assign any alignments.  */
  if (! optimize || optimize_function_for_size_p (cfun))
    return 0;

  if (dump_file)
    {
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, TDF_DETAILS);
      flow_loops_dump (dump_file, NULL, 1);
    }
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  /* Find the maximum basic-block frequency; the threshold below is a
     fraction of it controlled by PARAM_ALIGN_THRESHOLD.  */
  FOR_EACH_BB (bb)
    if (bb->frequency > freq_max)
      freq_max = bb->frequency;
  freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);

  if (dump_file)
    fprintf(dump_file, "freq_max: %i\n",freq_max);
  FOR_EACH_BB (bb)
    {
      rtx label = BB_HEAD (bb);
      int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
      edge e;
      edge_iterator ei;

      /* Only blocks that start with a label and are optimized for speed
	 are candidates for alignment.  */
      if (!LABEL_P (label)
	  || optimize_bb_for_size_p (bb))
	{
	  if (dump_file)
	    fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
		    bb->index, bb->frequency, bb->loop_father->num,
		    bb_loop_depth (bb));
	  continue;
	}
      max_log = LABEL_ALIGN (label);
      max_skip = targetm.asm_out.label_align_max_skip (label);

      /* Split the incoming frequency between fallthru and branch edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
	  else
	    branch_frequency += EDGE_FREQUENCY (e);
	}
      if (dump_file)
	{
	  fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
		  bb->index, bb->frequency, bb->loop_father->num,
		  bb_loop_depth (bb),
		  fallthru_frequency, branch_frequency);
	  if (!bb->loop_father->inner && bb->loop_father->num)
	    fprintf (dump_file, " inner_loop");
	  if (bb->loop_father->header == bb)
	    fprintf (dump_file, " loop_header");
	  fprintf (dump_file, "\n");
	}

      /* There are two purposes to align block with no fallthru incoming edge:
	 1) to avoid fetch stalls when branch destination is near cache boundary
	 2) to improve cache efficiency in case the previous block is not executed
	    (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than the predecessor and the predecessor is likely to not be executed
	 when function is called.  */

      if (!has_fallthru
	  && (branch_frequency > freq_threshold
	      || (bb->frequency > bb->prev_bb->frequency * 10
		  && (bb->prev_bb->frequency
		      <= ENTRY_BLOCK_PTR->frequency / 2))))
	{
	  log = JUMP_ALIGN (label);
	  if (dump_file)
	    fprintf(dump_file, " jump alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = targetm.asm_out.jump_align_max_skip (label);
	    }
	}
      /* In case block is frequent and reached mostly by non-fallthru edge,
	 align it.  It is most likely a first block of loop.  */
      if (has_fallthru
	  && optimize_bb_for_speed_p (bb)
	  && branch_frequency + fallthru_frequency > freq_threshold
	  && (branch_frequency
	      > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
	{
	  log = LOOP_ALIGN (label);
	  if (dump_file)
	    fprintf(dump_file, " internal loop alignment added.\n");
	  if (max_log < log)
	    {
	      max_log = log;
	      max_skip = targetm.asm_out.loop_align_max_skip (label);
	    }
	}
      /* Record the strongest alignment found for this label.  */
      LABEL_TO_ALIGNMENT (label) = max_log;
      LABEL_TO_MAX_SKIP (label) = max_skip;
    }

  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);
  return 0;
}
807
/* RTL pass descriptor wrapping compute_alignments; registered with the
   pass manager as "alignments".  */
struct rtl_opt_pass pass_compute_alignments =
{
 {
  RTL_PASS,
  "alignments",                         /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  compute_alignments,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_rtl_sharing
  | TODO_ggc_collect                    /* todo_flags_finish */
 }
};
828
829 \f
830 /* Make a pass over all insns and compute their actual lengths by shortening
831 any branches of variable length if possible. */
832
833 /* shorten_branches might be called multiple times: for example, the SH
834 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
835 In order to do this, it needs proper length information, which it obtains
836 by calling shorten_branches. This cannot be collapsed with
837 shorten_branches itself into a single pass unless we also want to integrate
838 reorg.c, since the branch splitting exposes new instructions with delay
839 slots. */
840
841 void
842 shorten_branches (rtx first)
843 {
844 rtx insn;
845 int max_uid;
846 int i;
847 int max_log;
848 int max_skip;
849 #define MAX_CODE_ALIGN 16
850 rtx seq;
851 int something_changed = 1;
852 char *varying_length;
853 rtx body;
854 int uid;
855 rtx align_tab[MAX_CODE_ALIGN];
856
857 /* Compute maximum UID and allocate label_align / uid_shuid. */
858 max_uid = get_max_uid ();
859
860 /* Free uid_shuid before reallocating it. */
861 free (uid_shuid);
862
863 uid_shuid = XNEWVEC (int, max_uid);
864
865 if (max_labelno != max_label_num ())
866 {
867 int old = max_labelno;
868 int n_labels;
869 int n_old_labels;
870
871 max_labelno = max_label_num ();
872
873 n_labels = max_labelno - min_labelno + 1;
874 n_old_labels = old - min_labelno + 1;
875
876 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
877
878 /* Range of labels grows monotonically in the function. Failing here
879 means that the initialization of array got lost. */
880 gcc_assert (n_old_labels <= n_labels);
881
882 memset (label_align + n_old_labels, 0,
883 (n_labels - n_old_labels) * sizeof (struct label_alignment));
884 }
885
886 /* Initialize label_align and set up uid_shuid to be strictly
887 monotonically rising with insn order. */
888 /* We use max_log here to keep track of the maximum alignment we want to
889 impose on the next CODE_LABEL (or the current one if we are processing
890 the CODE_LABEL itself). */
891
892 max_log = 0;
893 max_skip = 0;
894
895 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
896 {
897 int log;
898
899 INSN_SHUID (insn) = i++;
900 if (INSN_P (insn))
901 continue;
902
903 if (LABEL_P (insn))
904 {
905 rtx next;
906 bool next_is_jumptable;
907
908 /* Merge in alignments computed by compute_alignments. */
909 log = LABEL_TO_ALIGNMENT (insn);
910 if (max_log < log)
911 {
912 max_log = log;
913 max_skip = LABEL_TO_MAX_SKIP (insn);
914 }
915
916 next = next_nonnote_insn (insn);
917 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
918 if (!next_is_jumptable)
919 {
920 log = LABEL_ALIGN (insn);
921 if (max_log < log)
922 {
923 max_log = log;
924 max_skip = targetm.asm_out.label_align_max_skip (insn);
925 }
926 }
927 /* ADDR_VECs only take room if read-only data goes into the text
928 section. */
929 if ((JUMP_TABLES_IN_TEXT_SECTION
930 || readonly_data_section == text_section)
931 && next_is_jumptable)
932 {
933 log = ADDR_VEC_ALIGN (next);
934 if (max_log < log)
935 {
936 max_log = log;
937 max_skip = targetm.asm_out.label_align_max_skip (insn);
938 }
939 }
940 LABEL_TO_ALIGNMENT (insn) = max_log;
941 LABEL_TO_MAX_SKIP (insn) = max_skip;
942 max_log = 0;
943 max_skip = 0;
944 }
945 else if (BARRIER_P (insn))
946 {
947 rtx label;
948
949 for (label = insn; label && ! INSN_P (label);
950 label = NEXT_INSN (label))
951 if (LABEL_P (label))
952 {
953 log = LABEL_ALIGN_AFTER_BARRIER (insn);
954 if (max_log < log)
955 {
956 max_log = log;
957 max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
958 }
959 break;
960 }
961 }
962 }
963 if (!HAVE_ATTR_length)
964 return;
965
966 /* Allocate the rest of the arrays. */
967 insn_lengths = XNEWVEC (int, max_uid);
968 insn_lengths_max_uid = max_uid;
969 /* Syntax errors can lead to labels being outside of the main insn stream.
970 Initialize insn_addresses, so that we get reproducible results. */
971 INSN_ADDRESSES_ALLOC (max_uid);
972
973 varying_length = XCNEWVEC (char, max_uid);
974
975 /* Initialize uid_align. We scan instructions
976 from end to start, and keep in align_tab[n] the last seen insn
977 that does an alignment of at least n+1, i.e. the successor
978 in the alignment chain for an insn that does / has a known
979 alignment of n. */
980 uid_align = XCNEWVEC (rtx, max_uid);
981
982 for (i = MAX_CODE_ALIGN; --i >= 0;)
983 align_tab[i] = NULL_RTX;
984 seq = get_last_insn ();
985 for (; seq; seq = PREV_INSN (seq))
986 {
987 int uid = INSN_UID (seq);
988 int log;
989 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
990 uid_align[uid] = align_tab[0];
991 if (log)
992 {
993 /* Found an alignment label. */
994 uid_align[uid] = align_tab[log];
995 for (i = log - 1; i >= 0; i--)
996 align_tab[i] = seq;
997 }
998 }
999
1000 /* When optimizing, we start assuming minimum length, and keep increasing
1001 lengths as we find the need for this, till nothing changes.
1002 When not optimizing, we start assuming maximum lengths, and
1003 do a single pass to update the lengths. */
1004 bool increasing = optimize != 0;
1005
1006 #ifdef CASE_VECTOR_SHORTEN_MODE
1007 if (optimize)
1008 {
1009 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1010 label fields. */
1011
1012 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1013 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1014 int rel;
1015
1016 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1017 {
1018 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1019 int len, i, min, max, insn_shuid;
1020 int min_align;
1021 addr_diff_vec_flags flags;
1022
1023 if (!JUMP_P (insn)
1024 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1025 continue;
1026 pat = PATTERN (insn);
1027 len = XVECLEN (pat, 1);
1028 gcc_assert (len > 0);
1029 min_align = MAX_CODE_ALIGN;
1030 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1031 {
1032 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1033 int shuid = INSN_SHUID (lab);
1034 if (shuid < min)
1035 {
1036 min = shuid;
1037 min_lab = lab;
1038 }
1039 if (shuid > max)
1040 {
1041 max = shuid;
1042 max_lab = lab;
1043 }
1044 if (min_align > LABEL_TO_ALIGNMENT (lab))
1045 min_align = LABEL_TO_ALIGNMENT (lab);
1046 }
1047 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1048 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1049 insn_shuid = INSN_SHUID (insn);
1050 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1051 memset (&flags, 0, sizeof (flags));
1052 flags.min_align = min_align;
1053 flags.base_after_vec = rel > insn_shuid;
1054 flags.min_after_vec = min > insn_shuid;
1055 flags.max_after_vec = max > insn_shuid;
1056 flags.min_after_base = min > rel;
1057 flags.max_after_base = max > rel;
1058 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1059
1060 if (increasing)
1061 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1062 }
1063 }
1064 #endif /* CASE_VECTOR_SHORTEN_MODE */
1065
1066 /* Compute initial lengths, addresses, and varying flags for each insn. */
1067 int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length;
1068
1069 for (insn_current_address = 0, insn = first;
1070 insn != 0;
1071 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1072 {
1073 uid = INSN_UID (insn);
1074
1075 insn_lengths[uid] = 0;
1076
1077 if (LABEL_P (insn))
1078 {
1079 int log = LABEL_TO_ALIGNMENT (insn);
1080 if (log)
1081 {
1082 int align = 1 << log;
1083 int new_address = (insn_current_address + align - 1) & -align;
1084 insn_lengths[uid] = new_address - insn_current_address;
1085 }
1086 }
1087
1088 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1089
1090 if (NOTE_P (insn) || BARRIER_P (insn)
1091 || LABEL_P (insn) || DEBUG_INSN_P(insn))
1092 continue;
1093 if (INSN_DELETED_P (insn))
1094 continue;
1095
1096 body = PATTERN (insn);
1097 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1098 {
1099 /* This only takes room if read-only data goes into the text
1100 section. */
1101 if (JUMP_TABLES_IN_TEXT_SECTION
1102 || readonly_data_section == text_section)
1103 insn_lengths[uid] = (XVECLEN (body,
1104 GET_CODE (body) == ADDR_DIFF_VEC)
1105 * GET_MODE_SIZE (GET_MODE (body)));
1106 /* Alignment is handled by ADDR_VEC_ALIGN. */
1107 }
1108 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1109 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1110 else if (GET_CODE (body) == SEQUENCE)
1111 {
1112 int i;
1113 int const_delay_slots;
1114 #ifdef DELAY_SLOTS
1115 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1116 #else
1117 const_delay_slots = 0;
1118 #endif
1119 int (*inner_length_fun) (rtx)
1120 = const_delay_slots ? length_fun : insn_default_length;
1121 /* Inside a delay slot sequence, we do not do any branch shortening
1122 if the shortening could change the number of delay slots
1123 of the branch. */
1124 for (i = 0; i < XVECLEN (body, 0); i++)
1125 {
1126 rtx inner_insn = XVECEXP (body, 0, i);
1127 int inner_uid = INSN_UID (inner_insn);
1128 int inner_length;
1129
1130 if (GET_CODE (body) == ASM_INPUT
1131 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1132 inner_length = (asm_insn_count (PATTERN (inner_insn))
1133 * insn_default_length (inner_insn));
1134 else
1135 inner_length = inner_length_fun (inner_insn);
1136
1137 insn_lengths[inner_uid] = inner_length;
1138 if (const_delay_slots)
1139 {
1140 if ((varying_length[inner_uid]
1141 = insn_variable_length_p (inner_insn)) != 0)
1142 varying_length[uid] = 1;
1143 INSN_ADDRESSES (inner_uid) = (insn_current_address
1144 + insn_lengths[uid]);
1145 }
1146 else
1147 varying_length[inner_uid] = 0;
1148 insn_lengths[uid] += inner_length;
1149 }
1150 }
1151 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1152 {
1153 insn_lengths[uid] = length_fun (insn);
1154 varying_length[uid] = insn_variable_length_p (insn);
1155 }
1156
1157 /* If needed, do any adjustment. */
1158 #ifdef ADJUST_INSN_LENGTH
1159 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1160 if (insn_lengths[uid] < 0)
1161 fatal_insn ("negative insn length", insn);
1162 #endif
1163 }
1164
1165 /* Now loop over all the insns finding varying length insns. For each,
1166 get the current insn length. If it has changed, reflect the change.
1167 When nothing changes for a full pass, we are done. */
1168
1169 while (something_changed)
1170 {
1171 something_changed = 0;
1172 insn_current_align = MAX_CODE_ALIGN - 1;
1173 for (insn_current_address = 0, insn = first;
1174 insn != 0;
1175 insn = NEXT_INSN (insn))
1176 {
1177 int new_length;
1178 #ifdef ADJUST_INSN_LENGTH
1179 int tmp_length;
1180 #endif
1181 int length_align;
1182
1183 uid = INSN_UID (insn);
1184
1185 if (LABEL_P (insn))
1186 {
1187 int log = LABEL_TO_ALIGNMENT (insn);
1188 if (log > insn_current_align)
1189 {
1190 int align = 1 << log;
1191 int new_address= (insn_current_address + align - 1) & -align;
1192 insn_lengths[uid] = new_address - insn_current_address;
1193 insn_current_align = log;
1194 insn_current_address = new_address;
1195 }
1196 else
1197 insn_lengths[uid] = 0;
1198 INSN_ADDRESSES (uid) = insn_current_address;
1199 continue;
1200 }
1201
1202 length_align = INSN_LENGTH_ALIGNMENT (insn);
1203 if (length_align < insn_current_align)
1204 insn_current_align = length_align;
1205
1206 insn_last_address = INSN_ADDRESSES (uid);
1207 INSN_ADDRESSES (uid) = insn_current_address;
1208
1209 #ifdef CASE_VECTOR_SHORTEN_MODE
1210 if (optimize && JUMP_P (insn)
1211 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1212 {
1213 rtx body = PATTERN (insn);
1214 int old_length = insn_lengths[uid];
1215 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1216 rtx min_lab = XEXP (XEXP (body, 2), 0);
1217 rtx max_lab = XEXP (XEXP (body, 3), 0);
1218 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1219 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1220 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1221 rtx prev;
1222 int rel_align = 0;
1223 addr_diff_vec_flags flags;
1224 enum machine_mode vec_mode;
1225
1226 /* Avoid automatic aggregate initialization. */
1227 flags = ADDR_DIFF_VEC_FLAGS (body);
1228
1229 /* Try to find a known alignment for rel_lab. */
1230 for (prev = rel_lab;
1231 prev
1232 && ! insn_lengths[INSN_UID (prev)]
1233 && ! (varying_length[INSN_UID (prev)] & 1);
1234 prev = PREV_INSN (prev))
1235 if (varying_length[INSN_UID (prev)] & 2)
1236 {
1237 rel_align = LABEL_TO_ALIGNMENT (prev);
1238 break;
1239 }
1240
1241 /* See the comment on addr_diff_vec_flags in rtl.h for the
1242 meaning of the flags values. base: REL_LAB vec: INSN */
1243 /* Anything after INSN has still addresses from the last
1244 pass; adjust these so that they reflect our current
1245 estimate for this pass. */
1246 if (flags.base_after_vec)
1247 rel_addr += insn_current_address - insn_last_address;
1248 if (flags.min_after_vec)
1249 min_addr += insn_current_address - insn_last_address;
1250 if (flags.max_after_vec)
1251 max_addr += insn_current_address - insn_last_address;
1252 /* We want to know the worst case, i.e. lowest possible value
1253 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1254 its offset is positive, and we have to be wary of code shrink;
1255 otherwise, it is negative, and we have to be vary of code
1256 size increase. */
1257 if (flags.min_after_base)
1258 {
1259 /* If INSN is between REL_LAB and MIN_LAB, the size
1260 changes we are about to make can change the alignment
1261 within the observed offset, therefore we have to break
1262 it up into two parts that are independent. */
1263 if (! flags.base_after_vec && flags.min_after_vec)
1264 {
1265 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1266 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1267 }
1268 else
1269 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1270 }
1271 else
1272 {
1273 if (flags.base_after_vec && ! flags.min_after_vec)
1274 {
1275 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1276 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1277 }
1278 else
1279 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1280 }
1281 /* Likewise, determine the highest lowest possible value
1282 for the offset of MAX_LAB. */
1283 if (flags.max_after_base)
1284 {
1285 if (! flags.base_after_vec && flags.max_after_vec)
1286 {
1287 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1288 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1289 }
1290 else
1291 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1292 }
1293 else
1294 {
1295 if (flags.base_after_vec && ! flags.max_after_vec)
1296 {
1297 max_addr += align_fuzz (max_lab, insn, 0, 0);
1298 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1299 }
1300 else
1301 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1302 }
1303 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1304 max_addr - rel_addr, body);
1305 if (!increasing
1306 || (GET_MODE_SIZE (vec_mode)
1307 >= GET_MODE_SIZE (GET_MODE (body))))
1308 PUT_MODE (body, vec_mode);
1309 if (JUMP_TABLES_IN_TEXT_SECTION
1310 || readonly_data_section == text_section)
1311 {
1312 insn_lengths[uid]
1313 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1314 insn_current_address += insn_lengths[uid];
1315 if (insn_lengths[uid] != old_length)
1316 something_changed = 1;
1317 }
1318
1319 continue;
1320 }
1321 #endif /* CASE_VECTOR_SHORTEN_MODE */
1322
1323 if (! (varying_length[uid]))
1324 {
1325 if (NONJUMP_INSN_P (insn)
1326 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1327 {
1328 int i;
1329
1330 body = PATTERN (insn);
1331 for (i = 0; i < XVECLEN (body, 0); i++)
1332 {
1333 rtx inner_insn = XVECEXP (body, 0, i);
1334 int inner_uid = INSN_UID (inner_insn);
1335
1336 INSN_ADDRESSES (inner_uid) = insn_current_address;
1337
1338 insn_current_address += insn_lengths[inner_uid];
1339 }
1340 }
1341 else
1342 insn_current_address += insn_lengths[uid];
1343
1344 continue;
1345 }
1346
1347 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1348 {
1349 int i;
1350
1351 body = PATTERN (insn);
1352 new_length = 0;
1353 for (i = 0; i < XVECLEN (body, 0); i++)
1354 {
1355 rtx inner_insn = XVECEXP (body, 0, i);
1356 int inner_uid = INSN_UID (inner_insn);
1357 int inner_length;
1358
1359 INSN_ADDRESSES (inner_uid) = insn_current_address;
1360
1361 /* insn_current_length returns 0 for insns with a
1362 non-varying length. */
1363 if (! varying_length[inner_uid])
1364 inner_length = insn_lengths[inner_uid];
1365 else
1366 inner_length = insn_current_length (inner_insn);
1367
1368 if (inner_length != insn_lengths[inner_uid])
1369 {
1370 if (!increasing || inner_length > insn_lengths[inner_uid])
1371 {
1372 insn_lengths[inner_uid] = inner_length;
1373 something_changed = 1;
1374 }
1375 else
1376 inner_length = insn_lengths[inner_uid];
1377 }
1378 insn_current_address += inner_length;
1379 new_length += inner_length;
1380 }
1381 }
1382 else
1383 {
1384 new_length = insn_current_length (insn);
1385 insn_current_address += new_length;
1386 }
1387
1388 #ifdef ADJUST_INSN_LENGTH
1389 /* If needed, do any adjustment. */
1390 tmp_length = new_length;
1391 ADJUST_INSN_LENGTH (insn, new_length);
1392 insn_current_address += (new_length - tmp_length);
1393 #endif
1394
1395 if (new_length != insn_lengths[uid]
1396 && (!increasing || new_length > insn_lengths[uid]))
1397 {
1398 insn_lengths[uid] = new_length;
1399 something_changed = 1;
1400 }
1401 else
1402 insn_current_address += insn_lengths[uid] - new_length;
1403 }
1404 /* For a non-optimizing compile, do only a single pass. */
1405 if (!increasing)
1406 break;
1407 }
1408
1409 free (varying_length);
1410 }
1411
1412 /* Given the body of an INSN known to be generated by an ASM statement, return
1413 the number of machine instructions likely to be generated for this insn.
1414 This is used to compute its length. */
1415
1416 static int
1417 asm_insn_count (rtx body)
1418 {
1419 const char *templ;
1420
1421 if (GET_CODE (body) == ASM_INPUT)
1422 templ = XSTR (body, 0);
1423 else
1424 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1425
1426 return asm_str_count (templ);
1427 }
1428
/* Return the number of machine instructions likely to be generated for the
   inline-asm template TEMPL.  */
int
asm_str_count (const char *templ)
{
  const char *p = templ;
  int n_insns;

  /* An empty template expands to no instructions at all.  */
  if (*p == '\0')
    return 0;

  /* Otherwise there is at least one instruction, plus one more for
     each logical line separator or newline in the template.  */
  for (n_insns = 1; *p != '\0'; p++)
    if (IS_ASM_LOGICAL_LINE_SEPARATOR (*p, p) || *p == '\n')
      n_insns++;

  return n_insns;
}
1446 \f
/* ??? This is probably the wrong place for these.  */
/* Structure recording the mapping from source file and directory
   names at compile time to those to be embedded in debug
   information.  One node per -fdebug-prefix-map option.  */
typedef struct debug_prefix_map
{
  const char *old_prefix;	/* Compile-time prefix to be replaced.  */
  const char *new_prefix;	/* Prefix to emit in debug info instead.  */
  size_t old_len;		/* strlen (old_prefix).  */
  size_t new_len;		/* strlen (new_prefix).  */
  struct debug_prefix_map *next;	/* Next mapping in the list.  */
} debug_prefix_map;

/* Linked list of such structures.  */
debug_prefix_map *debug_prefix_maps;
1462
1463
1464 /* Record a debug file prefix mapping. ARG is the argument to
1465 -fdebug-prefix-map and must be of the form OLD=NEW. */
1466
1467 void
1468 add_debug_prefix_map (const char *arg)
1469 {
1470 debug_prefix_map *map;
1471 const char *p;
1472
1473 p = strchr (arg, '=');
1474 if (!p)
1475 {
1476 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1477 return;
1478 }
1479 map = XNEW (debug_prefix_map);
1480 map->old_prefix = xstrndup (arg, p - arg);
1481 map->old_len = p - arg;
1482 p++;
1483 map->new_prefix = xstrdup (p);
1484 map->new_len = strlen (p);
1485 map->next = debug_prefix_maps;
1486 debug_prefix_maps = map;
1487 }
1488
1489 /* Perform user-specified mapping of debug filename prefixes. Return
1490 the new name corresponding to FILENAME. */
1491
1492 const char *
1493 remap_debug_filename (const char *filename)
1494 {
1495 debug_prefix_map *map;
1496 char *s;
1497 const char *name;
1498 size_t name_len;
1499
1500 for (map = debug_prefix_maps; map; map = map->next)
1501 if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1502 break;
1503 if (!map)
1504 return filename;
1505 name = filename + map->old_len;
1506 name_len = strlen (name) + 1;
1507 s = (char *) alloca (name_len + map->new_len);
1508 memcpy (s, map->new_prefix, map->new_len);
1509 memcpy (s + map->new_len, name, name_len);
1510 return ggc_strdup (s);
1511 }
1512 \f
1513 /* Return true if DWARF2 debug info can be emitted for DECL. */
1514
1515 static bool
1516 dwarf2_debug_info_emitted_p (tree decl)
1517 {
1518 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1519 return false;
1520
1521 if (DECL_IGNORED_P (decl))
1522 return false;
1523
1524 return true;
1525 }
1526
1527 /* Return scope resulting from combination of S1 and S2. */
1528 static tree
1529 choose_inner_scope (tree s1, tree s2)
1530 {
1531 if (!s1)
1532 return s2;
1533 if (!s2)
1534 return s1;
1535 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1536 return s1;
1537 return s2;
1538 }
1539
/* Emit lexical block notes needed to change scope from S1 to S2,
   inserting them before ORIG_INSN.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Walk both scopes up their BLOCK_SUPERCONTEXT chains until they
     meet.  The block with the larger number is the more deeply nested
     one, so raising it first keeps the two walks in step.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  /* TS1 (== TS2) is now the common ancestor scope.  */
  com = ts1;

  /* Close scopes: emit a BLOCK_END note for every scope from S1 up to,
     but not including, the common ancestor.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes: emit a BLOCK_BEG note for every scope from S2 up to
     the common ancestor.  Each new note becomes the insertion point
     for the next, so outer scopes end up emitted before inner ones.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
1583
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

static void
reemit_insn_block_notes (void)
{
  /* Start in the function's outermost scope.  */
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					     insn_scope (XVECEXP (body, 0, i)));
	}
      if (! this_block)
	{
	  /* An insn with no scope and no location stays in the current
	     scope; one with a location falls back to the outermost
	     block of the function.  */
	  if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
	    continue;
	  else
	    this_block = DECL_INITIAL (cfun->decl);
	}

      if (this_block != cur_block)
	{
	  /* Scope changed: emit the END/BEG notes needed to move from
	     CUR_BLOCK to THIS_BLOCK before this insn.  */
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Emit a scratch
     note at the end so the final scopes can be closed, then drop it.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
1639
/* Output assembler code for the start of a function,
   and initialize some of the variables in this file
   for the new function.  The label for the function and associated
   assembler pseudo-ops have already been output in `assemble_start_function'.

   FIRST is the first insn of the rtl for the function being compiled.
   FILE is the file to write assembler code to.
   OPTIMIZE_P is nonzero if we should eliminate redundant
     test and compare insns.  */

void
final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
		      int optimize_p ATTRIBUTE_UNUSED)
{
  block_depth = 0;

  this_is_asm_operands = 0;

  /* Seed the line-number tracking state from the prologue's location.  */
  last_filename = LOCATION_FILE (prologue_location);
  last_linenum = LOCATION_LINE (prologue_location);
  last_discriminator = discriminator = 0;

  high_block_linenum = high_function_linenum = last_linenum;

  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->begin_prologue (last_linenum, last_filename);

  /* dwarf2out handles frame (unwind) info directly, even when full
     DWARF2 debug info is not being emitted.  */
  if (!dwarf2_debug_info_emitted_p (current_function_decl))
    dwarf2out_begin_prologue (0, NULL);

#ifdef LEAF_REG_REMAP
  if (crtl->uses_only_leaf_regs)
    leaf_renumber_regs (first);
#endif

  /* The Sun386i and perhaps other machines don't work right
     if the profiling code comes after the prologue.  */
  if (targetm.profile_before_prologue () && crtl->profile)
    profile_function (file);

  /* If debugging, assign block numbers to all of the blocks in this
     function.  */
  if (write_symbols)
    {
      reemit_insn_block_notes ();
      number_blocks (current_function_decl);
      /* We never actually put out begin/end notes for the top-level
	 block in the function.  But, conceptually, that block is
	 always needed.  */
      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
    }

  if (warn_frame_larger_than
      && get_frame_size () > frame_larger_than_size)
    {
      /* Issue a warning */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wd bytes is larger than %wd bytes",
	       get_frame_size (), frame_larger_than_size);
    }

  /* First output the function prologue: code to set up the stack frame.  */
  targetm.asm_out.function_prologue (file, get_frame_size ());

  /* If the machine represents the prologue as RTL, the profiling code must
     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
#ifdef HAVE_prologue
  if (! HAVE_prologue)
#endif
    profile_after_prologue (file);
}
1711
1712 static void
1713 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1714 {
1715 if (!targetm.profile_before_prologue () && crtl->profile)
1716 profile_function (file);
1717 }
1718
/* Output the per-function profiling counter (if the target uses one)
   and the call to the profiler entry point, preserving any registers
   that hold the incoming static chain or struct-return pointer around
   the call.  */
static void
profile_function (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
#ifdef ASM_OUTPUT_REG_PUSH
  rtx sval = NULL, chain = NULL;

  /* Registers that must survive the profiler call: the struct-value
     address and the static chain, when they live in registers.  */
  if (cfun->returns_struct)
    sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
					   true);
  if (cfun->static_chain_decl)
    chain = targetm.calls.static_chain (current_function_decl, true);
#endif /* ASM_OUTPUT_REG_PUSH */

  if (! NO_PROFILE_COUNTERS)
    {
      /* Emit the per-function counter word in the data section,
	 labeled LPn so FUNCTION_PROFILER can reference it.  */
      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
      switch_to_section (data_section);
      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  switch_to_section (current_function_section ());

#ifdef ASM_OUTPUT_REG_PUSH
  /* Save the registers the profiler call might clobber.  */
  if (sval && REG_P (sval))
    ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
  if (chain && REG_P (chain))
    ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
#endif

  FUNCTION_PROFILER (file, current_function_funcdef_no);

#ifdef ASM_OUTPUT_REG_PUSH
  /* Restore in reverse order of the pushes above.  */
  if (chain && REG_P (chain))
    ASM_OUTPUT_REG_POP (file, REGNO (chain));
  if (sval && REG_P (sval))
    ASM_OUTPUT_REG_POP (file, REGNO (sval));
#endif
}
1762
/* Output assembler code for the end of a function.
   For clarity, args are same as those of `final_start_function'
   even though not all of them are needed.  */

void
final_end_function (void)
{
  app_disable ();

  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->end_function (high_function_linenum);

  /* Finally, output the function epilogue:
     code to restore the stack frame and return to the caller.  */
  targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());

  /* And debug output.  */
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->end_epilogue (last_linenum, last_filename);

  /* Frame (unwind) info may be required even when full DWARF2 debug
     info is not being emitted.  */
  if (!dwarf2_debug_info_emitted_p (current_function_decl)
      && dwarf2out_do_frame ())
    dwarf2out_end_epilogue (last_linenum, last_filename);
}
1787 \f
1788
1789 /* Dumper helper for basic block information. FILE is the assembly
1790 output file, and INSN is the instruction being emitted. */
1791
1792 static void
1793 dump_basic_block_info (FILE *file, rtx insn, basic_block *start_to_bb,
1794 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1795 {
1796 basic_block bb;
1797
1798 if (!flag_debug_asm)
1799 return;
1800
1801 if (INSN_UID (insn) < bb_map_size
1802 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1803 {
1804 edge e;
1805 edge_iterator ei;
1806
1807 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1808 if (bb->frequency)
1809 fprintf (file, " freq:%d", bb->frequency);
1810 if (bb->count)
1811 fprintf (file, " count:" HOST_WIDEST_INT_PRINT_DEC,
1812 bb->count);
1813 fprintf (file, " seq:%d", (*bb_seqn)++);
1814 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1815 FOR_EACH_EDGE (e, ei, bb->preds)
1816 {
1817 dump_edge_info (file, e, TDF_DETAILS, 0);
1818 }
1819 fprintf (file, "\n");
1820 }
1821 if (INSN_UID (insn) < bb_map_size
1822 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1823 {
1824 edge e;
1825 edge_iterator ei;
1826
1827 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1828 FOR_EACH_EDGE (e, ei, bb->succs)
1829 {
1830 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1831 }
1832 fprintf (file, "\n");
1833 }
1834 }
1835
/* Output assembler code for some insns: all or part of a function.
   For description of args, see `final_start_function', above.  */

void
final (rtx first, FILE *file, int optimize_p)
{
  rtx insn, next;
  int seen = 0;

  /* Used for -dA dump.  */
  basic_block *start_to_bb = NULL;
  basic_block *end_to_bb = NULL;
  int bb_map_size = 0;
  int bb_seqn = 0;

  last_ignored_compare = 0;

#ifdef HAVE_cc0
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      /* If CC tracking across branches is enabled, record the insn which
	 jumps to each branch only reached from one place.  */
      if (optimize_p && JUMP_P (insn))
	{
	  rtx lab = JUMP_LABEL (insn);
	  if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
	    {
	      LABEL_REFS (lab) = insn;
	    }
	}
    }
#endif

  init_recog ();

  CC_STATUS_INIT;

  if (flag_debug_asm)
    {
      basic_block bb;

      /* Build UID -> basic-block maps so dump_basic_block_info can
	 annotate the -dA assembly output.  */
      bb_map_size = get_max_uid () + 1;
      start_to_bb = XCNEWVEC (basic_block, bb_map_size);
      end_to_bb = XCNEWVEC (basic_block, bb_map_size);

      /* There is no cfg for a thunk.  */
      if (!cfun->is_thunk)
	FOR_EACH_BB_REVERSE (bb)
	  {
	    start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
	    end_to_bb[INSN_UID (BB_END (bb))] = bb;
	  }
    }

  /* Output the insns.  */
  for (insn = first; insn;)
    {
      if (HAVE_ATTR_length)
	{
	  if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
	    {
	      /* This can be triggered by bugs elsewhere in the compiler if
		 new insns are created after init_insn_lengths is called.  */
	      gcc_assert (NOTE_P (insn));
	      insn_current_address = -1;
	    }
	  else
	    insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
	}

      dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
			     bb_map_size, &bb_seqn);
      /* final_scan_insn returns the next insn to scan; it may skip
	 past a delay-slot SEQUENCE it has already output.  */
      insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
    }

  if (flag_debug_asm)
    {
      free (start_to_bb);
      free (end_to_bb);
    }

  /* Remove CFI notes, to avoid compare-debug failures.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && (NOTE_KIND (insn) == NOTE_INSN_CFI
	      || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
	delete_insn (insn);
    }
}
1927 \f
1928 const char *
1929 get_insn_template (int code, rtx insn)
1930 {
1931 switch (insn_data[code].output_format)
1932 {
1933 case INSN_OUTPUT_FORMAT_SINGLE:
1934 return insn_data[code].output.single;
1935 case INSN_OUTPUT_FORMAT_MULTI:
1936 return insn_data[code].output.multi[which_alternative];
1937 case INSN_OUTPUT_FORMAT_FUNCTION:
1938 gcc_assert (insn);
1939 return (*insn_data[code].output.function) (recog_data.operand, insn);
1940
1941 default:
1942 gcc_unreachable ();
1943 }
1944 }
1945
/* Emit the appropriate declaration for an alternate-entry-point
   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
   LABEL_KIND != LABEL_NORMAL.

   The case fall-through in this function is intentional: a weak entry
   is also globalized, and both weak and global entries also receive
   the type directive and the label itself.  */
static void
output_alternate_entry_point (FILE *file, rtx insn)
{
  const char *name = LABEL_NAME (insn);

  switch (LABEL_KIND (insn))
    {
    case LABEL_WEAK_ENTRY:
#ifdef ASM_WEAKEN_LABEL
      ASM_WEAKEN_LABEL (file, name);
#endif
      /* FALLTHRU */
    case LABEL_GLOBAL_ENTRY:
      targetm.asm_out.globalize_label (file, name);
      /* FALLTHRU */
    case LABEL_STATIC_ENTRY:
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
#endif
      ASM_OUTPUT_LABEL (file, name);
      break;

    case LABEL_NORMAL:
    default:
      gcc_unreachable ();
    }
}
1976
1977 /* Given a CALL_INSN, find and return the nested CALL. */
1978 static rtx
1979 call_from_call_insn (rtx insn)
1980 {
1981 rtx x;
1982 gcc_assert (CALL_P (insn));
1983 x = PATTERN (insn);
1984
1985 while (GET_CODE (x) != CALL)
1986 {
1987 switch (GET_CODE (x))
1988 {
1989 default:
1990 gcc_unreachable ();
1991 case COND_EXEC:
1992 x = COND_EXEC_CODE (x);
1993 break;
1994 case PARALLEL:
1995 x = XVECEXP (x, 0, 0);
1996 break;
1997 case SET:
1998 x = XEXP (x, 1);
1999 break;
2000 }
2001 }
2002 return x;
2003 }
2004
2005 /* The final scan for one insn, INSN.
2006 Args are same as in `final', except that INSN
2007 is the insn being scanned.
2008 Value returned is the next insn to be scanned.
2009
2010 NOPEEPHOLES is the flag to disallow peephole processing (currently
2011 used for within delayed branch sequence output).
2012
2013 SEEN is used to track the end of the prologue, for emitting
2014 debug information. We force the emission of a line note after
2015 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
2016 at the beginning of the second basic block, whichever comes
2017 first. */
2018
2019 rtx
2020 final_scan_insn (rtx insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2021 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2022 {
2023 #ifdef HAVE_cc0
2024 rtx set;
2025 #endif
2026 rtx next;
2027
2028 insn_counter++;
2029
2030 /* Ignore deleted insns. These can occur when we split insns (due to a
2031 template of "#") while not optimizing. */
2032 if (INSN_DELETED_P (insn))
2033 return NEXT_INSN (insn);
2034
2035 switch (GET_CODE (insn))
2036 {
2037 case NOTE:
2038 switch (NOTE_KIND (insn))
2039 {
2040 case NOTE_INSN_DELETED:
2041 break;
2042
2043 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2044 in_cold_section_p = !in_cold_section_p;
2045
2046 if (dwarf2out_do_frame ())
2047 dwarf2out_switch_text_section ();
2048 else if (!DECL_IGNORED_P (current_function_decl))
2049 debug_hooks->switch_text_section ();
2050
2051 switch_to_section (current_function_section ());
2052 targetm.asm_out.function_switched_text_sections (asm_out_file,
2053 current_function_decl,
2054 in_cold_section_p);
2055 break;
2056
2057 case NOTE_INSN_BASIC_BLOCK:
2058 if (targetm.asm_out.unwind_emit)
2059 targetm.asm_out.unwind_emit (asm_out_file, insn);
2060
2061 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
2062 {
2063 *seen |= SEEN_EMITTED;
2064 force_source_line = true;
2065 }
2066 else
2067 *seen |= SEEN_BB;
2068
2069 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2070
2071 break;
2072
2073 case NOTE_INSN_EH_REGION_BEG:
2074 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2075 NOTE_EH_HANDLER (insn));
2076 break;
2077
2078 case NOTE_INSN_EH_REGION_END:
2079 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2080 NOTE_EH_HANDLER (insn));
2081 break;
2082
2083 case NOTE_INSN_PROLOGUE_END:
2084 targetm.asm_out.function_end_prologue (file);
2085 profile_after_prologue (file);
2086
2087 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2088 {
2089 *seen |= SEEN_EMITTED;
2090 force_source_line = true;
2091 }
2092 else
2093 *seen |= SEEN_NOTE;
2094
2095 break;
2096
2097 case NOTE_INSN_EPILOGUE_BEG:
2098 if (!DECL_IGNORED_P (current_function_decl))
2099 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2100 targetm.asm_out.function_begin_epilogue (file);
2101 break;
2102
2103 case NOTE_INSN_CFI:
2104 dwarf2out_emit_cfi (NOTE_CFI (insn));
2105 break;
2106
2107 case NOTE_INSN_CFI_LABEL:
2108 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2109 NOTE_LABEL_NUMBER (insn));
2110 break;
2111
2112 case NOTE_INSN_FUNCTION_BEG:
2113 app_disable ();
2114 if (!DECL_IGNORED_P (current_function_decl))
2115 debug_hooks->end_prologue (last_linenum, last_filename);
2116
2117 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2118 {
2119 *seen |= SEEN_EMITTED;
2120 force_source_line = true;
2121 }
2122 else
2123 *seen |= SEEN_NOTE;
2124
2125 break;
2126
2127 case NOTE_INSN_BLOCK_BEG:
2128 if (debug_info_level == DINFO_LEVEL_NORMAL
2129 || debug_info_level == DINFO_LEVEL_VERBOSE
2130 || write_symbols == DWARF2_DEBUG
2131 || write_symbols == VMS_AND_DWARF2_DEBUG
2132 || write_symbols == VMS_DEBUG)
2133 {
2134 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2135
2136 app_disable ();
2137 ++block_depth;
2138 high_block_linenum = last_linenum;
2139
2140 /* Output debugging info about the symbol-block beginning. */
2141 if (!DECL_IGNORED_P (current_function_decl))
2142 debug_hooks->begin_block (last_linenum, n);
2143
2144 /* Mark this block as output. */
2145 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2146 }
2147 if (write_symbols == DBX_DEBUG
2148 || write_symbols == SDB_DEBUG)
2149 {
2150 location_t *locus_ptr
2151 = block_nonartificial_location (NOTE_BLOCK (insn));
2152
2153 if (locus_ptr != NULL)
2154 {
2155 override_filename = LOCATION_FILE (*locus_ptr);
2156 override_linenum = LOCATION_LINE (*locus_ptr);
2157 }
2158 }
2159 break;
2160
2161 case NOTE_INSN_BLOCK_END:
2162 if (debug_info_level == DINFO_LEVEL_NORMAL
2163 || debug_info_level == DINFO_LEVEL_VERBOSE
2164 || write_symbols == DWARF2_DEBUG
2165 || write_symbols == VMS_AND_DWARF2_DEBUG
2166 || write_symbols == VMS_DEBUG)
2167 {
2168 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2169
2170 app_disable ();
2171
2172 /* End of a symbol-block. */
2173 --block_depth;
2174 gcc_assert (block_depth >= 0);
2175
2176 if (!DECL_IGNORED_P (current_function_decl))
2177 debug_hooks->end_block (high_block_linenum, n);
2178 }
2179 if (write_symbols == DBX_DEBUG
2180 || write_symbols == SDB_DEBUG)
2181 {
2182 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2183 location_t *locus_ptr
2184 = block_nonartificial_location (outer_block);
2185
2186 if (locus_ptr != NULL)
2187 {
2188 override_filename = LOCATION_FILE (*locus_ptr);
2189 override_linenum = LOCATION_LINE (*locus_ptr);
2190 }
2191 else
2192 {
2193 override_filename = NULL;
2194 override_linenum = 0;
2195 }
2196 }
2197 break;
2198
2199 case NOTE_INSN_DELETED_LABEL:
2200 /* Emit the label. We may have deleted the CODE_LABEL because
2201 the label could be proved to be unreachable, though still
2202 referenced (in the form of having its address taken. */
2203 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2204 break;
2205
2206 case NOTE_INSN_DELETED_DEBUG_LABEL:
2207 /* Similarly, but need to use different namespace for it. */
2208 if (CODE_LABEL_NUMBER (insn) != -1)
2209 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2210 break;
2211
2212 case NOTE_INSN_VAR_LOCATION:
2213 case NOTE_INSN_CALL_ARG_LOCATION:
2214 if (!DECL_IGNORED_P (current_function_decl))
2215 debug_hooks->var_location (insn);
2216 break;
2217
2218 default:
2219 gcc_unreachable ();
2220 break;
2221 }
2222 break;
2223
2224 case BARRIER:
2225 break;
2226
2227 case CODE_LABEL:
2228 /* The target port might emit labels in the output function for
2229 some insn, e.g. sh.c output_branchy_insn. */
2230 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2231 {
2232 int align = LABEL_TO_ALIGNMENT (insn);
2233 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2234 int max_skip = LABEL_TO_MAX_SKIP (insn);
2235 #endif
2236
2237 if (align && NEXT_INSN (insn))
2238 {
2239 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2240 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2241 #else
2242 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2243 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2244 #else
2245 ASM_OUTPUT_ALIGN (file, align);
2246 #endif
2247 #endif
2248 }
2249 }
2250 CC_STATUS_INIT;
2251
2252 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2253 debug_hooks->label (insn);
2254
2255 app_disable ();
2256
2257 next = next_nonnote_insn (insn);
2258 /* If this label is followed by a jump-table, make sure we put
2259 the label in the read-only section. Also possibly write the
2260 label and jump table together. */
2261 if (next != 0 && JUMP_TABLE_DATA_P (next))
2262 {
2263 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2264 /* In this case, the case vector is being moved by the
2265 target, so don't output the label at all. Leave that
2266 to the back end macros. */
2267 #else
2268 if (! JUMP_TABLES_IN_TEXT_SECTION)
2269 {
2270 int log_align;
2271
2272 switch_to_section (targetm.asm_out.function_rodata_section
2273 (current_function_decl));
2274
2275 #ifdef ADDR_VEC_ALIGN
2276 log_align = ADDR_VEC_ALIGN (next);
2277 #else
2278 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2279 #endif
2280 ASM_OUTPUT_ALIGN (file, log_align);
2281 }
2282 else
2283 switch_to_section (current_function_section ());
2284
2285 #ifdef ASM_OUTPUT_CASE_LABEL
2286 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2287 next);
2288 #else
2289 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2290 #endif
2291 #endif
2292 break;
2293 }
2294 if (LABEL_ALT_ENTRY_P (insn))
2295 output_alternate_entry_point (file, insn);
2296 else
2297 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2298 break;
2299
2300 default:
2301 {
2302 rtx body = PATTERN (insn);
2303 int insn_code_number;
2304 const char *templ;
2305 bool is_stmt;
2306
2307 /* Reset this early so it is correct for ASM statements. */
2308 current_insn_predicate = NULL_RTX;
2309
2310 /* An INSN, JUMP_INSN or CALL_INSN.
2311 First check for special kinds that recog doesn't recognize. */
2312
2313 if (GET_CODE (body) == USE /* These are just declarations. */
2314 || GET_CODE (body) == CLOBBER)
2315 break;
2316
2317 #ifdef HAVE_cc0
2318 {
2319 /* If there is a REG_CC_SETTER note on this insn, it means that
2320 the setting of the condition code was done in the delay slot
2321 of the insn that branched here. So recover the cc status
2322 from the insn that set it. */
2323
2324 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2325 if (note)
2326 {
2327 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2328 cc_prev_status = cc_status;
2329 }
2330 }
2331 #endif
2332
2333 /* Detect insns that are really jump-tables
2334 and output them as such. */
2335
2336 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2337 {
2338 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2339 int vlen, idx;
2340 #endif
2341
2342 if (! JUMP_TABLES_IN_TEXT_SECTION)
2343 switch_to_section (targetm.asm_out.function_rodata_section
2344 (current_function_decl));
2345 else
2346 switch_to_section (current_function_section ());
2347
2348 app_disable ();
2349
2350 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2351 if (GET_CODE (body) == ADDR_VEC)
2352 {
2353 #ifdef ASM_OUTPUT_ADDR_VEC
2354 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2355 #else
2356 gcc_unreachable ();
2357 #endif
2358 }
2359 else
2360 {
2361 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2362 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2363 #else
2364 gcc_unreachable ();
2365 #endif
2366 }
2367 #else
2368 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2369 for (idx = 0; idx < vlen; idx++)
2370 {
2371 if (GET_CODE (body) == ADDR_VEC)
2372 {
2373 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2374 ASM_OUTPUT_ADDR_VEC_ELT
2375 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2376 #else
2377 gcc_unreachable ();
2378 #endif
2379 }
2380 else
2381 {
2382 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2383 ASM_OUTPUT_ADDR_DIFF_ELT
2384 (file,
2385 body,
2386 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2387 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2388 #else
2389 gcc_unreachable ();
2390 #endif
2391 }
2392 }
2393 #ifdef ASM_OUTPUT_CASE_END
2394 ASM_OUTPUT_CASE_END (file,
2395 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2396 insn);
2397 #endif
2398 #endif
2399
2400 switch_to_section (current_function_section ());
2401
2402 break;
2403 }
2404 /* Output this line note if it is the first or the last line
2405 note in a row. */
2406 if (!DECL_IGNORED_P (current_function_decl)
2407 && notice_source_line (insn, &is_stmt))
2408 (*debug_hooks->source_line) (last_linenum, last_filename,
2409 last_discriminator, is_stmt);
2410
2411 if (GET_CODE (body) == ASM_INPUT)
2412 {
2413 const char *string = XSTR (body, 0);
2414
2415 /* There's no telling what that did to the condition codes. */
2416 CC_STATUS_INIT;
2417
2418 if (string[0])
2419 {
2420 expanded_location loc;
2421
2422 app_enable ();
2423 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2424 if (*loc.file && loc.line)
2425 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2426 ASM_COMMENT_START, loc.line, loc.file);
2427 fprintf (asm_out_file, "\t%s\n", string);
2428 #if HAVE_AS_LINE_ZERO
2429 if (*loc.file && loc.line)
2430 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2431 #endif
2432 }
2433 break;
2434 }
2435
2436 /* Detect `asm' construct with operands. */
2437 if (asm_noperands (body) >= 0)
2438 {
2439 unsigned int noperands = asm_noperands (body);
2440 rtx *ops = XALLOCAVEC (rtx, noperands);
2441 const char *string;
2442 location_t loc;
2443 expanded_location expanded;
2444
2445 /* There's no telling what that did to the condition codes. */
2446 CC_STATUS_INIT;
2447
2448 /* Get out the operand values. */
2449 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2450 /* Inhibit dying on what would otherwise be compiler bugs. */
2451 insn_noperands = noperands;
2452 this_is_asm_operands = insn;
2453 expanded = expand_location (loc);
2454
2455 #ifdef FINAL_PRESCAN_INSN
2456 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2457 #endif
2458
2459 /* Output the insn using them. */
2460 if (string[0])
2461 {
2462 app_enable ();
2463 if (expanded.file && expanded.line)
2464 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2465 ASM_COMMENT_START, expanded.line, expanded.file);
2466 output_asm_insn (string, ops);
2467 #if HAVE_AS_LINE_ZERO
2468 if (expanded.file && expanded.line)
2469 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2470 #endif
2471 }
2472
2473 if (targetm.asm_out.final_postscan_insn)
2474 targetm.asm_out.final_postscan_insn (file, insn, ops,
2475 insn_noperands);
2476
2477 this_is_asm_operands = 0;
2478 break;
2479 }
2480
2481 app_disable ();
2482
2483 if (GET_CODE (body) == SEQUENCE)
2484 {
2485 /* A delayed-branch sequence */
2486 int i;
2487
2488 final_sequence = body;
2489
2490 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2491 force the restoration of a comparison that was previously
2492 thought unnecessary. If that happens, cancel this sequence
2493 and cause that insn to be restored. */
2494
2495 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2496 if (next != XVECEXP (body, 0, 1))
2497 {
2498 final_sequence = 0;
2499 return next;
2500 }
2501
2502 for (i = 1; i < XVECLEN (body, 0); i++)
2503 {
2504 rtx insn = XVECEXP (body, 0, i);
2505 rtx next = NEXT_INSN (insn);
2506 /* We loop in case any instruction in a delay slot gets
2507 split. */
2508 do
2509 insn = final_scan_insn (insn, file, 0, 1, seen);
2510 while (insn != next);
2511 }
2512 #ifdef DBR_OUTPUT_SEQEND
2513 DBR_OUTPUT_SEQEND (file);
2514 #endif
2515 final_sequence = 0;
2516
2517 /* If the insn requiring the delay slot was a CALL_INSN, the
2518 insns in the delay slot are actually executed before the
2519 called function. Hence we don't preserve any CC-setting
2520 actions in these insns and the CC must be marked as being
2521 clobbered by the function. */
2522 if (CALL_P (XVECEXP (body, 0, 0)))
2523 {
2524 CC_STATUS_INIT;
2525 }
2526 break;
2527 }
2528
2529 /* We have a real machine instruction as rtl. */
2530
2531 body = PATTERN (insn);
2532
2533 #ifdef HAVE_cc0
2534 set = single_set (insn);
2535
2536 /* Check for redundant test and compare instructions
2537 (when the condition codes are already set up as desired).
2538 This is done only when optimizing; if not optimizing,
2539 it should be possible for the user to alter a variable
2540 with the debugger in between statements
2541 and the next statement should reexamine the variable
2542 to compute the condition codes. */
2543
2544 if (optimize_p)
2545 {
2546 if (set
2547 && GET_CODE (SET_DEST (set)) == CC0
2548 && insn != last_ignored_compare)
2549 {
2550 rtx src1, src2;
2551 if (GET_CODE (SET_SRC (set)) == SUBREG)
2552 SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2553
2554 src1 = SET_SRC (set);
2555 src2 = NULL_RTX;
2556 if (GET_CODE (SET_SRC (set)) == COMPARE)
2557 {
2558 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2559 XEXP (SET_SRC (set), 0)
2560 = alter_subreg (&XEXP (SET_SRC (set), 0), true);
2561 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2562 XEXP (SET_SRC (set), 1)
2563 = alter_subreg (&XEXP (SET_SRC (set), 1), true);
2564 if (XEXP (SET_SRC (set), 1)
2565 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2566 src2 = XEXP (SET_SRC (set), 0);
2567 }
2568 if ((cc_status.value1 != 0
2569 && rtx_equal_p (src1, cc_status.value1))
2570 || (cc_status.value2 != 0
2571 && rtx_equal_p (src1, cc_status.value2))
2572 || (src2 != 0 && cc_status.value1 != 0
2573 && rtx_equal_p (src2, cc_status.value1))
2574 || (src2 != 0 && cc_status.value2 != 0
2575 && rtx_equal_p (src2, cc_status.value2)))
2576 {
2577 /* Don't delete insn if it has an addressing side-effect. */
2578 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2579 /* or if anything in it is volatile. */
2580 && ! volatile_refs_p (PATTERN (insn)))
2581 {
2582 /* We don't really delete the insn; just ignore it. */
2583 last_ignored_compare = insn;
2584 break;
2585 }
2586 }
2587 }
2588 }
2589
2590 /* If this is a conditional branch, maybe modify it
2591 if the cc's are in a nonstandard state
2592 so that it accomplishes the same thing that it would
2593 do straightforwardly if the cc's were set up normally. */
2594
2595 if (cc_status.flags != 0
2596 && JUMP_P (insn)
2597 && GET_CODE (body) == SET
2598 && SET_DEST (body) == pc_rtx
2599 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2600 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2601 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2602 {
2603 /* This function may alter the contents of its argument
2604 and clear some of the cc_status.flags bits.
2605 It may also return 1 meaning condition now always true
2606 or -1 meaning condition now always false
2607 or 2 meaning condition nontrivial but altered. */
2608 int result = alter_cond (XEXP (SET_SRC (body), 0));
2609 /* If condition now has fixed value, replace the IF_THEN_ELSE
2610 with its then-operand or its else-operand. */
2611 if (result == 1)
2612 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2613 if (result == -1)
2614 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2615
2616 /* The jump is now either unconditional or a no-op.
2617 If it has become a no-op, don't try to output it.
2618 (It would not be recognized.) */
2619 if (SET_SRC (body) == pc_rtx)
2620 {
2621 delete_insn (insn);
2622 break;
2623 }
2624 else if (ANY_RETURN_P (SET_SRC (body)))
2625 /* Replace (set (pc) (return)) with (return). */
2626 PATTERN (insn) = body = SET_SRC (body);
2627
2628 /* Rerecognize the instruction if it has changed. */
2629 if (result != 0)
2630 INSN_CODE (insn) = -1;
2631 }
2632
2633 /* If this is a conditional trap, maybe modify it if the cc's
2634 are in a nonstandard state so that it accomplishes the same
2635 thing that it would do straightforwardly if the cc's were
2636 set up normally. */
2637 if (cc_status.flags != 0
2638 && NONJUMP_INSN_P (insn)
2639 && GET_CODE (body) == TRAP_IF
2640 && COMPARISON_P (TRAP_CONDITION (body))
2641 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2642 {
2643 /* This function may alter the contents of its argument
2644 and clear some of the cc_status.flags bits.
2645 It may also return 1 meaning condition now always true
2646 or -1 meaning condition now always false
2647 or 2 meaning condition nontrivial but altered. */
2648 int result = alter_cond (TRAP_CONDITION (body));
2649
2650 /* If TRAP_CONDITION has become always false, delete the
2651 instruction. */
2652 if (result == -1)
2653 {
2654 delete_insn (insn);
2655 break;
2656 }
2657
2658 /* If TRAP_CONDITION has become always true, replace
2659 TRAP_CONDITION with const_true_rtx. */
2660 if (result == 1)
2661 TRAP_CONDITION (body) = const_true_rtx;
2662
2663 /* Rerecognize the instruction if it has changed. */
2664 if (result != 0)
2665 INSN_CODE (insn) = -1;
2666 }
2667
2668 /* Make same adjustments to instructions that examine the
2669 condition codes without jumping and instructions that
2670 handle conditional moves (if this machine has either one). */
2671
2672 if (cc_status.flags != 0
2673 && set != 0)
2674 {
2675 rtx cond_rtx, then_rtx, else_rtx;
2676
2677 if (!JUMP_P (insn)
2678 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2679 {
2680 cond_rtx = XEXP (SET_SRC (set), 0);
2681 then_rtx = XEXP (SET_SRC (set), 1);
2682 else_rtx = XEXP (SET_SRC (set), 2);
2683 }
2684 else
2685 {
2686 cond_rtx = SET_SRC (set);
2687 then_rtx = const_true_rtx;
2688 else_rtx = const0_rtx;
2689 }
2690
2691 if (COMPARISON_P (cond_rtx)
2692 && XEXP (cond_rtx, 0) == cc0_rtx)
2693 {
2694 int result;
2695 result = alter_cond (cond_rtx);
2696 if (result == 1)
2697 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2698 else if (result == -1)
2699 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2700 else if (result == 2)
2701 INSN_CODE (insn) = -1;
2702 if (SET_DEST (set) == SET_SRC (set))
2703 delete_insn (insn);
2704 }
2705 }
2706
2707 #endif
2708
2709 #ifdef HAVE_peephole
2710 /* Do machine-specific peephole optimizations if desired. */
2711
2712 if (optimize_p && !flag_no_peephole && !nopeepholes)
2713 {
2714 rtx next = peephole (insn);
2715 /* When peepholing, if there were notes within the peephole,
2716 emit them before the peephole. */
2717 if (next != 0 && next != NEXT_INSN (insn))
2718 {
2719 rtx note, prev = PREV_INSN (insn);
2720
2721 for (note = NEXT_INSN (insn); note != next;
2722 note = NEXT_INSN (note))
2723 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2724
2725 /* Put the notes in the proper position for a later
2726 rescan. For example, the SH target can do this
2727 when generating a far jump in a delayed branch
2728 sequence. */
2729 note = NEXT_INSN (insn);
2730 PREV_INSN (note) = prev;
2731 NEXT_INSN (prev) = note;
2732 NEXT_INSN (PREV_INSN (next)) = insn;
2733 PREV_INSN (insn) = PREV_INSN (next);
2734 NEXT_INSN (insn) = next;
2735 PREV_INSN (next) = insn;
2736 }
2737
2738 /* PEEPHOLE might have changed this. */
2739 body = PATTERN (insn);
2740 }
2741 #endif
2742
2743 /* Try to recognize the instruction.
2744 If successful, verify that the operands satisfy the
2745 constraints for the instruction. Crash if they don't,
2746 since `reload' should have changed them so that they do. */
2747
2748 insn_code_number = recog_memoized (insn);
2749 cleanup_subreg_operands (insn);
2750
2751 /* Dump the insn in the assembly for debugging (-dAP).
2752 If the final dump is requested as slim RTL, dump slim
2753 RTL to the assembly file also. */
2754 if (flag_dump_rtl_in_asm)
2755 {
2756 print_rtx_head = ASM_COMMENT_START;
2757 if (! (dump_flags & TDF_SLIM))
2758 print_rtl_single (asm_out_file, insn);
2759 else
2760 dump_insn_slim (asm_out_file, insn);
2761 print_rtx_head = "";
2762 }
2763
2764 if (! constrain_operands_cached (1))
2765 fatal_insn_not_found (insn);
2766
2767 /* Some target machines need to prescan each insn before
2768 it is output. */
2769
2770 #ifdef FINAL_PRESCAN_INSN
2771 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2772 #endif
2773
2774 if (targetm.have_conditional_execution ()
2775 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2776 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2777
2778 #ifdef HAVE_cc0
2779 cc_prev_status = cc_status;
2780
2781 /* Update `cc_status' for this instruction.
2782 The instruction's output routine may change it further.
2783 If the output routine for a jump insn needs to depend
2784 on the cc status, it should look at cc_prev_status. */
2785
2786 NOTICE_UPDATE_CC (body, insn);
2787 #endif
2788
2789 current_output_insn = debug_insn = insn;
2790
2791 /* Find the proper template for this insn. */
2792 templ = get_insn_template (insn_code_number, insn);
2793
2794 /* If the C code returns 0, it means that it is a jump insn
2795 which follows a deleted test insn, and that test insn
2796 needs to be reinserted. */
2797 if (templ == 0)
2798 {
2799 rtx prev;
2800
2801 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2802
2803 /* We have already processed the notes between the setter and
2804 the user. Make sure we don't process them again, this is
2805 particularly important if one of the notes is a block
2806 scope note or an EH note. */
2807 for (prev = insn;
2808 prev != last_ignored_compare;
2809 prev = PREV_INSN (prev))
2810 {
2811 if (NOTE_P (prev))
2812 delete_insn (prev); /* Use delete_note. */
2813 }
2814
2815 return prev;
2816 }
2817
2818 /* If the template is the string "#", it means that this insn must
2819 be split. */
2820 if (templ[0] == '#' && templ[1] == '\0')
2821 {
2822 rtx new_rtx = try_split (body, insn, 0);
2823
2824 /* If we didn't split the insn, go away. */
2825 if (new_rtx == insn && PATTERN (new_rtx) == body)
2826 fatal_insn ("could not split insn", insn);
2827
2828 /* If we have a length attribute, this instruction should have
2829 been split in shorten_branches, to ensure that we would have
2830 valid length info for the splitees. */
2831 gcc_assert (!HAVE_ATTR_length);
2832
2833 return new_rtx;
2834 }
2835
2836 /* ??? This will put the directives in the wrong place if
2837 get_insn_template outputs assembly directly. However calling it
2838 before get_insn_template breaks if the insns is split. */
2839 if (targetm.asm_out.unwind_emit_before_insn
2840 && targetm.asm_out.unwind_emit)
2841 targetm.asm_out.unwind_emit (asm_out_file, insn);
2842
2843 if (CALL_P (insn))
2844 {
2845 rtx x = call_from_call_insn (insn);
2846 x = XEXP (x, 0);
2847 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2848 {
2849 tree t;
2850 x = XEXP (x, 0);
2851 t = SYMBOL_REF_DECL (x);
2852 if (t)
2853 assemble_external (t);
2854 }
2855 if (!DECL_IGNORED_P (current_function_decl))
2856 debug_hooks->var_location (insn);
2857 }
2858
2859 /* Output assembler code from the template. */
2860 output_asm_insn (templ, recog_data.operand);
2861
2862 /* Some target machines need to postscan each insn after
2863 it is output. */
2864 if (targetm.asm_out.final_postscan_insn)
2865 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2866 recog_data.n_operands);
2867
2868 if (!targetm.asm_out.unwind_emit_before_insn
2869 && targetm.asm_out.unwind_emit)
2870 targetm.asm_out.unwind_emit (asm_out_file, insn);
2871
2872 current_output_insn = debug_insn = 0;
2873 }
2874 }
2875 return NEXT_INSN (insn);
2876 }
2877 \f
2878 /* Return whether a source line note needs to be emitted before INSN.
2879 Sets IS_STMT to TRUE if the line should be marked as a possible
2880 breakpoint location. */
2881
2882 static bool
2883 notice_source_line (rtx insn, bool *is_stmt)
2884 {
2885 const char *filename;
2886 int linenum;
2887
2888 if (override_filename)
2889 {
2890 filename = override_filename;
2891 linenum = override_linenum;
2892 }
2893 else
2894 {
2895 filename = insn_file (insn);
2896 linenum = insn_line (insn);
2897 }
2898
2899 if (filename == NULL)
2900 return false;
2901
2902 if (force_source_line
2903 || filename != last_filename
2904 || last_linenum != linenum)
2905 {
2906 force_source_line = false;
2907 last_filename = filename;
2908 last_linenum = linenum;
2909 last_discriminator = discriminator;
2910 *is_stmt = true;
2911 high_block_linenum = MAX (last_linenum, high_block_linenum);
2912 high_function_linenum = MAX (last_linenum, high_function_linenum);
2913 return true;
2914 }
2915
2916 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
2917 {
2918 /* If the discriminator changed, but the line number did not,
2919 output the line table entry with is_stmt false so the
2920 debugger does not treat this as a breakpoint location. */
2921 last_discriminator = discriminator;
2922 *is_stmt = false;
2923 return true;
2924 }
2925
2926 return false;
2927 }
2928 \f
2929 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2930 directly to the desired hard register. */
2931
2932 void
2933 cleanup_subreg_operands (rtx insn)
2934 {
2935 int i;
2936 bool changed = false;
2937 extract_insn_cached (insn);
2938 for (i = 0; i < recog_data.n_operands; i++)
2939 {
2940 /* The following test cannot use recog_data.operand when testing
2941 for a SUBREG: the underlying object might have been changed
2942 already if we are inside a match_operator expression that
2943 matches the else clause. Instead we test the underlying
2944 expression directly. */
2945 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2946 {
2947 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
2948 changed = true;
2949 }
2950 else if (GET_CODE (recog_data.operand[i]) == PLUS
2951 || GET_CODE (recog_data.operand[i]) == MULT
2952 || MEM_P (recog_data.operand[i]))
2953 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2954 }
2955
2956 for (i = 0; i < recog_data.n_dups; i++)
2957 {
2958 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2959 {
2960 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
2961 changed = true;
2962 }
2963 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2964 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2965 || MEM_P (*recog_data.dup_loc[i]))
2966 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2967 }
2968 if (changed)
2969 df_insn_rescan (insn);
2970 }
2971
2972 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
2973 the thing it is a subreg of. Do it anyway if FINAL_P. */
2974
rtx
alter_subreg (rtx *xp, bool final_p)
{
  rtx x = *xp;
  rtx y = SUBREG_REG (x);

  /* simplify_subreg does not remove subreg from volatile references.
     We are required to.  */
  if (MEM_P (y))
    {
      int offset = SUBREG_BYTE (x);

      /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
	 contains 0 instead of the proper offset.  See simplify_subreg.  */
      if (offset == 0
	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
	{
	  /* DIFFERENCE is negative here: Y (the inner MEM) is narrower
	     than the paradoxical SUBREG X, so the adjustments below
	     move the offset backwards on big-endian targets.  */
	  int difference = GET_MODE_SIZE (GET_MODE (y))
			   - GET_MODE_SIZE (GET_MODE (x));
	  if (WORDS_BIG_ENDIAN)
	    offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += difference % UNITS_PER_WORD;
	}

      /* Replace (subreg (mem)) with an offset MEM in X's mode; the
	 _nv variant presumably skips address validation -- used when
	 FINAL_P is false.  */
      if (final_p)
	*xp = adjust_address (y, GET_MODE (x), offset);
      else
	*xp = adjust_address_nv (y, GET_MODE (x), offset);
    }
  else
    {
      /* Not a MEM: let the generic simplifier have the first try.  */
      rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
				     SUBREG_BYTE (x));

      if (new_rtx != 0)
	*xp = new_rtx;
      else if (final_p && REG_P (y))
	{
	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
	  unsigned int regno;
	  HOST_WIDE_INT offset;

	  regno = subreg_regno (x);
	  if (subreg_lowpart_p (x))
	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
	  else
	    offset = SUBREG_BYTE (x);
	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
	}
    }

  /* Return the (possibly replaced) expression at *XP.  */
  return *xp;
}
3029
3030 /* Do alter_subreg on all the SUBREGs contained in X. */
3031
3032 static rtx
3033 walk_alter_subreg (rtx *xp, bool *changed)
3034 {
3035 rtx x = *xp;
3036 switch (GET_CODE (x))
3037 {
3038 case PLUS:
3039 case MULT:
3040 case AND:
3041 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3042 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3043 break;
3044
3045 case MEM:
3046 case ZERO_EXTEND:
3047 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3048 break;
3049
3050 case SUBREG:
3051 *changed = true;
3052 return alter_subreg (xp, true);
3053
3054 default:
3055 break;
3056 }
3057
3058 return *xp;
3059 }
3060 \f
3061 #ifdef HAVE_cc0
3062
3063 /* Given BODY, the body of a jump instruction, alter the jump condition
3064 as required by the bits that are set in cc_status.flags.
3065 Not all of the bits there can be handled at this level in all cases.
3066
3067 The value is normally 0.
3068 1 means that the condition has become always true.
3069 -1 means that the condition has become always false.
3070 2 means that COND has been altered. */
3071
static int
alter_cond (rtx cond)
{
  int value = 0;

  /* First undo any operand swap or inversion recorded in
     cc_status.flags; both rewrite COND's code in place.  */
  if (cc_status.flags & CC_REVERSED)
    {
      value = 2;
      PUT_CODE (cond, swap_condition (GET_CODE (cond)));
    }

  if (cc_status.flags & CC_INVERTED)
    {
      value = 2;
      PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
    }

  /* The compared value is known not to be positive (see CC_NOT_POSITIVE
     in the cc_status documentation), so some comparisons have a known
     result and others reduce to equality tests.  */
  if (cc_status.flags & CC_NOT_POSITIVE)
    switch (GET_CODE (cond))
      {
      case LE:
      case LEU:
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case GT:
      case GTU:
      case LTU:
	/* Jump becomes no-op.  */
	return -1;

      case GE:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case LT:
	PUT_CODE (cond, NE);
	value = 2;
	break;

      default:
	break;
      }

  /* Likewise for a value known not to be negative.  */
  if (cc_status.flags & CC_NOT_NEGATIVE)
    switch (GET_CODE (cond))
      {
      case GE:
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case LT:
      case LTU:
	/* Jump becomes no-op.  */
	return -1;

      case LE:
      case LEU:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case GT:
      case GTU:
	PUT_CODE (cond, NE);
	value = 2;
	break;

      default:
	break;
      }

  /* Overflow is known not to have occurred, which fixes the unsigned
     comparisons against the boundary value.  */
  if (cc_status.flags & CC_NO_OVERFLOW)
    switch (GET_CODE (cond))
      {
      case GEU:
	/* Jump becomes unconditional.  */
	return 1;

      case LEU:
	PUT_CODE (cond, EQ);
	value = 2;
	break;

      case GTU:
	PUT_CODE (cond, NE);
	value = 2;
	break;

      case LTU:
	/* Jump becomes no-op.  */
	return -1;

      default:
	break;
      }

  /* The Z (zero) result is reflected in the N flag (CC_Z_IN_N) or its
     complement (CC_Z_IN_NOT_N), so equality tests become sign tests.
     Any other comparison code is not expected here.  */
  if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
    switch (GET_CODE (cond))
      {
      default:
	gcc_unreachable ();

      case NE:
	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
	value = 2;
	break;

      case EQ:
	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
	value = 2;
	break;
      }

  if (cc_status.flags & CC_NOT_SIGNED)
    /* The flags are valid if signed condition operators are converted
       to unsigned.  */
    switch (GET_CODE (cond))
      {
      case LE:
	PUT_CODE (cond, LEU);
	value = 2;
	break;

      case LT:
	PUT_CODE (cond, LTU);
	value = 2;
	break;

      case GT:
	PUT_CODE (cond, GTU);
	value = 2;
	break;

      case GE:
	PUT_CODE (cond, GEU);
	value = 2;
	break;

      default:
	break;
      }

  /* 0 = unchanged, 1 = always true, -1 = always false, 2 = altered.  */
  return value;
}
3220 #endif
3221 \f
3222 /* Report inconsistency between the assembler template and the operands.
3223 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3224
3225 void
3226 output_operand_lossage (const char *cmsgid, ...)
3227 {
3228 char *fmt_string;
3229 char *new_message;
3230 const char *pfx_str;
3231 va_list ap;
3232
3233 va_start (ap, cmsgid);
3234
3235 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3236 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3237 vasprintf (&new_message, fmt_string, ap);
3238
3239 if (this_is_asm_operands)
3240 error_for_asm (this_is_asm_operands, "%s", new_message);
3241 else
3242 internal_error ("%s", new_message);
3243
3244 free (fmt_string);
3245 free (new_message);
3246 va_end (ap);
3247 }
3248 \f
3249 /* Output of assembler code from a template, and its subroutines. */
3250
3251 /* Annotate the assembly with a comment describing the pattern and
3252 alternative used. */
3253
3254 static void
3255 output_asm_name (void)
3256 {
3257 if (debug_insn)
3258 {
3259 int num = INSN_CODE (debug_insn);
3260 fprintf (asm_out_file, "\t%s %d\t%s",
3261 ASM_COMMENT_START, INSN_UID (debug_insn),
3262 insn_data[num].name);
3263 if (insn_data[num].n_alternatives > 1)
3264 fprintf (asm_out_file, "/%d", which_alternative + 1);
3265
3266 if (HAVE_ATTR_length)
3267 fprintf (asm_out_file, "\t[length = %d]",
3268 get_attr_length (debug_insn));
3269
3270 /* Clear this so only the first assembler insn
3271 of any rtl insn will get the special comment for -dp. */
3272 debug_insn = 0;
3273 }
3274 }
3275
3276 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3277 or its address, return that expr . Set *PADDRESSP to 1 if the expr
3278 corresponds to the address of the object and 0 if to the object. */
3279
3280 static tree
3281 get_mem_expr_from_op (rtx op, int *paddressp)
3282 {
3283 tree expr;
3284 int inner_addressp;
3285
3286 *paddressp = 0;
3287
3288 if (REG_P (op))
3289 return REG_EXPR (op);
3290 else if (!MEM_P (op))
3291 return 0;
3292
3293 if (MEM_EXPR (op) != 0)
3294 return MEM_EXPR (op);
3295
3296 /* Otherwise we have an address, so indicate it and look at the address. */
3297 *paddressp = 1;
3298 op = XEXP (op, 0);
3299
3300 /* First check if we have a decl for the address, then look at the right side
3301 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3302 But don't allow the address to itself be indirect. */
3303 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3304 return expr;
3305 else if (GET_CODE (op) == PLUS
3306 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3307 return expr;
3308
3309 while (UNARY_P (op)
3310 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3311 op = XEXP (op, 0);
3312
3313 expr = get_mem_expr_from_op (op, &inner_addressp);
3314 return inner_addressp ? 0 : expr;
3315 }
3316
3317 /* Output operand names for assembler instructions. OPERANDS is the
3318 operand vector, OPORDER is the order to write the operands, and NOPS
3319 is the number of operands to write. */
3320
3321 static void
3322 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3323 {
3324 int wrote = 0;
3325 int i;
3326
3327 for (i = 0; i < nops; i++)
3328 {
3329 int addressp;
3330 rtx op = operands[oporder[i]];
3331 tree expr = get_mem_expr_from_op (op, &addressp);
3332
3333 fprintf (asm_out_file, "%c%s",
3334 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3335 wrote = 1;
3336 if (expr)
3337 {
3338 fprintf (asm_out_file, "%s",
3339 addressp ? "*" : "");
3340 print_mem_expr (asm_out_file, expr);
3341 wrote = 1;
3342 }
3343 else if (REG_P (op) && ORIGINAL_REGNO (op)
3344 && ORIGINAL_REGNO (op) != REGNO (op))
3345 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3346 }
3347 }
3348
3349 #ifdef ASSEMBLER_DIALECT
/* Helper function to parse assembler dialects in the asm string.
   This is called from output_asm_insn and asm_fprintf.

   P points just past the '{', '|' or '}' that the caller consumed;
   *DIALECT is nonzero while we are inside a {...|...} alternative
   group.  Returns the updated scan position.  */
static const char *
do_assembler_dialects (const char *p, int *dialect)
{
  /* The dialect character already consumed by the caller.  */
  char c = *(p - 1);

  switch (c)
    {
    case '{':
      {
	int i;

	if (*dialect)
	  output_operand_lossage ("nested assembly dialect alternatives");
	else
	  *dialect = 1;

	/* If we want the first dialect, do nothing.  Otherwise, skip
	   DIALECT_NUMBER of strings ending with '|'.  */
	for (i = 0; i < dialect_number; i++)
	  {
	    /* Note: P only advances (via *p++) when the character is
	       neither NUL nor '}', so we stop on either delimiter.  */
	    while (*p && *p != '}' && *p++ != '|')
	      ;
	    if (*p == '}')
	      break;
	  }

	if (*p == '\0')
	  output_operand_lossage ("unterminated assembly dialect alternative");
      }
      break;

    case '|':
      if (*dialect)
	{
	  /* Our chosen alternative has already been emitted; skip the
	     rest of the group up to the closing brace.  */
	  do
	    {
	      if (*p == '\0')
		{
		  output_operand_lossage ("unterminated assembly dialect alternative");
		  break;
		}
	    }
	  while (*p++ != '}');
	  *dialect = 0;
	}
      else
	/* A '|' outside any alternative group is literal output.  */
	putc (c, asm_out_file);
      break;

    case '}':
      /* A stray '}' outside a group is literal output.  */
      if (! *dialect)
	putc (c, asm_out_file);
      *dialect = 0;
      break;
    default:
      gcc_unreachable ();
    }

  return p;
}
3413 #endif
3414
3415 /* Output text from TEMPLATE to the assembler output file,
3416 obeying %-directions to substitute operands taken from
3417 the vector OPERANDS.
3418
3419 %N (for N a digit) means print operand N in usual manner.
3420 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3421 and print the label name with no punctuation.
3422 %cN means require operand N to be a constant
3423 and print the constant expression with no punctuation.
3424 %aN means expect operand N to be a memory address
3425 (not a memory reference!) and print a reference
3426 to that address.
3427 %nN means expect operand N to be a constant
3428 and print a constant expression for minus the value
3429 of the operand, with no other punctuation. */
3430
void
output_asm_insn (const char *templ, rtx *operands)
{
  const char *p;
  int c;
#ifdef ASSEMBLER_DIALECT
  int dialect = 0;
#endif
  /* OPORDER records, in first-use order, which operands the template
     referenced on the current assembler line (for -fverbose-asm);
     OPOUTPUT[N] is set once operand N has been counted.  */
  int oporder[MAX_RECOG_OPERANDS];
  char opoutput[MAX_RECOG_OPERANDS];
  int ops = 0;

  /* An insn may return a null string template
     in a case where no assembler code is needed.  */
  if (*templ == 0)
    return;

  memset (opoutput, 0, sizeof opoutput);
  p = templ;
  putc ('\t', asm_out_file);

#ifdef ASM_OUTPUT_OPCODE
  ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif

  while ((c = *p++))
    switch (c)
      {
      case '\n':
	/* End of one assembler line: flush the per-line operand
	   annotations before starting the next line.  */
	if (flag_verbose_asm)
	  output_asm_operand_names (operands, oporder, ops);
	if (flag_print_asm_name)
	  output_asm_name ();

	ops = 0;
	memset (opoutput, 0, sizeof opoutput);

	putc (c, asm_out_file);
#ifdef ASM_OUTPUT_OPCODE
	while ((c = *p) == '\t')
	  {
	    putc (c, asm_out_file);
	    p++;
	  }
	ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
	break;

#ifdef ASSEMBLER_DIALECT
      case '{':
      case '}':
      case '|':
	p = do_assembler_dialects (p, &dialect);
	break;
#endif

      case '%':
	/* %% outputs a single %.  */
	if (*p == '%')
	  {
	    p++;
	    putc (c, asm_out_file);
	  }
	/* %= outputs a number which is unique to each insn in the entire
	   compilation.  This is useful for making local labels that are
	   referred to more than once in a given insn.  */
	else if (*p == '=')
	  {
	    p++;
	    fprintf (asm_out_file, "%d", insn_counter);
	  }
	/* % followed by a letter and some digits
	   outputs an operand in a special way depending on the letter.
	   Letters `acln' are implemented directly.
	   Other letters are passed to `output_operand' so that
	   the TARGET_PRINT_OPERAND hook can define them.  */
	else if (ISALPHA (*p))
	  {
	    int letter = *p++;
	    unsigned long opnum;
	    char *endptr;

	    opnum = strtoul (p, &endptr, 10);

	    if (endptr == p)
	      output_operand_lossage ("operand number missing "
				      "after %%-letter");
	    else if (this_is_asm_operands && opnum >= insn_noperands)
	      output_operand_lossage ("operand number out of range");
	    else if (letter == 'l')
	      output_asm_label (operands[opnum]);
	    else if (letter == 'a')
	      output_address (operands[opnum]);
	    else if (letter == 'c')
	      {
		if (CONSTANT_ADDRESS_P (operands[opnum]))
		  output_addr_const (asm_out_file, operands[opnum]);
		else
		  output_operand (operands[opnum], 'c');
	      }
	    else if (letter == 'n')
	      {
		/* Print minus the operand's value with no punctuation.  */
		if (CONST_INT_P (operands[opnum]))
		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
			   - INTVAL (operands[opnum]));
		else
		  {
		    putc ('-', asm_out_file);
		    output_addr_const (asm_out_file, operands[opnum]);
		  }
	      }
	    else
	      output_operand (operands[opnum], letter);

	    if (!opoutput[opnum])
	      oporder[ops++] = opnum;
	    opoutput[opnum] = 1;

	    p = endptr;
	    c = *p;
	  }
	/* % followed by a digit outputs an operand the default way.  */
	else if (ISDIGIT (*p))
	  {
	    unsigned long opnum;
	    char *endptr;

	    opnum = strtoul (p, &endptr, 10);
	    if (this_is_asm_operands && opnum >= insn_noperands)
	      output_operand_lossage ("operand number out of range");
	    else
	      output_operand (operands[opnum], 0);

	    if (!opoutput[opnum])
	      oporder[ops++] = opnum;
	    opoutput[opnum] = 1;

	    p = endptr;
	    c = *p;
	  }
	/* % followed by punctuation: output something for that
	   punctuation character alone, with no operand.  The
	   TARGET_PRINT_OPERAND hook decides what is actually done.  */
	else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
	  output_operand (NULL_RTX, *p++);
	else
	  output_operand_lossage ("invalid %%-code");
	break;

      default:
	putc (c, asm_out_file);
      }

  /* Write out the variable names for operands, if we know them.  */
  if (flag_verbose_asm)
    output_asm_operand_names (operands, oporder, ops);
  if (flag_print_asm_name)
    output_asm_name ();

  putc ('\n', asm_out_file);
}
3592 \f
3593 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3594
3595 void
3596 output_asm_label (rtx x)
3597 {
3598 char buf[256];
3599
3600 if (GET_CODE (x) == LABEL_REF)
3601 x = XEXP (x, 0);
3602 if (LABEL_P (x)
3603 || (NOTE_P (x)
3604 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3605 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3606 else
3607 output_operand_lossage ("'%%l' operand isn't a label");
3608
3609 assemble_name (asm_out_file, buf);
3610 }
3611
3612 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3613 output_operand. Marks SYMBOL_REFs as referenced through use of
3614 assemble_external. */
3615
3616 static int
3617 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3618 {
3619 rtx x = *xp;
3620
3621 /* If we have a used symbol, we may have to emit assembly
3622 annotations corresponding to whether the symbol is external, weak
3623 or has non-default visibility. */
3624 if (GET_CODE (x) == SYMBOL_REF)
3625 {
3626 tree t;
3627
3628 t = SYMBOL_REF_DECL (x);
3629 if (t)
3630 assemble_external (t);
3631
3632 return -1;
3633 }
3634
3635 return 0;
3636 }
3637
/* Marks SYMBOL_REFs in X as referenced through use of assemble_external.  */

void
mark_symbol_refs_as_used (rtx x)
{
  /* Walk every subexpression; the callback handles each SYMBOL_REF.  */
  for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
}
3645
3646 /* Print operand X using machine-dependent assembler syntax.
3647 CODE is a non-digit that preceded the operand-number in the % spec,
3648 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3649 between the % and the digits.
3650 When CODE is a non-letter, X is 0.
3651
3652 The meanings of the letters are machine-dependent and controlled
3653 by TARGET_PRINT_OPERAND. */
3654
3655 void
3656 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3657 {
3658 if (x && GET_CODE (x) == SUBREG)
3659 x = alter_subreg (&x, true);
3660
3661 /* X must not be a pseudo reg. */
3662 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3663
3664 targetm.asm_out.print_operand (asm_out_file, x, code);
3665
3666 if (x == NULL_RTX)
3667 return;
3668
3669 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3670 }
3671
/* Print a memory reference operand for address X using
   machine-dependent assembler syntax.  */

void
output_address (rtx x)
{
  /* Replace any remaining SUBREGs before the target hook formats the
     address.  */
  bool changed = false;
  walk_alter_subreg (&x, &changed);
  targetm.asm_out.print_operand_address (asm_out_file, x);
}
3682 \f
/* Print an integer constant expression in assembler syntax.
   Addition and subtraction are the only arithmetic
   that may appear in these expressions.  */

void
output_addr_const (FILE *file, rtx x)
{
  char buf[256];

 restart:
  switch (GET_CODE (x))
    {
    case PC:
      /* The current location counter.  */
      putc ('.', file);
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_DECL (x))
	assemble_external (SYMBOL_REF_DECL (x));
#ifdef ASM_OUTPUT_SYMBOL_REF
      ASM_OUTPUT_SYMBOL_REF (file, x);
#else
      assemble_name (file, XSTR (x, 0));
#endif
      break;

    case LABEL_REF:
      x = XEXP (x, 0);
      /* Fall through.  */
    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
#ifdef ASM_OUTPUT_LABEL_REF
      ASM_OUTPUT_LABEL_REF (file, buf);
#else
      assemble_name (file, buf);
#endif
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST:
      /* This used to output parentheses around the expression,
	 but that does not work on the 386 (either ATT or BSD assembler).  */
      output_addr_const (file, XEXP (x, 0));
      break;

    case CONST_DOUBLE:
      if (GET_MODE (x) == VOIDmode)
	{
	  /* We can use %d if the number is one word and positive.  */
	  if (CONST_DOUBLE_HIGH (x))
	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
	  else if (CONST_DOUBLE_LOW (x) < 0)
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
	  else
	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
	}
      else
	/* We can't handle floating point constants;
	   PRINT_OPERAND must handle them.  */
	output_operand_lossage ("floating constant misused");
      break;

    case CONST_FIXED:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
      break;

    case PLUS:
      /* Some assemblers need integer constants to appear last (eg masm).  */
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  output_addr_const (file, XEXP (x, 1));
	  if (INTVAL (XEXP (x, 0)) >= 0)
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 0));
	}
      else
	{
	  output_addr_const (file, XEXP (x, 0));
	  /* A negative CONST_INT supplies its own '-' sign.  */
	  if (!CONST_INT_P (XEXP (x, 1))
	      || INTVAL (XEXP (x, 1)) >= 0)
	    fprintf (file, "+");
	  output_addr_const (file, XEXP (x, 1));
	}
      break;

    case MINUS:
      /* Avoid outputting things like x-x or x+5-x,
	 since some assemblers can't handle that.  */
      x = simplify_subtraction (x);
      if (GET_CODE (x) != MINUS)
	goto restart;

      output_addr_const (file, XEXP (x, 0));
      fprintf (file, "-");
      if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
	  || GET_CODE (XEXP (x, 1)) == PC
	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
	output_addr_const (file, XEXP (x, 1));
      else
	{
	  /* Parenthesize a complex subtrahend so the assembler does not
	     misparse the expression.  */
	  fputs (targetm.asm_out.open_paren, file);
	  output_addr_const (file, XEXP (x, 1));
	  fputs (targetm.asm_out.close_paren, file);
	}
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
    case SUBREG:
    case TRUNCATE:
      /* These are transparent for assembler-output purposes.  */
      output_addr_const (file, XEXP (x, 0));
      break;

    default:
      /* Give the target a chance to handle machine-specific constants.  */
      if (targetm.asm_out.output_addr_const_extra (file, x))
	break;

      output_operand_lossage ("invalid expression as operand");
    }
}
3809 \f
/* Output a quoted string: printable characters pass through (with '"'
   and '\' escaped), everything else becomes an octal escape.  */

void
output_quoted_string (FILE *asm_file, const char *string)
{
#ifdef OUTPUT_QUOTED_STRING
  OUTPUT_QUOTED_STRING (asm_file, string);
#else
  char c;

  putc ('\"', asm_file);
  while ((c = *string++) != 0)
    {
      if (!ISPRINT (c))
	fprintf (asm_file, "\\%03o", (unsigned char) c);
      else
	{
	  if (c == '\"' || c == '\\')
	    putc ('\\', asm_file);
	  putc (c, asm_file);
	}
    }
  putc ('\"', asm_file);
#endif
}
3835 \f
3836 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3837
3838 void
3839 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3840 {
3841 char buf[2 + CHAR_BIT * sizeof (value) / 4];
3842 if (value == 0)
3843 putc ('0', f);
3844 else
3845 {
3846 char *p = buf + sizeof (buf);
3847 do
3848 *--p = "0123456789abcdef"[value % 16];
3849 while ((value /= 16) != 0);
3850 *--p = 'x';
3851 *--p = '0';
3852 fwrite (p, 1, buf + sizeof (buf) - p, f);
3853 }
3854 }
3855
/* Internal function that prints an unsigned long in decimal in reverse
   (least-significant digit first).  The output string IS NOT
   null-terminated.  Returns the number of characters written.  */

static int
sprint_ul_rev (char *s, unsigned long value)
{
  int len = 0;

  /* Always emit at least one digit so that 0 prints as "0".  */
  do
    {
      s[len++] = "0123456789"[value % 10];
      value /= 10;
    }
  while (value != 0);

  return len;
}
3877
/* Write an unsigned long as decimal to a file, fast.  */

void
fprint_ul (FILE *f, unsigned long value)
{
  /* 2**64 has 20 decimal digits, so this always suffices.  */
  char rev[20];
  int n = sprint_ul_rev (rev, value);

  /* The digits came out least-significant first; emit them in the
     opposite order.  It's probably too small a string to bother with
     reversal-in-place plus fputs.  */
  while (n > 0)
    putc (rev[--n], f);
}
3897
/* Write an unsigned long as decimal to a string, fast.
   s must be wide enough to not overflow, at least 21 chars.
   Returns the length of the string (without terminating '\0').  */

int
sprint_ul (char *s, unsigned long value)
{
  int len = sprint_ul_rev (s, value);
  int lo, hi;
  char tmp;

  s[len] = '\0';

  /* The helper wrote the digits backwards; reverse them in place.  */
  for (lo = 0, hi = len - 1; lo < hi; lo++, hi--)
    {
      tmp = s[lo];
      s[lo] = s[hi];
      s[hi] = tmp;
    }

  return len;
}
3926
/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
   %R prints the value of REGISTER_PREFIX.
   %L prints the value of LOCAL_LABEL_PREFIX.
   %U prints the value of USER_LABEL_PREFIX.
   %I prints the value of IMMEDIATE_PREFIX.
   %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
   Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.

   We handle alternate assembler dialects here, just like output_asm_insn.  */

void
asm_fprintf (FILE *file, const char *p, ...)
{
  /* Scratch buffer in which a single %-directive (flags, width,
     length modifier, conversion char) is reassembled before being
     handed to the real fprintf.  NOTE(review): only 10 bytes --
     assumes the directives in callers' fixed templates stay short;
     a long width specification would overflow.  Confirm callers.  */
  char buf[10];
  char *q, c;
#ifdef ASSEMBLER_DIALECT
  int dialect = 0;
#endif
  va_list argptr;

  va_start (argptr, p);

  buf[0] = '%';

  while ((c = *p++))
    switch (c)
      {
#ifdef ASSEMBLER_DIALECT
      case '{':
      case '}':
      case '|':
	p = do_assembler_dialects (p, &dialect);
	break;
#endif

      case '%':
	c = *p++;
	q = &buf[1];
	/* Copy any printf flag characters...  */
	while (strchr ("-+ #0", c))
	  {
	    *q++ = c;
	    c = *p++;
	  }
	/* ...and the field width / precision digits.  */
	while (ISDIGIT (c) || c == '.')
	  {
	    *q++ = c;
	    c = *p++;
	  }
	switch (c)
	  {
	  case '%':
	    putc ('%', file);
	    break;

	  case 'd': case 'i': case 'u':
	  case 'x': case 'X': case 'o':
	  case 'c':
	    *q++ = c;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, int));
	    break;

	  case 'w':
	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
	       'o' cases, but we do not check for those cases.  It
	       means that the value is a HOST_WIDE_INT, which may be
	       either `long' or `long long'.  */
	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
	    q += strlen (HOST_WIDE_INT_PRINT);
	    *q++ = *p++;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
	    break;

	  case 'l':
	    *q++ = c;
#ifdef HAVE_LONG_LONG
	    /* "%ll..." consumes a `long long' argument.  */
	    if (*p == 'l')
	      {
		*q++ = *p++;
		*q++ = *p++;
		*q = 0;
		fprintf (file, buf, va_arg (argptr, long long));
	      }
	    else
#endif
	      {
		*q++ = *p++;
		*q = 0;
		fprintf (file, buf, va_arg (argptr, long));
	      }

	    break;

	  case 's':
	    *q++ = c;
	    *q = 0;
	    fprintf (file, buf, va_arg (argptr, char *));
	    break;

	  case 'O':
#ifdef ASM_OUTPUT_OPCODE
	    ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
	    break;

	  case 'R':
#ifdef REGISTER_PREFIX
	    fprintf (file, "%s", REGISTER_PREFIX);
#endif
	    break;

	  case 'I':
#ifdef IMMEDIATE_PREFIX
	    fprintf (file, "%s", IMMEDIATE_PREFIX);
#endif
	    break;

	  case 'L':
#ifdef LOCAL_LABEL_PREFIX
	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
#endif
	    break;

	  case 'U':
	    fputs (user_label_prefix, file);
	    break;

#ifdef ASM_FPRINTF_EXTENSIONS
	    /* Uppercase letters are reserved for general use by asm_fprintf
	       and so are not available to target specific code.  In order to
	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
	       they are defined here.  As they get turned into real extensions
	       to asm_fprintf they should be removed from this list.  */
	  case 'A': case 'B': case 'C': case 'D': case 'E':
	  case 'F': case 'G': case 'H': case 'J': case 'K':
	  case 'M': case 'N': case 'P': case 'Q': case 'S':
	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
	    break;

	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
#endif
	  default:
	    gcc_unreachable ();
	  }
	break;

      default:
	putc (c, file);
      }
  va_end (argptr);
}
4079 \f
4080 /* Return nonzero if this function has no function calls. */
4081
4082 int
4083 leaf_function_p (void)
4084 {
4085 rtx insn;
4086 rtx link;
4087
4088 if (crtl->profile || profile_arc_flag)
4089 return 0;
4090
4091 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4092 {
4093 if (CALL_P (insn)
4094 && ! SIBLING_CALL_P (insn))
4095 return 0;
4096 if (NONJUMP_INSN_P (insn)
4097 && GET_CODE (PATTERN (insn)) == SEQUENCE
4098 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4099 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4100 return 0;
4101 }
4102 for (link = crtl->epilogue_delay_list;
4103 link;
4104 link = XEXP (link, 1))
4105 {
4106 insn = XEXP (link, 0);
4107
4108 if (CALL_P (insn)
4109 && ! SIBLING_CALL_P (insn))
4110 return 0;
4111 if (NONJUMP_INSN_P (insn)
4112 && GET_CODE (PATTERN (insn)) == SEQUENCE
4113 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4114 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4115 return 0;
4116 }
4117
4118 return 1;
4119 }
4120
/* Return 1 if branch is a forward branch.
   Uses insn_shuid array, so it works only in the final pass.  May be used
   by output templates to add branch prediction hints, for example.  */
int
final_forward_branch_p (rtx insn)
{
  int insn_id, label_id;

  /* shorten_branches must have filled in the uid_shuid array.  */
  gcc_assert (uid_shuid);
  insn_id = INSN_SHUID (insn);
  label_id = INSN_SHUID (JUMP_LABEL (insn));
  /* We've hit some insns that do not have id information available.  */
  gcc_assert (insn_id && label_id);
  /* The branch is forward if its target comes later in insn order.  */
  return insn_id < label_id;
}
4137
4138 /* On some machines, a function with no call insns
4139 can run faster if it doesn't create its own register window.
4140 When output, the leaf function should use only the "output"
4141 registers. Ordinarily, the function would be compiled to use
4142 the "input" registers to find its arguments; it is a candidate
4143 for leaf treatment if it uses only the "input" registers.
4144 Leaf function treatment means renumbering so the function
4145 uses the "output" registers instead. */
4146
4147 #ifdef LEAF_REGISTERS
4148
4149 /* Return 1 if this function uses only the registers that can be
4150 safely renumbered. */
4151
4152 int
4153 only_leaf_regs_used (void)
4154 {
4155 int i;
4156 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4157
4158 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4159 if ((df_regs_ever_live_p (i) || global_regs[i])
4160 && ! permitted_reg_in_leaf_functions[i])
4161 return 0;
4162
4163 if (crtl->uses_pic_offset_table
4164 && pic_offset_table_rtx != 0
4165 && REG_P (pic_offset_table_rtx)
4166 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4167 return 0;
4168
4169 return 1;
4170 }
4171
/* Scan all instructions and renumber all registers into those
   available in leaf functions.  */

static void
leaf_renumber_regs (rtx first)
{
  rtx insn;

  /* Renumber only the actual patterns.
     The reg-notes can contain frame pointer refs,
     and renumbering them could crash, and should not be needed.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      leaf_renumber_regs_insn (PATTERN (insn));
  /* Insns parked for epilogue delay slots need renumbering too.  */
  for (insn = crtl->epilogue_delay_list;
       insn;
       insn = XEXP (insn, 1))
    if (INSN_P (XEXP (insn, 0)))
      leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
}
4192
/* Scan IN_RTX and its subexpressions, and renumber all regs into those
   available in leaf functions.  */

void
leaf_renumber_regs_insn (rtx in_rtx)
{
  int i, j;
  const char *format_ptr;

  if (in_rtx == 0)
    return;

  /* Renumber all input-registers into output-registers.
     The rtx's `used' bit marks registers already visited, so a reg
     shared between several expressions is remapped only once.  */

  if (REG_P (in_rtx))
    {
      int newreg;

      /* Don't renumber the same reg twice.  */
      if (in_rtx->used)
	return;

      newreg = REGNO (in_rtx);
      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
	 to reach here as part of a REG_NOTE.  */
      if (newreg >= FIRST_PSEUDO_REGISTER)
	{
	  in_rtx->used = 1;
	  return;
	}
      newreg = LEAF_REG_REMAP (newreg);
      gcc_assert (newreg >= 0);
      /* Keep the liveness bookkeeping in step with the renaming.  */
      df_set_regs_ever_live (REGNO (in_rtx), false);
      df_set_regs_ever_live (newreg, true);
      SET_REGNO (in_rtx, newreg);
      in_rtx->used = 1;
    }

  if (INSN_P (in_rtx))
    {
      /* Inside a SEQUENCE, we find insns.
	 Renumber just the patterns of these insns,
	 just as we do for the top-level insns.  */
      leaf_renumber_regs_insn (PATTERN (in_rtx));
      return;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));

  /* Recurse into every rtx ('e') and rtx-vector ('E') operand;
     the remaining format codes carry no registers.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	leaf_renumber_regs_insn (XEXP (in_rtx, i));
	break;

      case 'E':
	if (NULL != XVEC (in_rtx, i))
	  {
	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
	  }
	break;

      case 'S':
      case 's':
      case '0':
      case 'i':
      case 'w':
      case 'n':
      case 'u':
	break;

      default:
	gcc_unreachable ();
      }
}
4272 #endif
4273 \f
/* Turn the RTL into assembly: emit the function body, its exception
   table, debug info, and any static constructor/destructor records.  */
static unsigned int
rest_of_handle_final (void)
{
  rtx x;
  const char *fnname;

  /* Get the function's name, as described by its RTL.  This may be
     different from the DECL_NAME name used in the source file.  */

  x = DECL_RTL (current_function_decl);
  gcc_assert (MEM_P (x));
  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == SYMBOL_REF);
  fnname = XSTR (x, 0);

  assemble_start_function (current_function_decl, fnname);
  final_start_function (get_insns (), asm_out_file, optimize);
  final (get_insns (), asm_out_file, optimize);
  final_end_function ();

  /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
     directive that closes the procedure descriptor.  Similarly, for x64 SEH.
     Otherwise it's not strictly necessary, but it doesn't hurt either.  */
  output_function_exception_table (fnname);

  assemble_end_function (current_function_decl, fnname);

  user_defined_section_attribute = false;

  /* Free up reg info memory.  */
  free_reg_info ();

  if (! quiet_flag)
    fflush (asm_out_file);

  /* Write DBX symbols if requested.  */

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->function_decl (current_function_decl);
  timevar_pop (TV_SYMOUT);

  /* Release the blocks that are linked to DECL_INITIAL() to free the memory.  */
  DECL_INITIAL (current_function_decl) = error_mark_node;

  /* Register static constructors/destructors with the target now that
     the function body has been emitted.  */
  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
				 decl_init_priority_lookup
				   (current_function_decl));
  if (DECL_STATIC_DESTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
				decl_fini_priority_lookup
				  (current_function_decl));
  return 0;
}

/* Pass descriptor for the final assembly-output pass.  */
struct rtl_opt_pass pass_final =
{
 {
  RTL_PASS,
  "final",                              /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_handle_final,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
4361
4362
/* Run shorten_branches over the insn chain, computing insn addresses
   and minimal branch/insn lengths for the final pass.  */
static unsigned int
rest_of_handle_shorten_branches (void)
{
  /* Shorten branches.  */
  shorten_branches (get_insns ());
  return 0;
}

/* Pass descriptor for the branch-shortening pass.  */
struct rtl_opt_pass pass_shorten_branches =
{
 {
  RTL_PASS,
  "shorten",                            /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_handle_shorten_branches,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
4390
4391
/* Tear down per-function RTL state after assembly output: optionally
   dump the final insns (for -fcompare-debug / -fdump-final-insns),
   dismantle the insn chain, and reset global compilation state.  */
static unsigned int
rest_of_clean_state (void)
{
  rtx insn, next;
  FILE *final_output = NULL;
  int save_unnumbered = flag_dump_unnumbered;
  int save_noaddr = flag_dump_noaddr;

  if (flag_dump_final_insns)
    {
      final_output = fopen (flag_dump_final_insns, "a");
      if (!final_output)
	{
	  error ("could not open final insn dump file %qs: %m",
		 flag_dump_final_insns);
	  flag_dump_final_insns = NULL;
	}
      else
	{
	  /* Suppress addresses and uids so two compilations can be
	     compared textually.  */
	  flag_dump_noaddr = flag_dump_unnumbered = 1;
	  if (flag_compare_debug_opt || flag_compare_debug)
	    dump_flags |= TDF_NOUID;
	  dump_function_header (final_output, current_function_decl,
				dump_flags);
	  final_insns_dump_p = true;

	  /* Normalize uids: labels keep their label number, everything
	     else dumps as uid 0.  */
	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	    if (LABEL_P (insn))
	      INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
	    else
	      {
		if (NOTE_P (insn))
		  set_block_for_insn (insn, NULL);
		INSN_UID (insn) = 0;
	      }
	}
    }

  /* It is very important to decompose the RTL instruction chain here:
     debug information keeps pointing into CODE_LABEL insns inside the function
     body.  If these remain pointing to the other insns, we end up preserving
     whole RTL chain and attached detailed debug info in memory.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      NEXT_INSN (insn) = NULL;
      PREV_INSN (insn) = NULL;

      /* Debug-location and block notes differ legitimately between -g
	 and -g0 compiles, so keep them out of the comparison dump.  */
      if (final_output
	  && (!NOTE_P (insn) ||
	      (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
	       && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
	       && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
	       && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
	       && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
	print_rtl_single (final_output, insn);
    }

  if (final_output)
    {
      /* Restore the dump flags we temporarily overrode above.  */
      flag_dump_noaddr = save_noaddr;
      flag_dump_unnumbered = save_unnumbered;
      final_insns_dump_p = false;

      if (fclose (final_output))
	{
	  error ("could not close final insn dump file %qs: %m",
		 flag_dump_final_insns);
	  flag_dump_final_insns = NULL;
	}
    }

  /* In case the function was not output,
     don't leave any temporary anonymous types
     queued up for sdb output.  */
#ifdef SDB_DEBUGGING_INFO
  if (write_symbols == SDB_DEBUG)
    sdbout_types (NULL_TREE);
#endif

  flag_rerun_cse_after_global_opts = 0;
  reload_completed = 0;
  epilogue_completed = 0;
#ifdef STACK_REGS
  regstack_completed = 0;
#endif

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */
  init_temp_slots ();

  free_bb_for_insn ();

  delete_tree_ssa ();

  /* We can reduce stack alignment on call site only when we are sure that
     the function body just produced will be actually used in the final
     executable.  */
  if (decl_binds_to_current_def_p (current_function_decl))
    {
      unsigned int pref = crtl->preferred_stack_boundary;
      if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
        pref = crtl->stack_alignment_needed;
      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
	= pref;
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state,
     and the function context push/pop code does not save/restore volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);
  free_after_compilation (cfun);
  return 0;
}

/* Pass descriptor for the per-function cleanup pass; destroys the
   RTL property since the insn chain is dismantled above.  */
struct rtl_opt_pass pass_clean_state =
{
 {
  RTL_PASS,
  "*clean_state",                       /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_clean_state,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FINAL,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_rtl,                             /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};