1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
24
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
30
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
35
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
38
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
42
 43    The code for the function prologue and epilogue is generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
46
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "hard-reg-set.h"
62 #include "output.h"
63 #include "except.h"
64 #include "function.h"
65 #include "toplev.h"
66 #include "reload.h"
67 #include "intl.h"
68 #include "basic-block.h"
69 #include "target.h"
70 #include "debug.h"
71 #include "expr.h"
72 #include "cfglayout.h"
73 #include "tree-pass.h"
74 #include "tree-flow.h"
75 #include "timevar.h"
76 #include "cgraph.h"
77 #include "coverage.h"
78 #include "df.h"
79 #include "vecprim.h"
80 #include "ggc.h"
81 #include "cfgloop.h"
82 #include "params.h"
83
84 #ifdef XCOFF_DEBUGGING_INFO
85 #include "xcoffout.h" /* Needed for external data
86 declarations for e.g. AIX 4.x. */
87 #endif
88
89 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
90 #include "dwarf2out.h"
91 #endif
92
93 #ifdef DBX_DEBUGGING_INFO
94 #include "dbxout.h"
95 #endif
96
97 #ifdef SDB_DEBUGGING_INFO
98 #include "sdbout.h"
99 #endif
100
101 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
102 null default for it to save conditionalization later. */
103 #ifndef CC_STATUS_INIT
104 #define CC_STATUS_INIT
105 #endif
106
107 /* How to start an assembler comment. */
108 #ifndef ASM_COMMENT_START
109 #define ASM_COMMENT_START ";#"
110 #endif
111
112 /* Is the given character a logical line separator for the assembler? */
113 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
114 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
115 #endif
116
117 #ifndef JUMP_TABLES_IN_TEXT_SECTION
118 #define JUMP_TABLES_IN_TEXT_SECTION 0
119 #endif
120
121 /* Bitflags used by final_scan_insn. */
122 #define SEEN_BB 1
123 #define SEEN_NOTE 2
124 #define SEEN_EMITTED 4
125
126 /* Last insn processed by final_scan_insn. */
127 static rtx debug_insn;
128 rtx current_output_insn;
129
130 /* Line number of last NOTE. */
131 static int last_linenum;
132
133 /* Last discriminator written to assembly. */
134 static int last_discriminator;
135
136 /* Discriminator of current block. */
137 static int discriminator;
138
139 /* Highest line number in current block. */
140 static int high_block_linenum;
141
142 /* Likewise for function. */
143 static int high_function_linenum;
144
145 /* Filename of last NOTE. */
146 static const char *last_filename;
147
148 /* Override filename and line number. */
149 static const char *override_filename;
150 static int override_linenum;
151
152 /* Whether to force emission of a line note before the next insn. */
153 static bool force_source_line = false;
154
155 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
156
157 /* Nonzero while outputting an `asm' with operands.
158 This means that inconsistencies are the user's fault, so don't die.
159 The precise value is the insn being output, to pass to error_for_asm. */
160 rtx this_is_asm_operands;
161
162 /* Number of operands of this insn, for an `asm' with operands. */
163 static unsigned int insn_noperands;
164
165 /* Compare optimization flag. */
166
167 static rtx last_ignored_compare = 0;
168
169 /* Assign a unique number to each insn that is output.
170 This can be used to generate unique local labels. */
171
172 static int insn_counter = 0;
173
174 #ifdef HAVE_cc0
175 /* This variable contains machine-dependent flags (defined in tm.h)
176 set and examined by output routines
177 that describe how to interpret the condition codes properly. */
178
179 CC_STATUS cc_status;
180
181 /* During output of an insn, this contains a copy of cc_status
182 from before the insn. */
183
184 CC_STATUS cc_prev_status;
185 #endif
186
187 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
188
189 static int block_depth;
190
191 /* Nonzero if have enabled APP processing of our assembler output. */
192
193 static int app_on;
194
195 /* If we are outputting an insn sequence, this contains the sequence rtx.
196 Zero otherwise. */
197
198 rtx final_sequence;
199
200 #ifdef ASSEMBLER_DIALECT
201
202 /* Number of the assembler dialect to use, starting at 0. */
203 static int dialect_number;
204 #endif
205
206 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
207 rtx current_insn_predicate;
208
209 /* True if printing into -fdump-final-insns= dump. */
210 bool final_insns_dump_p;
211
212 #ifdef HAVE_ATTR_length
213 static int asm_insn_count (rtx);
214 #endif
215 static void profile_function (FILE *);
216 static void profile_after_prologue (FILE *);
217 static bool notice_source_line (rtx, bool *);
218 static rtx walk_alter_subreg (rtx *, bool *);
219 static void output_asm_name (void);
220 static void output_alternate_entry_point (FILE *, rtx);
221 static tree get_mem_expr_from_op (rtx, int *);
222 static void output_asm_operand_names (rtx *, int *, int);
223 #ifdef LEAF_REGISTERS
224 static void leaf_renumber_regs (rtx);
225 #endif
226 #ifdef HAVE_cc0
227 static int alter_cond (rtx);
228 #endif
229 #ifndef ADDR_VEC_ALIGN
230 static int final_addr_vec_align (rtx);
231 #endif
232 #ifdef HAVE_ATTR_length
233 static int align_fuzz (rtx, rtx, int, unsigned);
234 #endif
235 \f
236 /* Initialize data in final at the beginning of a compilation. */
237
238 void
239 init_final (const char *filename ATTRIBUTE_UNUSED)
240 {
241 app_on = 0;
242 final_sequence = 0;
243
244 #ifdef ASSEMBLER_DIALECT
245 dialect_number = ASSEMBLER_DIALECT;
246 #endif
247 }
248
249 /* Default target function prologue and epilogue assembler output.
250
251 If not overridden for epilogue code, then the function body itself
252 contains return instructions wherever needed. */
253 void
254 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
255 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
256 {
257 }
258
259 /* Default target hook that outputs nothing to a stream. */
260 void
261 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
262 {
263 }
264
265 /* Enable APP processing of subsequent output.
266 Used before the output from an `asm' statement. */
267
268 void
269 app_enable (void)
270 {
271 if (! app_on)
272 {
273 fputs (ASM_APP_ON, asm_out_file);
274 app_on = 1;
275 }
276 }
277
278 /* Disable APP processing of subsequent output.
279 Called from varasm.c before most kinds of output. */
280
281 void
282 app_disable (void)
283 {
284 if (app_on)
285 {
286 fputs (ASM_APP_OFF, asm_out_file);
287 app_on = 0;
288 }
289 }
290 \f
291 /* Return the number of slots filled in the current
292 delayed branch sequence (we don't count the insn needing the
293 delay slot). Zero if not in a delayed branch sequence. */
294
295 #ifdef DELAY_SLOTS
296 int
297 dbr_sequence_length (void)
298 {
299 if (final_sequence != 0)
300 return XVECLEN (final_sequence, 0) - 1;
301 else
302 return 0;
303 }
304 #endif
305 \f
306 /* The next two pages contain routines used to compute the length of an insn
307 and to shorten branches. */
308
309 /* Arrays for insn lengths, and addresses. The latter is referenced by
310 `insn_current_length'. */
311
312 static int *insn_lengths;
313
314 VEC(int,heap) *insn_addresses_;
315
316 /* Max uid for which the above arrays are valid. */
317 static int insn_lengths_max_uid;
318
319 /* Address of insn being processed. Used by `insn_current_length'. */
320 int insn_current_address;
321
322 /* Address of insn being processed in previous iteration. */
323 int insn_last_address;
324
 326 /* Known invariant alignment of insn being processed.  */
326 int insn_current_align;
327
328 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
329 gives the next following alignment insn that increases the known
330 alignment, or NULL_RTX if there is no such insn.
331 For any alignment obtained this way, we can again index uid_align with
332 its uid to obtain the next following align that in turn increases the
333 alignment, till we reach NULL_RTX; the sequence obtained this way
334 for each insn we'll call the alignment chain of this insn in the following
335 comments. */
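/* For example (illustrative labels, not from any particular function):
   if a plain insn I is followed first by a label L1 with a known 4-byte
   alignment (log 2) and later by a label L2 with a 16-byte alignment
   (log 4), then uid_align[INSN_UID (I)] is L1, uid_align of L1 is L2,
   and uid_align of L2 is NULL_RTX unless a stronger alignment follows.  */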
336
337 struct label_alignment
338 {
339 short alignment;
340 short max_skip;
341 };
342
343 static rtx *uid_align;
344 static int *uid_shuid;
345 static struct label_alignment *label_align;
346
347 /* Indicate that branch shortening hasn't yet been done. */
348
349 void
350 init_insn_lengths (void)
351 {
352 if (uid_shuid)
353 {
354 free (uid_shuid);
355 uid_shuid = 0;
356 }
357 if (insn_lengths)
358 {
359 free (insn_lengths);
360 insn_lengths = 0;
361 insn_lengths_max_uid = 0;
362 }
363 #ifdef HAVE_ATTR_length
364 INSN_ADDRESSES_FREE ();
365 #endif
366 if (uid_align)
367 {
368 free (uid_align);
369 uid_align = 0;
370 }
371 }
372
373 /* Obtain the current length of an insn. If branch shortening has been done,
374 get its actual length. Otherwise, use FALLBACK_FN to calculate the
375 length. */
376 static inline int
377 get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
378 int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
379 {
380 #ifdef HAVE_ATTR_length
381 rtx body;
382 int i;
383 int length = 0;
384
385 if (insn_lengths_max_uid > INSN_UID (insn))
386 return insn_lengths[INSN_UID (insn)];
387 else
388 switch (GET_CODE (insn))
389 {
390 case NOTE:
391 case BARRIER:
392 case CODE_LABEL:
393 case DEBUG_INSN:
394 return 0;
395
396 case CALL_INSN:
397 length = fallback_fn (insn);
398 break;
399
400 case JUMP_INSN:
401 body = PATTERN (insn);
402 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
403 {
404 /* Alignment is machine-dependent and should be handled by
405 ADDR_VEC_ALIGN. */
406 }
407 else
408 length = fallback_fn (insn);
409 break;
410
411 case INSN:
412 body = PATTERN (insn);
413 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
414 return 0;
415
416 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
417 length = asm_insn_count (body) * fallback_fn (insn);
418 else if (GET_CODE (body) == SEQUENCE)
419 for (i = 0; i < XVECLEN (body, 0); i++)
420 length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
421 else
422 length = fallback_fn (insn);
423 break;
424
425 default:
426 break;
427 }
428
429 #ifdef ADJUST_INSN_LENGTH
430 ADJUST_INSN_LENGTH (insn, length);
431 #endif
432 return length;
433 #else /* not HAVE_ATTR_length */
434 return 0;
435 #define insn_default_length 0
436 #define insn_min_length 0
437 #endif /* not HAVE_ATTR_length */
438 }
439
440 /* Obtain the current length of an insn. If branch shortening has been done,
441 get its actual length. Otherwise, get its maximum length. */
442 int
443 get_attr_length (rtx insn)
444 {
445 return get_attr_length_1 (insn, insn_default_length);
446 }
447
448 /* Obtain the current length of an insn. If branch shortening has been done,
449 get its actual length. Otherwise, get its minimum length. */
450 int
451 get_attr_min_length (rtx insn)
452 {
453 return get_attr_length_1 (insn, insn_min_length);
454 }
455 \f
456 /* Code to handle alignment inside shorten_branches. */
457
 458 /* Here is an explanation of how the algorithm in align_fuzz can give
459 proper results:
460
461 Call a sequence of instructions beginning with alignment point X
462 and continuing until the next alignment point `block X'. When `X'
463 is used in an expression, it means the alignment value of the
464 alignment point.
465
466 Call the distance between the start of the first insn of block X, and
467 the end of the last insn of block X `IX', for the `inner size of X'.
468 This is clearly the sum of the instruction lengths.
469
470 Likewise with the next alignment-delimited block following X, which we
471 shall call block Y.
472
473 Call the distance between the start of the first insn of block X, and
474 the start of the first insn of block Y `OX', for the `outer size of X'.
475
476 The estimated padding is then OX - IX.
477
478 OX can be safely estimated as
479
480 if (X >= Y)
481 OX = round_up(IX, Y)
482 else
483 OX = round_up(IX, X) + Y - X
484
485 Clearly est(IX) >= real(IX), because that only depends on the
486 instruction lengths, and those being overestimated is a given.
487
488 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
489 we needn't worry about that when thinking about OX.
490
491 When X >= Y, the alignment provided by Y adds no uncertainty factor
492 for branch ranges starting before X, so we can just round what we have.
493 But when X < Y, we don't know anything about the, so to speak,
494 `middle bits', so we have to assume the worst when aligning up from an
495 address mod X to one mod Y, which is Y - X. */
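/* A worked example of the estimate above (numbers are illustrative):
   with X = 4, Y = 8 and an inner size IX = 10, we have X < Y, so
   OX = round_up (10, 4) + 8 - 4 = 16, i.e. we budget OX - IX = 6 bytes
   for padding.  With X = 8 and Y = 4 instead, X >= Y gives
   OX = round_up (10, 4) = 12, i.e. at most 2 bytes of padding.  */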
496
497 #ifndef LABEL_ALIGN
498 #define LABEL_ALIGN(LABEL) align_labels_log
499 #endif
500
501 #ifndef LABEL_ALIGN_MAX_SKIP
502 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
503 #endif
504
505 #ifndef LOOP_ALIGN
506 #define LOOP_ALIGN(LABEL) align_loops_log
507 #endif
508
509 #ifndef LOOP_ALIGN_MAX_SKIP
510 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
511 #endif
512
513 #ifndef LABEL_ALIGN_AFTER_BARRIER
514 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
515 #endif
516
517 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
518 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
519 #endif
520
521 #ifndef JUMP_ALIGN
522 #define JUMP_ALIGN(LABEL) align_jumps_log
523 #endif
524
525 #ifndef JUMP_ALIGN_MAX_SKIP
526 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
527 #endif
528
529 #ifndef ADDR_VEC_ALIGN
530 static int
531 final_addr_vec_align (rtx addr_vec)
532 {
533 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
534
535 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
536 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
537 return exact_log2 (align);
538
539 }
540
541 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
542 #endif
543
544 #ifndef INSN_LENGTH_ALIGNMENT
545 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
546 #endif
547
548 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
549
550 static int min_labelno, max_labelno;
551
552 #define LABEL_TO_ALIGNMENT(LABEL) \
553 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
554
555 #define LABEL_TO_MAX_SKIP(LABEL) \
556 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
557
 558 /* For the benefit of port-specific code, do this also as a function.  */
559
560 int
561 label_to_alignment (rtx label)
562 {
563 if (CODE_LABEL_NUMBER (label) <= max_labelno)
564 return LABEL_TO_ALIGNMENT (label);
565 return 0;
566 }
567
568 int
569 label_to_max_skip (rtx label)
570 {
571 if (CODE_LABEL_NUMBER (label) <= max_labelno)
572 return LABEL_TO_MAX_SKIP (label);
573 return 0;
574 }
575
576 #ifdef HAVE_ATTR_length
577 /* The differences in addresses
578 between a branch and its target might grow or shrink depending on
579 the alignment the start insn of the range (the branch for a forward
580 branch or the label for a backward branch) starts out on; if these
581 differences are used naively, they can even oscillate infinitely.
582 We therefore want to compute a 'worst case' address difference that
 583    is independent of the alignment the start insn of the range ends
584 up on, and that is at least as large as the actual difference.
585 The function align_fuzz calculates the amount we have to add to the
586 naively computed difference, by traversing the part of the alignment
587 chain of the start insn of the range that is in front of the end insn
588 of the range, and considering for each alignment the maximum amount
589 that it might contribute to a size increase.
590
591 For casesi tables, we also want to know worst case minimum amounts of
592 address difference, in case a machine description wants to introduce
593 some common offset that is added to all offsets in a table.
594 For this purpose, align_fuzz with a growth argument of 0 computes the
595 appropriate adjustment. */
596
597 /* Compute the maximum delta by which the difference of the addresses of
598 START and END might grow / shrink due to a different address for start
599 which changes the size of alignment insns between START and END.
600 KNOWN_ALIGN_LOG is the alignment known for START.
601 GROWTH should be ~0 if the objective is to compute potential code size
602 increase, and 0 if the objective is to compute potential shrink.
603 The return value is undefined for any other value of GROWTH. */
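/* As an illustration (hypothetical numbers): if the only alignment label
   between START and END requests 8-byte alignment while KNOWN_ALIGN_LOG
   is 2 (4 bytes known), then with GROWTH == ~0 the fuzz is at most
   8 - 4 = 4, the worst-case padding that one label can add to the
   address difference.  */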
604
605 static int
606 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
607 {
608 int uid = INSN_UID (start);
609 rtx align_label;
610 int known_align = 1 << known_align_log;
611 int end_shuid = INSN_SHUID (end);
612 int fuzz = 0;
613
614 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
615 {
616 int align_addr, new_align;
617
618 uid = INSN_UID (align_label);
619 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
620 if (uid_shuid[uid] > end_shuid)
621 break;
622 known_align_log = LABEL_TO_ALIGNMENT (align_label);
623 new_align = 1 << known_align_log;
624 if (new_align < known_align)
625 continue;
626 fuzz += (-align_addr ^ growth) & (new_align - known_align);
627 known_align = new_align;
628 }
629 return fuzz;
630 }
631
632 /* Compute a worst-case reference address of a branch so that it
633 can be safely used in the presence of aligned labels. Since the
634 size of the branch itself is unknown, the size of the branch is
635 not included in the range. I.e. for a forward branch, the reference
636 address is the end address of the branch as known from the previous
637 branch shortening pass, minus a value to account for possible size
638 increase due to alignment. For a backward branch, it is the start
639 address of the branch as known from the current pass, plus a value
640 to account for possible size increase due to alignment.
641 NB.: Therefore, the maximum offset allowed for backward branches needs
642 to exclude the branch size. */
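/* E.g. (illustrative addresses): for a forward branch whose end address
   was 100 in the previous pass and whose intervening alignments could add
   up to 6 bytes of padding, the reference address used is 100 - 6 = 94;
   a backward branch at current address 200 with the same worst-case
   padding uses 200 + 6 = 206.  */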
643
644 int
645 insn_current_reference_address (rtx branch)
646 {
647 rtx dest, seq;
648 int seq_uid;
649
650 if (! INSN_ADDRESSES_SET_P ())
651 return 0;
652
653 seq = NEXT_INSN (PREV_INSN (branch));
654 seq_uid = INSN_UID (seq);
655 if (!JUMP_P (branch))
656 /* This can happen for example on the PA; the objective is to know the
657 offset to address something in front of the start of the function.
658 Thus, we can treat it like a backward branch.
659 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
660 any alignment we'd encounter, so we skip the call to align_fuzz. */
661 return insn_current_address;
662 dest = JUMP_LABEL (branch);
663
664 /* BRANCH has no proper alignment chain set, so use SEQ.
665 BRANCH also has no INSN_SHUID. */
666 if (INSN_SHUID (seq) < INSN_SHUID (dest))
667 {
668 /* Forward branch. */
669 return (insn_last_address + insn_lengths[seq_uid]
670 - align_fuzz (seq, dest, length_unit_log, ~0));
671 }
672 else
673 {
674 /* Backward branch. */
675 return (insn_current_address
676 + align_fuzz (dest, seq, length_unit_log, ~0));
677 }
678 }
679 #endif /* HAVE_ATTR_length */
680 \f
681 /* Compute branch alignments based on frequency information in the
682 CFG. */
683
684 unsigned int
685 compute_alignments (void)
686 {
687 int log, max_skip, max_log;
688 basic_block bb;
689 int freq_max = 0;
690 int freq_threshold = 0;
691
692 if (label_align)
693 {
694 free (label_align);
695 label_align = 0;
696 }
697
698 max_labelno = max_label_num ();
699 min_labelno = get_first_label_num ();
700 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
701
702 /* If not optimizing or optimizing for size, don't assign any alignments. */
703 if (! optimize || optimize_function_for_size_p (cfun))
704 return 0;
705
706 if (dump_file)
707 {
708 dump_flow_info (dump_file, TDF_DETAILS);
709 flow_loops_dump (dump_file, NULL, 1);
710 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
711 }
712 FOR_EACH_BB (bb)
713 if (bb->frequency > freq_max)
714 freq_max = bb->frequency;
715 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
716
717 if (dump_file)
718 fprintf(dump_file, "freq_max: %i\n",freq_max);
719 FOR_EACH_BB (bb)
720 {
721 rtx label = BB_HEAD (bb);
722 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
723 edge e;
724 edge_iterator ei;
725
726 if (!LABEL_P (label)
727 || optimize_bb_for_size_p (bb))
728 {
729 if (dump_file)
730 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
731 bb->index, bb->frequency, bb->loop_father->num, bb->loop_depth);
732 continue;
733 }
734 max_log = LABEL_ALIGN (label);
735 max_skip = LABEL_ALIGN_MAX_SKIP;
736
737 FOR_EACH_EDGE (e, ei, bb->preds)
738 {
739 if (e->flags & EDGE_FALLTHRU)
740 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
741 else
742 branch_frequency += EDGE_FREQUENCY (e);
743 }
744 if (dump_file)
745 {
746 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
747 bb->index, bb->frequency, bb->loop_father->num,
748 bb->loop_depth,
749 fallthru_frequency, branch_frequency);
750 if (!bb->loop_father->inner && bb->loop_father->num)
751 fprintf (dump_file, " inner_loop");
752 if (bb->loop_father->header == bb)
753 fprintf (dump_file, " loop_header");
754 fprintf (dump_file, "\n");
755 }
756
 757 	  /* There are two purposes for aligning a block with no fallthru incoming edge:
 758 	     1) to avoid fetch stalls when the branch destination is near a cache boundary
 759 	     2) to improve cache efficiency in case the previous block is not executed
 760 	     (so it does not need to be in the cache).
 761 
 762 	     To catch the first case, we align frequently executed blocks.
 763 	     To catch the second, we align blocks that are executed more frequently
 764 	     than the predecessor and the predecessor is likely to not be executed
 765 	     when the function is called.  */
766
767 if (!has_fallthru
768 && (branch_frequency > freq_threshold
769 || (bb->frequency > bb->prev_bb->frequency * 10
770 && (bb->prev_bb->frequency
771 <= ENTRY_BLOCK_PTR->frequency / 2))))
772 {
773 log = JUMP_ALIGN (label);
774 if (dump_file)
775 fprintf(dump_file, " jump alignment added.\n");
776 if (max_log < log)
777 {
778 max_log = log;
779 max_skip = JUMP_ALIGN_MAX_SKIP;
780 }
781 }
 782 	  /* In case the block is frequent and reached mostly by a non-fallthru edge,
 783 	     align it.  It is most likely the first block of a loop.  */
784 if (has_fallthru
785 && optimize_bb_for_speed_p (bb)
786 && branch_frequency + fallthru_frequency > freq_threshold
787 && (branch_frequency
788 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
789 {
790 log = LOOP_ALIGN (label);
791 if (dump_file)
792 fprintf(dump_file, " internal loop alignment added.\n");
793 if (max_log < log)
794 {
795 max_log = log;
796 max_skip = LOOP_ALIGN_MAX_SKIP;
797 }
798 }
799 LABEL_TO_ALIGNMENT (label) = max_log;
800 LABEL_TO_MAX_SKIP (label) = max_skip;
801 }
802
803 if (dump_file)
804 {
805 loop_optimizer_finalize ();
806 free_dominance_info (CDI_DOMINATORS);
807 }
808 return 0;
809 }
810
811 struct rtl_opt_pass pass_compute_alignments =
812 {
813 {
814 RTL_PASS,
815 "alignments", /* name */
816 NULL, /* gate */
817 compute_alignments, /* execute */
818 NULL, /* sub */
819 NULL, /* next */
820 0, /* static_pass_number */
821 TV_NONE, /* tv_id */
822 0, /* properties_required */
823 0, /* properties_provided */
824 0, /* properties_destroyed */
825 0, /* todo_flags_start */
826 TODO_dump_func | TODO_verify_rtl_sharing
827 | TODO_ggc_collect /* todo_flags_finish */
828 }
829 };
830
831 \f
832 /* Make a pass over all insns and compute their actual lengths by shortening
833 any branches of variable length if possible. */
834
835 /* shorten_branches might be called multiple times: for example, the SH
836 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
837 In order to do this, it needs proper length information, which it obtains
838 by calling shorten_branches. This cannot be collapsed with
839 shorten_branches itself into a single pass unless we also want to integrate
840 reorg.c, since the branch splitting exposes new instructions with delay
841 slots. */
842
843 void
844 shorten_branches (rtx first ATTRIBUTE_UNUSED)
845 {
846 rtx insn;
847 int max_uid;
848 int i;
849 int max_log;
850 int max_skip;
851 #ifdef HAVE_ATTR_length
852 #define MAX_CODE_ALIGN 16
853 rtx seq;
854 int something_changed = 1;
855 char *varying_length;
856 rtx body;
857 int uid;
858 rtx align_tab[MAX_CODE_ALIGN];
859
860 #endif
861
862 /* Compute maximum UID and allocate label_align / uid_shuid. */
863 max_uid = get_max_uid ();
864
865 /* Free uid_shuid before reallocating it. */
866 free (uid_shuid);
867
868 uid_shuid = XNEWVEC (int, max_uid);
869
870 if (max_labelno != max_label_num ())
871 {
872 int old = max_labelno;
873 int n_labels;
874 int n_old_labels;
875
876 max_labelno = max_label_num ();
877
878 n_labels = max_labelno - min_labelno + 1;
879 n_old_labels = old - min_labelno + 1;
880
881 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
882
883 /* Range of labels grows monotonically in the function. Failing here
 884 	 means that the initialization of the array got lost.  */
885 gcc_assert (n_old_labels <= n_labels);
886
887 memset (label_align + n_old_labels, 0,
888 (n_labels - n_old_labels) * sizeof (struct label_alignment));
889 }
890
891 /* Initialize label_align and set up uid_shuid to be strictly
892 monotonically rising with insn order. */
893 /* We use max_log here to keep track of the maximum alignment we want to
894 impose on the next CODE_LABEL (or the current one if we are processing
895 the CODE_LABEL itself). */
896
897 max_log = 0;
898 max_skip = 0;
899
900 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
901 {
902 int log;
903
904 INSN_SHUID (insn) = i++;
905 if (INSN_P (insn))
906 continue;
907
908 if (LABEL_P (insn))
909 {
910 rtx next;
911 bool next_is_jumptable;
912
913 /* Merge in alignments computed by compute_alignments. */
914 log = LABEL_TO_ALIGNMENT (insn);
915 if (max_log < log)
916 {
917 max_log = log;
918 max_skip = LABEL_TO_MAX_SKIP (insn);
919 }
920
921 next = next_nonnote_insn (insn);
922 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
923 if (!next_is_jumptable)
924 {
925 log = LABEL_ALIGN (insn);
926 if (max_log < log)
927 {
928 max_log = log;
929 max_skip = LABEL_ALIGN_MAX_SKIP;
930 }
931 }
932 /* ADDR_VECs only take room if read-only data goes into the text
933 section. */
934 if ((JUMP_TABLES_IN_TEXT_SECTION
935 || readonly_data_section == text_section)
936 && next_is_jumptable)
937 {
938 log = ADDR_VEC_ALIGN (next);
939 if (max_log < log)
940 {
941 max_log = log;
942 max_skip = LABEL_ALIGN_MAX_SKIP;
943 }
944 }
945 LABEL_TO_ALIGNMENT (insn) = max_log;
946 LABEL_TO_MAX_SKIP (insn) = max_skip;
947 max_log = 0;
948 max_skip = 0;
949 }
950 else if (BARRIER_P (insn))
951 {
952 rtx label;
953
954 for (label = insn; label && ! INSN_P (label);
955 label = NEXT_INSN (label))
956 if (LABEL_P (label))
957 {
958 log = LABEL_ALIGN_AFTER_BARRIER (insn);
959 if (max_log < log)
960 {
961 max_log = log;
962 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
963 }
964 break;
965 }
966 }
967 }
968 #ifdef HAVE_ATTR_length
969
970 /* Allocate the rest of the arrays. */
971 insn_lengths = XNEWVEC (int, max_uid);
972 insn_lengths_max_uid = max_uid;
973 /* Syntax errors can lead to labels being outside of the main insn stream.
974 Initialize insn_addresses, so that we get reproducible results. */
975 INSN_ADDRESSES_ALLOC (max_uid);
976
977 varying_length = XCNEWVEC (char, max_uid);
978
979 /* Initialize uid_align. We scan instructions
980 from end to start, and keep in align_tab[n] the last seen insn
981 that does an alignment of at least n+1, i.e. the successor
982 in the alignment chain for an insn that does / has a known
983 alignment of n. */
984 uid_align = XCNEWVEC (rtx, max_uid);
985
986 for (i = MAX_CODE_ALIGN; --i >= 0;)
987 align_tab[i] = NULL_RTX;
988 seq = get_last_insn ();
989 for (; seq; seq = PREV_INSN (seq))
990 {
991 int uid = INSN_UID (seq);
992 int log;
993 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
994 uid_align[uid] = align_tab[0];
995 if (log)
996 {
997 /* Found an alignment label. */
998 uid_align[uid] = align_tab[log];
999 for (i = log - 1; i >= 0; i--)
1000 align_tab[i] = seq;
1001 }
1002 }
1003 #ifdef CASE_VECTOR_SHORTEN_MODE
1004 if (optimize)
1005 {
1006 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1007 label fields. */
1008
1009 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1010 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1011 int rel;
1012
1013 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1014 {
1015 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1016 int len, i, min, max, insn_shuid;
1017 int min_align;
1018 addr_diff_vec_flags flags;
1019
1020 if (!JUMP_P (insn)
1021 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1022 continue;
1023 pat = PATTERN (insn);
1024 len = XVECLEN (pat, 1);
1025 gcc_assert (len > 0);
1026 min_align = MAX_CODE_ALIGN;
1027 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1028 {
1029 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1030 int shuid = INSN_SHUID (lab);
1031 if (shuid < min)
1032 {
1033 min = shuid;
1034 min_lab = lab;
1035 }
1036 if (shuid > max)
1037 {
1038 max = shuid;
1039 max_lab = lab;
1040 }
1041 if (min_align > LABEL_TO_ALIGNMENT (lab))
1042 min_align = LABEL_TO_ALIGNMENT (lab);
1043 }
1044 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1045 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1046 insn_shuid = INSN_SHUID (insn);
1047 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1048 memset (&flags, 0, sizeof (flags));
1049 flags.min_align = min_align;
1050 flags.base_after_vec = rel > insn_shuid;
1051 flags.min_after_vec = min > insn_shuid;
1052 flags.max_after_vec = max > insn_shuid;
1053 flags.min_after_base = min > rel;
1054 flags.max_after_base = max > rel;
1055 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1056 }
1057 }
1058 #endif /* CASE_VECTOR_SHORTEN_MODE */
1059
1060 /* Compute initial lengths, addresses, and varying flags for each insn. */
1061 for (insn_current_address = 0, insn = first;
1062 insn != 0;
1063 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1064 {
1065 uid = INSN_UID (insn);
1066
1067 insn_lengths[uid] = 0;
1068
1069 if (LABEL_P (insn))
1070 {
1071 int log = LABEL_TO_ALIGNMENT (insn);
1072 if (log)
1073 {
1074 int align = 1 << log;
1075 int new_address = (insn_current_address + align - 1) & -align;
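	      /* E.g. with log == 3 (8-byte alignment) and
	         insn_current_address == 13, new_address is (13 + 7) & -8 == 16,
	         so the label is charged 3 bytes of padding as its "length".  */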
1076 insn_lengths[uid] = new_address - insn_current_address;
1077 }
1078 }
1079
1080 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1081
1082 if (NOTE_P (insn) || BARRIER_P (insn)
1083 || LABEL_P (insn) || DEBUG_INSN_P(insn))
1084 continue;
1085 if (INSN_DELETED_P (insn))
1086 continue;
1087
1088 body = PATTERN (insn);
1089 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1090 {
1091 /* This only takes room if read-only data goes into the text
1092 section. */
1093 if (JUMP_TABLES_IN_TEXT_SECTION
1094 || readonly_data_section == text_section)
1095 insn_lengths[uid] = (XVECLEN (body,
1096 GET_CODE (body) == ADDR_DIFF_VEC)
1097 * GET_MODE_SIZE (GET_MODE (body)));
1098 /* Alignment is handled by ADDR_VEC_ALIGN. */
1099 }
1100 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1101 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1102 else if (GET_CODE (body) == SEQUENCE)
1103 {
1104 int i;
1105 int const_delay_slots;
1106 #ifdef DELAY_SLOTS
1107 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1108 #else
1109 const_delay_slots = 0;
1110 #endif
1111 /* Inside a delay slot sequence, we do not do any branch shortening
1112 if the shortening could change the number of delay slots
1113 of the branch. */
1114 for (i = 0; i < XVECLEN (body, 0); i++)
1115 {
1116 rtx inner_insn = XVECEXP (body, 0, i);
1117 int inner_uid = INSN_UID (inner_insn);
1118 int inner_length;
1119
1120 if (GET_CODE (body) == ASM_INPUT
1121 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1122 inner_length = (asm_insn_count (PATTERN (inner_insn))
1123 * insn_default_length (inner_insn));
1124 else
1125 inner_length = insn_default_length (inner_insn);
1126
1127 insn_lengths[inner_uid] = inner_length;
1128 if (const_delay_slots)
1129 {
1130 if ((varying_length[inner_uid]
1131 = insn_variable_length_p (inner_insn)) != 0)
1132 varying_length[uid] = 1;
1133 INSN_ADDRESSES (inner_uid) = (insn_current_address
1134 + insn_lengths[uid]);
1135 }
1136 else
1137 varying_length[inner_uid] = 0;
1138 insn_lengths[uid] += inner_length;
1139 }
1140 }
1141 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1142 {
1143 insn_lengths[uid] = insn_default_length (insn);
1144 varying_length[uid] = insn_variable_length_p (insn);
1145 }
1146
1147 /* If needed, do any adjustment. */
1148 #ifdef ADJUST_INSN_LENGTH
1149 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1150 if (insn_lengths[uid] < 0)
1151 fatal_insn ("negative insn length", insn);
1152 #endif
1153 }
1154
1155 /* Now loop over all the insns finding varying length insns. For each,
1156 get the current insn length. If it has changed, reflect the change.
1157 When nothing changes for a full pass, we are done. */
1158
1159 while (something_changed)
1160 {
1161 something_changed = 0;
1162 insn_current_align = MAX_CODE_ALIGN - 1;
1163 for (insn_current_address = 0, insn = first;
1164 insn != 0;
1165 insn = NEXT_INSN (insn))
1166 {
1167 int new_length;
1168 #ifdef ADJUST_INSN_LENGTH
1169 int tmp_length;
1170 #endif
1171 int length_align;
1172
1173 uid = INSN_UID (insn);
1174
1175 if (LABEL_P (insn))
1176 {
1177 int log = LABEL_TO_ALIGNMENT (insn);
1178 if (log > insn_current_align)
1179 {
1180 int align = 1 << log;
1181 int new_address= (insn_current_address + align - 1) & -align;
1182 insn_lengths[uid] = new_address - insn_current_address;
1183 insn_current_align = log;
1184 insn_current_address = new_address;
1185 }
1186 else
1187 insn_lengths[uid] = 0;
1188 INSN_ADDRESSES (uid) = insn_current_address;
1189 continue;
1190 }
1191
1192 length_align = INSN_LENGTH_ALIGNMENT (insn);
1193 if (length_align < insn_current_align)
1194 insn_current_align = length_align;
1195
1196 insn_last_address = INSN_ADDRESSES (uid);
1197 INSN_ADDRESSES (uid) = insn_current_address;
1198
1199 #ifdef CASE_VECTOR_SHORTEN_MODE
1200 if (optimize && JUMP_P (insn)
1201 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1202 {
1203 rtx body = PATTERN (insn);
1204 int old_length = insn_lengths[uid];
1205 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1206 rtx min_lab = XEXP (XEXP (body, 2), 0);
1207 rtx max_lab = XEXP (XEXP (body, 3), 0);
1208 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1209 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1210 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1211 rtx prev;
1212 int rel_align = 0;
1213 addr_diff_vec_flags flags;
1214
1215 /* Avoid automatic aggregate initialization. */
1216 flags = ADDR_DIFF_VEC_FLAGS (body);
1217
1218 /* Try to find a known alignment for rel_lab. */
1219 for (prev = rel_lab;
1220 prev
1221 && ! insn_lengths[INSN_UID (prev)]
1222 && ! (varying_length[INSN_UID (prev)] & 1);
1223 prev = PREV_INSN (prev))
1224 if (varying_length[INSN_UID (prev)] & 2)
1225 {
1226 rel_align = LABEL_TO_ALIGNMENT (prev);
1227 break;
1228 }
1229
1230 /* See the comment on addr_diff_vec_flags in rtl.h for the
1231 meaning of the flags values. base: REL_LAB vec: INSN */
 1232 	      /* Anything after INSN still has addresses from the last
1233 pass; adjust these so that they reflect our current
1234 estimate for this pass. */
1235 if (flags.base_after_vec)
1236 rel_addr += insn_current_address - insn_last_address;
1237 if (flags.min_after_vec)
1238 min_addr += insn_current_address - insn_last_address;
1239 if (flags.max_after_vec)
1240 max_addr += insn_current_address - insn_last_address;
1241 /* We want to know the worst case, i.e. lowest possible value
1242 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1243 its offset is positive, and we have to be wary of code shrink;
 1244 		 otherwise, it is negative, and we have to be wary of code
1245 size increase. */
1246 if (flags.min_after_base)
1247 {
1248 /* If INSN is between REL_LAB and MIN_LAB, the size
1249 changes we are about to make can change the alignment
1250 within the observed offset, therefore we have to break
1251 it up into two parts that are independent. */
1252 if (! flags.base_after_vec && flags.min_after_vec)
1253 {
1254 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1255 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1256 }
1257 else
1258 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1259 }
1260 else
1261 {
1262 if (flags.base_after_vec && ! flags.min_after_vec)
1263 {
1264 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1265 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1266 }
1267 else
1268 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1269 }
 1270 	      /* Likewise, determine the highest possible value
1271 for the offset of MAX_LAB. */
1272 if (flags.max_after_base)
1273 {
1274 if (! flags.base_after_vec && flags.max_after_vec)
1275 {
1276 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1277 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1278 }
1279 else
1280 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1281 }
1282 else
1283 {
1284 if (flags.base_after_vec && ! flags.max_after_vec)
1285 {
1286 max_addr += align_fuzz (max_lab, insn, 0, 0);
1287 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1288 }
1289 else
1290 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1291 }
1292 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1293 max_addr - rel_addr,
1294 body));
1295 if (JUMP_TABLES_IN_TEXT_SECTION
1296 || readonly_data_section == text_section)
1297 {
1298 insn_lengths[uid]
1299 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1300 insn_current_address += insn_lengths[uid];
1301 if (insn_lengths[uid] != old_length)
1302 something_changed = 1;
1303 }
1304
1305 continue;
1306 }
1307 #endif /* CASE_VECTOR_SHORTEN_MODE */
1308
1309 if (! (varying_length[uid]))
1310 {
1311 if (NONJUMP_INSN_P (insn)
1312 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1313 {
1314 int i;
1315
1316 body = PATTERN (insn);
1317 for (i = 0; i < XVECLEN (body, 0); i++)
1318 {
1319 rtx inner_insn = XVECEXP (body, 0, i);
1320 int inner_uid = INSN_UID (inner_insn);
1321
1322 INSN_ADDRESSES (inner_uid) = insn_current_address;
1323
1324 insn_current_address += insn_lengths[inner_uid];
1325 }
1326 }
1327 else
1328 insn_current_address += insn_lengths[uid];
1329
1330 continue;
1331 }
1332
1333 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1334 {
1335 int i;
1336
1337 body = PATTERN (insn);
1338 new_length = 0;
1339 for (i = 0; i < XVECLEN (body, 0); i++)
1340 {
1341 rtx inner_insn = XVECEXP (body, 0, i);
1342 int inner_uid = INSN_UID (inner_insn);
1343 int inner_length;
1344
1345 INSN_ADDRESSES (inner_uid) = insn_current_address;
1346
1347 /* insn_current_length returns 0 for insns with a
1348 non-varying length. */
1349 if (! varying_length[inner_uid])
1350 inner_length = insn_lengths[inner_uid];
1351 else
1352 inner_length = insn_current_length (inner_insn);
1353
1354 if (inner_length != insn_lengths[inner_uid])
1355 {
1356 insn_lengths[inner_uid] = inner_length;
1357 something_changed = 1;
1358 }
1359 insn_current_address += insn_lengths[inner_uid];
1360 new_length += inner_length;
1361 }
1362 }
1363 else
1364 {
1365 new_length = insn_current_length (insn);
1366 insn_current_address += new_length;
1367 }
1368
1369 #ifdef ADJUST_INSN_LENGTH
1370 /* If needed, do any adjustment. */
1371 tmp_length = new_length;
1372 ADJUST_INSN_LENGTH (insn, new_length);
1373 insn_current_address += (new_length - tmp_length);
1374 #endif
1375
1376 if (new_length != insn_lengths[uid])
1377 {
1378 insn_lengths[uid] = new_length;
1379 something_changed = 1;
1380 }
1381 }
1382 /* For a non-optimizing compile, do only a single pass. */
1383 if (!optimize)
1384 break;
1385 }
1386
1387 free (varying_length);
1388
1389 #endif /* HAVE_ATTR_length */
1390 }
1391
1392 #ifdef HAVE_ATTR_length
1393 /* Given the body of an INSN known to be generated by an ASM statement, return
1394 the number of machine instructions likely to be generated for this insn.
1395 This is used to compute its length. */
1396
1397 static int
1398 asm_insn_count (rtx body)
1399 {
1400 const char *templ;
1401
1402 if (GET_CODE (body) == ASM_INPUT)
1403 templ = XSTR (body, 0);
1404 else
1405 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1406
1407 return asm_str_count (templ);
1408 }
1409 #endif
1410
1411 /* Return the number of machine instructions likely to be generated for the
1412 inline-asm template. */
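/* For example, asm_str_count ("mov %1, %0\n\tadd %2, %0") returns 2:
   the count starts at 1 and each newline or logical line separator
   (';' under the default IS_ASM_LOGICAL_LINE_SEPARATOR) adds one, while
   an empty template counts as 0.  */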
1413 int
1414 asm_str_count (const char *templ)
1415 {
1416 int count = 1;
1417
1418 if (!*templ)
1419 return 0;
1420
1421 for (; *templ; templ++)
1422 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1423 || *templ == '\n')
1424 count++;
1425
1426 return count;
1427 }
1428 \f
1429 /* ??? This is probably the wrong place for these. */
1430 /* Structure recording the mapping from source file and directory
1431 names at compile time to those to be embedded in debug
1432 information. */
1433 typedef struct debug_prefix_map
1434 {
1435 const char *old_prefix;
1436 const char *new_prefix;
1437 size_t old_len;
1438 size_t new_len;
1439 struct debug_prefix_map *next;
1440 } debug_prefix_map;
1441
1442 /* Linked list of such structures. */
1443 debug_prefix_map *debug_prefix_maps;
1444
1445
1446 /* Record a debug file prefix mapping. ARG is the argument to
1447 -fdebug-prefix-map and must be of the form OLD=NEW. */
1448
1449 void
1450 add_debug_prefix_map (const char *arg)
1451 {
1452 debug_prefix_map *map;
1453 const char *p;
1454
1455 p = strchr (arg, '=');
1456 if (!p)
1457 {
1458 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1459 return;
1460 }
1461 map = XNEW (debug_prefix_map);
1462 map->old_prefix = xstrndup (arg, p - arg);
1463 map->old_len = p - arg;
1464 p++;
1465 map->new_prefix = xstrdup (p);
1466 map->new_len = strlen (p);
1467 map->next = debug_prefix_maps;
1468 debug_prefix_maps = map;
1469 }
1470
1471 /* Perform user-specified mapping of debug filename prefixes. Return
1472 the new name corresponding to FILENAME. */
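/* For example, with a (hypothetical) -fdebug-prefix-map=/tmp/build=/usr/src
   in effect, "/tmp/build/lib/foo.c" is remapped to "/usr/src/lib/foo.c";
   a filename matching none of the recorded prefixes is returned
   unchanged.  */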
1473
1474 const char *
1475 remap_debug_filename (const char *filename)
1476 {
1477 debug_prefix_map *map;
1478 char *s;
1479 const char *name;
1480 size_t name_len;
1481
1482 for (map = debug_prefix_maps; map; map = map->next)
1483 if (strncmp (filename, map->old_prefix, map->old_len) == 0)
1484 break;
1485 if (!map)
1486 return filename;
1487 name = filename + map->old_len;
1488 name_len = strlen (name) + 1;
1489 s = (char *) alloca (name_len + map->new_len);
1490 memcpy (s, map->new_prefix, map->new_len);
1491 memcpy (s + map->new_len, name, name_len);
1492 return ggc_strdup (s);
1493 }
1494 \f
1495 /* Return true if DWARF2 debug info can be emitted for DECL. */
1496
1497 static bool
1498 dwarf2_debug_info_emitted_p (tree decl)
1499 {
1500 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1501 return false;
1502
1503 if (DECL_IGNORED_P (decl))
1504 return false;
1505
1506 return true;
1507 }
1508
1509 /* Output assembler code for the start of a function,
1510 and initialize some of the variables in this file
1511 for the new function. The label for the function and associated
1512 assembler pseudo-ops have already been output in `assemble_start_function'.
1513
1514 FIRST is the first insn of the rtl for the function being compiled.
1515 FILE is the file to write assembler code to.
1516 OPTIMIZE is nonzero if we should eliminate redundant
1517 test and compare insns. */
1518
1519 void
1520 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1521 int optimize ATTRIBUTE_UNUSED)
1522 {
1523 block_depth = 0;
1524
1525 this_is_asm_operands = 0;
1526
1527 last_filename = locator_file (prologue_locator);
1528 last_linenum = locator_line (prologue_locator);
1529 last_discriminator = discriminator = 0;
1530
1531 high_block_linenum = high_function_linenum = last_linenum;
1532
1533 if (!DECL_IGNORED_P (current_function_decl))
1534 debug_hooks->begin_prologue (last_linenum, last_filename);
1535
1536 #if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
1537 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1538 dwarf2out_begin_prologue (0, NULL);
1539 #endif
1540
1541 #ifdef LEAF_REG_REMAP
1542 if (current_function_uses_only_leaf_regs)
1543 leaf_renumber_regs (first);
1544 #endif
1545
1546 /* The Sun386i and perhaps other machines don't work right
1547 if the profiling code comes after the prologue. */
1548 #ifdef PROFILE_BEFORE_PROLOGUE
1549 if (crtl->profile)
1550 profile_function (file);
1551 #endif /* PROFILE_BEFORE_PROLOGUE */
1552
1553 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1554 if (dwarf2out_do_frame ())
1555 dwarf2out_frame_debug (NULL_RTX, false);
1556 #endif
1557
1558 /* If debugging, assign block numbers to all of the blocks in this
1559 function. */
1560 if (write_symbols)
1561 {
1562 reemit_insn_block_notes ();
1563 number_blocks (current_function_decl);
1564 /* We never actually put out begin/end notes for the top-level
1565 block in the function. But, conceptually, that block is
1566 always needed. */
1567 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1568 }
1569
1570 if (warn_frame_larger_than
1571 && get_frame_size () > frame_larger_than_size)
1572 {
1573 /* Issue a warning */
1574 warning (OPT_Wframe_larger_than_,
1575 "the frame size of %wd bytes is larger than %wd bytes",
1576 get_frame_size (), frame_larger_than_size);
1577 }
1578
1579 /* First output the function prologue: code to set up the stack frame. */
1580 targetm.asm_out.function_prologue (file, get_frame_size ());
1581
1582 /* If the machine represents the prologue as RTL, the profiling code must
1583 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1584 #ifdef HAVE_prologue
1585 if (! HAVE_prologue)
1586 #endif
1587 profile_after_prologue (file);
1588 }
1589
1590 static void
1591 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1592 {
1593 #ifndef PROFILE_BEFORE_PROLOGUE
1594 if (crtl->profile)
1595 profile_function (file);
1596 #endif /* not PROFILE_BEFORE_PROLOGUE */
1597 }
1598
1599 static void
1600 profile_function (FILE *file ATTRIBUTE_UNUSED)
1601 {
1602 #ifndef NO_PROFILE_COUNTERS
1603 # define NO_PROFILE_COUNTERS 0
1604 #endif
1605 #ifdef ASM_OUTPUT_REG_PUSH
1606 rtx sval = NULL, chain = NULL;
1607
1608 if (cfun->returns_struct)
1609 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1610 true);
1611 if (cfun->static_chain_decl)
1612 chain = targetm.calls.static_chain (current_function_decl, true);
1613 #endif /* ASM_OUTPUT_REG_PUSH */
1614
1615 if (! NO_PROFILE_COUNTERS)
1616 {
1617 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1618 switch_to_section (data_section);
1619 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1620 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1621 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1622 }
1623
1624 switch_to_section (current_function_section ());
1625
1626 #ifdef ASM_OUTPUT_REG_PUSH
1627 if (sval && REG_P (sval))
1628 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1629 if (chain && REG_P (chain))
1630 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1631 #endif
1632
1633 FUNCTION_PROFILER (file, current_function_funcdef_no);
1634
1635 #ifdef ASM_OUTPUT_REG_PUSH
1636 if (chain && REG_P (chain))
1637 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1638 if (sval && REG_P (sval))
1639 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1640 #endif
1641 }
1642
1643 /* Output assembler code for the end of a function.
1644 For clarity, args are same as those of `final_start_function'
1645 even though not all of them are needed. */
1646
1647 void
1648 final_end_function (void)
1649 {
1650 app_disable ();
1651
1652 if (!DECL_IGNORED_P (current_function_decl))
1653 debug_hooks->end_function (high_function_linenum);
1654
1655 /* Finally, output the function epilogue:
1656 code to restore the stack frame and return to the caller. */
1657 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1658
1659 /* And debug output. */
1660 if (!DECL_IGNORED_P (current_function_decl))
1661 debug_hooks->end_epilogue (last_linenum, last_filename);
1662
1663 #if defined (DWARF2_UNWIND_INFO)
1664 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1665 && dwarf2out_do_frame ())
1666 dwarf2out_end_epilogue (last_linenum, last_filename);
1667 #endif
1668 }
1669 \f
1670 /* Output assembler code for some insns: all or part of a function.
1671 For description of args, see `final_start_function', above. */
1672
1673 void
1674 final (rtx first, FILE *file, int optimize)
1675 {
1676 rtx insn;
1677 int max_uid = 0;
1678 int seen = 0;
1679
1680 last_ignored_compare = 0;
1681
1682 for (insn = first; insn; insn = NEXT_INSN (insn))
1683 {
1684 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1685 max_uid = INSN_UID (insn);
1686 #ifdef HAVE_cc0
1687 /* If CC tracking across branches is enabled, record the insn which
1688 jumps to each branch only reached from one place. */
1689 if (optimize && JUMP_P (insn))
1690 {
1691 rtx lab = JUMP_LABEL (insn);
1692 if (lab && LABEL_NUSES (lab) == 1)
1693 {
1694 LABEL_REFS (lab) = insn;
1695 }
1696 }
1697 #endif
1698 }
1699
1700 init_recog ();
1701
1702 CC_STATUS_INIT;
1703
1704 /* Output the insns. */
1705 for (insn = first; insn;)
1706 {
1707 #ifdef HAVE_ATTR_length
1708 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1709 {
1710 /* This can be triggered by bugs elsewhere in the compiler if
1711 new insns are created after init_insn_lengths is called. */
1712 gcc_assert (NOTE_P (insn));
1713 insn_current_address = -1;
1714 }
1715 else
1716 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1717 #endif /* HAVE_ATTR_length */
1718
1719 insn = final_scan_insn (insn, file, optimize, 0, &seen);
1720 }
1721 }
1722 \f
1723 const char *
1724 get_insn_template (int code, rtx insn)
1725 {
1726 switch (insn_data[code].output_format)
1727 {
1728 case INSN_OUTPUT_FORMAT_SINGLE:
1729 return insn_data[code].output.single;
1730 case INSN_OUTPUT_FORMAT_MULTI:
1731 return insn_data[code].output.multi[which_alternative];
1732 case INSN_OUTPUT_FORMAT_FUNCTION:
1733 gcc_assert (insn);
1734 return (*insn_data[code].output.function) (recog_data.operand, insn);
1735
1736 default:
1737 gcc_unreachable ();
1738 }
1739 }
1740
1741 /* Emit the appropriate declaration for an alternate-entry-point
1742 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1743 LABEL_KIND != LABEL_NORMAL.
1744
1745 The case fall-through in this function is intentional. */
1746 static void
1747 output_alternate_entry_point (FILE *file, rtx insn)
1748 {
1749 const char *name = LABEL_NAME (insn);
1750
1751 switch (LABEL_KIND (insn))
1752 {
1753 case LABEL_WEAK_ENTRY:
1754 #ifdef ASM_WEAKEN_LABEL
1755 ASM_WEAKEN_LABEL (file, name);
1756 #endif
1757 case LABEL_GLOBAL_ENTRY:
1758 targetm.asm_out.globalize_label (file, name);
1759 case LABEL_STATIC_ENTRY:
1760 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1761 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1762 #endif
1763 ASM_OUTPUT_LABEL (file, name);
1764 break;
1765
1766 case LABEL_NORMAL:
1767 default:
1768 gcc_unreachable ();
1769 }
1770 }
1771
1772 /* Given a CALL_INSN, find and return the nested CALL. */
1773 static rtx
1774 call_from_call_insn (rtx insn)
1775 {
1776 rtx x;
1777 gcc_assert (CALL_P (insn));
1778 x = PATTERN (insn);
1779
1780 while (GET_CODE (x) != CALL)
1781 {
1782 switch (GET_CODE (x))
1783 {
1784 default:
1785 gcc_unreachable ();
1786 case COND_EXEC:
1787 x = COND_EXEC_CODE (x);
1788 break;
1789 case PARALLEL:
1790 x = XVECEXP (x, 0, 0);
1791 break;
1792 case SET:
1793 x = XEXP (x, 1);
1794 break;
1795 }
1796 }
1797 return x;
1798 }
1799
1800 /* The final scan for one insn, INSN.
1801 Args are same as in `final', except that INSN
1802 is the insn being scanned.
1803 Value returned is the next insn to be scanned.
1804
1805 NOPEEPHOLES is the flag to disallow peephole processing (currently
 1806    used within delayed branch sequence output).
1807
1808 SEEN is used to track the end of the prologue, for emitting
1809 debug information. We force the emission of a line note after
1810 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1811 at the beginning of the second basic block, whichever comes
1812 first. */
1813
1814 rtx
1815 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1816 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1817 {
1818 #ifdef HAVE_cc0
1819 rtx set;
1820 #endif
1821 rtx next;
1822
1823 insn_counter++;
1824
1825 /* Ignore deleted insns. These can occur when we split insns (due to a
1826 template of "#") while not optimizing. */
1827 if (INSN_DELETED_P (insn))
1828 return NEXT_INSN (insn);
1829
1830 switch (GET_CODE (insn))
1831 {
1832 case NOTE:
1833 switch (NOTE_KIND (insn))
1834 {
1835 case NOTE_INSN_DELETED:
1836 break;
1837
1838 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1839 in_cold_section_p = !in_cold_section_p;
1840 #ifdef DWARF2_UNWIND_INFO
1841 if (dwarf2out_do_frame ())
1842 dwarf2out_switch_text_section ();
1843 else
1844 #endif
1845 if (!DECL_IGNORED_P (current_function_decl))
1846 debug_hooks->switch_text_section ();
1847
1848 switch_to_section (current_function_section ());
1849 break;
1850
1851 case NOTE_INSN_BASIC_BLOCK:
1852 #ifdef TARGET_UNWIND_INFO
1853 targetm.asm_out.unwind_emit (asm_out_file, insn);
1854 #endif
1855
1856 if (flag_debug_asm)
1857 fprintf (asm_out_file, "\t%s basic block %d\n",
1858 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1859
1860 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1861 {
1862 *seen |= SEEN_EMITTED;
1863 force_source_line = true;
1864 }
1865 else
1866 *seen |= SEEN_BB;
1867
1868 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
1869
1870 break;
1871
1872 case NOTE_INSN_EH_REGION_BEG:
1873 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1874 NOTE_EH_HANDLER (insn));
1875 break;
1876
1877 case NOTE_INSN_EH_REGION_END:
1878 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1879 NOTE_EH_HANDLER (insn));
1880 break;
1881
1882 case NOTE_INSN_PROLOGUE_END:
1883 targetm.asm_out.function_end_prologue (file);
1884 profile_after_prologue (file);
1885
1886 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1887 {
1888 *seen |= SEEN_EMITTED;
1889 force_source_line = true;
1890 }
1891 else
1892 *seen |= SEEN_NOTE;
1893
1894 break;
1895
1896 case NOTE_INSN_EPILOGUE_BEG:
1897 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_epilogue)
1898 if (dwarf2out_do_frame ())
1899 dwarf2out_cfi_begin_epilogue (insn);
1900 #endif
1901 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
1902 targetm.asm_out.function_begin_epilogue (file);
1903 break;
1904
1905 case NOTE_INSN_CFA_RESTORE_STATE:
1906 #if defined (DWARF2_UNWIND_INFO)
1907 dwarf2out_frame_debug_restore_state ();
1908 #endif
1909 break;
1910
1911 case NOTE_INSN_FUNCTION_BEG:
1912 app_disable ();
1913 if (!DECL_IGNORED_P (current_function_decl))
1914 debug_hooks->end_prologue (last_linenum, last_filename);
1915
1916 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1917 {
1918 *seen |= SEEN_EMITTED;
1919 force_source_line = true;
1920 }
1921 else
1922 *seen |= SEEN_NOTE;
1923
1924 break;
1925
1926 case NOTE_INSN_BLOCK_BEG:
1927 if (debug_info_level == DINFO_LEVEL_NORMAL
1928 || debug_info_level == DINFO_LEVEL_VERBOSE
1929 || write_symbols == DWARF2_DEBUG
1930 || write_symbols == VMS_AND_DWARF2_DEBUG
1931 || write_symbols == VMS_DEBUG)
1932 {
1933 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1934
1935 app_disable ();
1936 ++block_depth;
1937 high_block_linenum = last_linenum;
1938
1939 /* Output debugging info about the symbol-block beginning. */
1940 if (!DECL_IGNORED_P (current_function_decl))
1941 debug_hooks->begin_block (last_linenum, n);
1942
1943 /* Mark this block as output. */
1944 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1945 }
1946 if (write_symbols == DBX_DEBUG
1947 || write_symbols == SDB_DEBUG)
1948 {
1949 location_t *locus_ptr
1950 = block_nonartificial_location (NOTE_BLOCK (insn));
1951
1952 if (locus_ptr != NULL)
1953 {
1954 override_filename = LOCATION_FILE (*locus_ptr);
1955 override_linenum = LOCATION_LINE (*locus_ptr);
1956 }
1957 }
1958 break;
1959
1960 case NOTE_INSN_BLOCK_END:
1961 if (debug_info_level == DINFO_LEVEL_NORMAL
1962 || debug_info_level == DINFO_LEVEL_VERBOSE
1963 || write_symbols == DWARF2_DEBUG
1964 || write_symbols == VMS_AND_DWARF2_DEBUG
1965 || write_symbols == VMS_DEBUG)
1966 {
1967 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1968
1969 app_disable ();
1970
1971 /* End of a symbol-block. */
1972 --block_depth;
1973 gcc_assert (block_depth >= 0);
1974
1975 if (!DECL_IGNORED_P (current_function_decl))
1976 debug_hooks->end_block (high_block_linenum, n);
1977 }
1978 if (write_symbols == DBX_DEBUG
1979 || write_symbols == SDB_DEBUG)
1980 {
1981 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
1982 location_t *locus_ptr
1983 = block_nonartificial_location (outer_block);
1984
1985 if (locus_ptr != NULL)
1986 {
1987 override_filename = LOCATION_FILE (*locus_ptr);
1988 override_linenum = LOCATION_LINE (*locus_ptr);
1989 }
1990 else
1991 {
1992 override_filename = NULL;
1993 override_linenum = 0;
1994 }
1995 }
1996 break;
1997
1998 case NOTE_INSN_DELETED_LABEL:
1999 /* Emit the label. We may have deleted the CODE_LABEL because
2000 the label could be proved to be unreachable, though still
2001 referenced (in the form of having its address taken). */
2002 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2003 break;
2004
2005 case NOTE_INSN_VAR_LOCATION:
2006 if (!DECL_IGNORED_P (current_function_decl))
2007 debug_hooks->var_location (insn);
2008 break;
2009
2010 default:
2011 gcc_unreachable ();
2012 break;
2013 }
2014 break;
2015
2016 case BARRIER:
2017 #if defined (DWARF2_UNWIND_INFO)
2018 if (dwarf2out_do_frame ())
2019 dwarf2out_frame_debug (insn, false);
2020 #endif
2021 break;
2022
2023 case CODE_LABEL:
2024 /* The target port might emit labels in the output function for
2025 some insn, e.g. sh.c output_branchy_insn. */
2026 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2027 {
2028 int align = LABEL_TO_ALIGNMENT (insn);
2029 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2030 int max_skip = LABEL_TO_MAX_SKIP (insn);
2031 #endif
2032
2033 if (align && NEXT_INSN (insn))
2034 {
2035 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2036 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2037 #else
2038 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2039 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2040 #else
2041 ASM_OUTPUT_ALIGN (file, align);
2042 #endif
2043 #endif
2044 }
2045 }
2046 #ifdef HAVE_cc0
2047 CC_STATUS_INIT;
2048 #endif
2049
2050 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2051 debug_hooks->label (insn);
2052
2053 app_disable ();
2054
2055 next = next_nonnote_insn (insn);
2056 /* If this label is followed by a jump-table, make sure we put
2057 the label in the read-only section. Also possibly write the
2058 label and jump table together. */
2059 if (next != 0 && JUMP_TABLE_DATA_P (next))
2060 {
2061 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2062 /* In this case, the case vector is being moved by the
2063 target, so don't output the label at all. Leave that
2064 to the back end macros. */
2065 #else
2066 if (! JUMP_TABLES_IN_TEXT_SECTION)
2067 {
2068 int log_align;
2069
2070 switch_to_section (targetm.asm_out.function_rodata_section
2071 (current_function_decl));
2072
2073 #ifdef ADDR_VEC_ALIGN
2074 log_align = ADDR_VEC_ALIGN (next);
2075 #else
2076 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2077 #endif
2078 ASM_OUTPUT_ALIGN (file, log_align);
2079 }
2080 else
2081 switch_to_section (current_function_section ());
2082
2083 #ifdef ASM_OUTPUT_CASE_LABEL
2084 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2085 next);
2086 #else
2087 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2088 #endif
2089 #endif
2090 break;
2091 }
2092 if (LABEL_ALT_ENTRY_P (insn))
2093 output_alternate_entry_point (file, insn);
2094 else
2095 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2096 break;
2097
2098 default:
2099 {
2100 rtx body = PATTERN (insn);
2101 int insn_code_number;
2102 const char *templ;
2103 bool is_stmt;
2104
2105 /* Reset this early so it is correct for ASM statements. */
2106 current_insn_predicate = NULL_RTX;
2107
2108 /* An INSN, JUMP_INSN or CALL_INSN.
2109 First check for special kinds that recog doesn't recognize. */
2110
2111 if (GET_CODE (body) == USE /* These are just declarations. */
2112 || GET_CODE (body) == CLOBBER)
2113 break;
2114
2115 #ifdef HAVE_cc0
2116 {
2117 /* If there is a REG_CC_SETTER note on this insn, it means that
2118 the setting of the condition code was done in the delay slot
2119 of the insn that branched here. So recover the cc status
2120 from the insn that set it. */
2121
2122 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2123 if (note)
2124 {
2125 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2126 cc_prev_status = cc_status;
2127 }
2128 }
2129 #endif
2130
2131 /* Detect insns that are really jump-tables
2132 and output them as such. */
2133
2134 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2135 {
2136 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2137 int vlen, idx;
2138 #endif
2139
2140 if (! JUMP_TABLES_IN_TEXT_SECTION)
2141 switch_to_section (targetm.asm_out.function_rodata_section
2142 (current_function_decl));
2143 else
2144 switch_to_section (current_function_section ());
2145
2146 app_disable ();
2147
2148 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2149 if (GET_CODE (body) == ADDR_VEC)
2150 {
2151 #ifdef ASM_OUTPUT_ADDR_VEC
2152 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2153 #else
2154 gcc_unreachable ();
2155 #endif
2156 }
2157 else
2158 {
2159 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2160 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2161 #else
2162 gcc_unreachable ();
2163 #endif
2164 }
2165 #else
2166 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2167 for (idx = 0; idx < vlen; idx++)
2168 {
2169 if (GET_CODE (body) == ADDR_VEC)
2170 {
2171 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2172 ASM_OUTPUT_ADDR_VEC_ELT
2173 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2174 #else
2175 gcc_unreachable ();
2176 #endif
2177 }
2178 else
2179 {
2180 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2181 ASM_OUTPUT_ADDR_DIFF_ELT
2182 (file,
2183 body,
2184 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2185 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2186 #else
2187 gcc_unreachable ();
2188 #endif
2189 }
2190 }
2191 #ifdef ASM_OUTPUT_CASE_END
2192 ASM_OUTPUT_CASE_END (file,
2193 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2194 insn);
2195 #endif
2196 #endif
2197
2198 switch_to_section (current_function_section ());
2199
2200 break;
2201 }
2202 /* Output this line note if it is the first or the last line
2203 note in a row. */
2204 if (!DECL_IGNORED_P (current_function_decl)
2205 && notice_source_line (insn, &is_stmt))
2206 (*debug_hooks->source_line) (last_linenum, last_filename,
2207 last_discriminator, is_stmt);
2208
2209 if (GET_CODE (body) == ASM_INPUT)
2210 {
2211 const char *string = XSTR (body, 0);
2212
2213 /* There's no telling what that did to the condition codes. */
2214 CC_STATUS_INIT;
2215
2216 if (string[0])
2217 {
2218 expanded_location loc;
2219
2220 app_enable ();
2221 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2222 if (*loc.file && loc.line)
2223 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2224 ASM_COMMENT_START, loc.line, loc.file);
2225 fprintf (asm_out_file, "\t%s\n", string);
2226 #if HAVE_AS_LINE_ZERO
2227 if (*loc.file && loc.line)
2228 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2229 #endif
2230 }
2231 break;
2232 }
2233
2234 /* Detect `asm' construct with operands. */
2235 if (asm_noperands (body) >= 0)
2236 {
2237 unsigned int noperands = asm_noperands (body);
2238 rtx *ops = XALLOCAVEC (rtx, noperands);
2239 const char *string;
2240 location_t loc;
2241 expanded_location expanded;
2242
2243 /* There's no telling what that did to the condition codes. */
2244 CC_STATUS_INIT;
2245
2246 /* Get out the operand values. */
2247 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2248 /* Inhibit dying on what would otherwise be compiler bugs. */
2249 insn_noperands = noperands;
2250 this_is_asm_operands = insn;
2251 expanded = expand_location (loc);
2252
2253 #ifdef FINAL_PRESCAN_INSN
2254 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2255 #endif
2256
2257 /* Output the insn using them. */
2258 if (string[0])
2259 {
2260 app_enable ();
2261 if (expanded.file && expanded.line)
2262 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2263 ASM_COMMENT_START, expanded.line, expanded.file);
2264 output_asm_insn (string, ops);
2265 #if HAVE_AS_LINE_ZERO
2266 if (expanded.file && expanded.line)
2267 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2268 #endif
2269 }
2270
2271 if (targetm.asm_out.final_postscan_insn)
2272 targetm.asm_out.final_postscan_insn (file, insn, ops,
2273 insn_noperands);
2274
2275 this_is_asm_operands = 0;
2276 break;
2277 }
2278
2279 app_disable ();
2280
2281 if (GET_CODE (body) == SEQUENCE)
2282 {
2283 /* A delayed-branch sequence */
2284 int i;
2285
2286 final_sequence = body;
2287
2288 /* Record the delay slots' frame information before the branch.
2289 This is needed for delayed calls: see execute_cfa_program(). */
2290 #if defined (DWARF2_UNWIND_INFO)
2291 if (dwarf2out_do_frame ())
2292 for (i = 1; i < XVECLEN (body, 0); i++)
2293 dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2294 #endif
2295
2296 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2297 force the restoration of a comparison that was previously
2298 thought unnecessary. If that happens, cancel this sequence
2299 and cause that insn to be restored. */
2300
2301 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2302 if (next != XVECEXP (body, 0, 1))
2303 {
2304 final_sequence = 0;
2305 return next;
2306 }
2307
2308 for (i = 1; i < XVECLEN (body, 0); i++)
2309 {
2310 rtx insn = XVECEXP (body, 0, i);
2311 rtx next = NEXT_INSN (insn);
2312 /* We loop in case any instruction in a delay slot gets
2313 split. */
2314 do
2315 insn = final_scan_insn (insn, file, 0, 1, seen);
2316 while (insn != next);
2317 }
2318 #ifdef DBR_OUTPUT_SEQEND
2319 DBR_OUTPUT_SEQEND (file);
2320 #endif
2321 final_sequence = 0;
2322
2323 /* If the insn requiring the delay slot was a CALL_INSN, the
2324 insns in the delay slot are actually executed before the
2325 called function. Hence we don't preserve any CC-setting
2326 actions in these insns and the CC must be marked as being
2327 clobbered by the function. */
2328 if (CALL_P (XVECEXP (body, 0, 0)))
2329 {
2330 CC_STATUS_INIT;
2331 }
2332 break;
2333 }
2334
2335 /* We have a real machine instruction as rtl. */
2336
2337 body = PATTERN (insn);
2338
2339 #ifdef HAVE_cc0
2340 set = single_set (insn);
2341
2342 /* Check for redundant test and compare instructions
2343 (when the condition codes are already set up as desired).
2344 This is done only when optimizing; if not optimizing,
2345 it should be possible for the user to alter a variable
2346 with the debugger in between statements
2347 and the next statement should reexamine the variable
2348 to compute the condition codes. */
2349
2350 if (optimize)
2351 {
2352 if (set
2353 && GET_CODE (SET_DEST (set)) == CC0
2354 && insn != last_ignored_compare)
2355 {
2356 rtx src1, src2;
2357 if (GET_CODE (SET_SRC (set)) == SUBREG)
2358 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2359
2360 src1 = SET_SRC (set);
2361 src2 = NULL_RTX;
2362 if (GET_CODE (SET_SRC (set)) == COMPARE)
2363 {
2364 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2365 XEXP (SET_SRC (set), 0)
2366 = alter_subreg (&XEXP (SET_SRC (set), 0));
2367 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2368 XEXP (SET_SRC (set), 1)
2369 = alter_subreg (&XEXP (SET_SRC (set), 1));
2370 if (XEXP (SET_SRC (set), 1)
2371 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2372 src2 = XEXP (SET_SRC (set), 0);
2373 }
2374 if ((cc_status.value1 != 0
2375 && rtx_equal_p (src1, cc_status.value1))
2376 || (cc_status.value2 != 0
2377 && rtx_equal_p (src1, cc_status.value2))
2378 || (src2 != 0 && cc_status.value1 != 0
2379 && rtx_equal_p (src2, cc_status.value1))
2380 || (src2 != 0 && cc_status.value2 != 0
2381 && rtx_equal_p (src2, cc_status.value2)))
2382 {
2383 /* Don't delete insn if it has an addressing side-effect. */
2384 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2385 /* or if anything in it is volatile. */
2386 && ! volatile_refs_p (PATTERN (insn)))
2387 {
2388 /* We don't really delete the insn; just ignore it. */
2389 last_ignored_compare = insn;
2390 break;
2391 }
2392 }
2393 }
2394 }
2395
2396 /* If this is a conditional branch, maybe modify it
2397 if the cc's are in a nonstandard state
2398 so that it accomplishes the same thing that it would
2399 do straightforwardly if the cc's were set up normally. */
2400
2401 if (cc_status.flags != 0
2402 && JUMP_P (insn)
2403 && GET_CODE (body) == SET
2404 && SET_DEST (body) == pc_rtx
2405 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2406 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2407 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2408 {
2409 /* This function may alter the contents of its argument
2410 and clear some of the cc_status.flags bits.
2411 It may also return 1 meaning condition now always true
2412 or -1 meaning condition now always false
2413 or 2 meaning condition nontrivial but altered. */
2414 int result = alter_cond (XEXP (SET_SRC (body), 0));
2415 /* If condition now has fixed value, replace the IF_THEN_ELSE
2416 with its then-operand or its else-operand. */
2417 if (result == 1)
2418 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2419 if (result == -1)
2420 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2421
2422 /* The jump is now either unconditional or a no-op.
2423 If it has become a no-op, don't try to output it.
2424 (It would not be recognized.) */
2425 if (SET_SRC (body) == pc_rtx)
2426 {
2427 delete_insn (insn);
2428 break;
2429 }
2430 else if (GET_CODE (SET_SRC (body)) == RETURN)
2431 /* Replace (set (pc) (return)) with (return). */
2432 PATTERN (insn) = body = SET_SRC (body);
2433
2434 /* Rerecognize the instruction if it has changed. */
2435 if (result != 0)
2436 INSN_CODE (insn) = -1;
2437 }
2438
2439 /* If this is a conditional trap, maybe modify it if the cc's
2440 are in a nonstandard state so that it accomplishes the same
2441 thing that it would do straightforwardly if the cc's were
2442 set up normally. */
2443 if (cc_status.flags != 0
2444 && NONJUMP_INSN_P (insn)
2445 && GET_CODE (body) == TRAP_IF
2446 && COMPARISON_P (TRAP_CONDITION (body))
2447 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2448 {
2449 /* This function may alter the contents of its argument
2450 and clear some of the cc_status.flags bits.
2451 It may also return 1 meaning condition now always true
2452 or -1 meaning condition now always false
2453 or 2 meaning condition nontrivial but altered. */
2454 int result = alter_cond (TRAP_CONDITION (body));
2455
2456 /* If TRAP_CONDITION has become always false, delete the
2457 instruction. */
2458 if (result == -1)
2459 {
2460 delete_insn (insn);
2461 break;
2462 }
2463
2464 /* If TRAP_CONDITION has become always true, replace
2465 TRAP_CONDITION with const_true_rtx. */
2466 if (result == 1)
2467 TRAP_CONDITION (body) = const_true_rtx;
2468
2469 /* Rerecognize the instruction if it has changed. */
2470 if (result != 0)
2471 INSN_CODE (insn) = -1;
2472 }
2473
2474 /* Make same adjustments to instructions that examine the
2475 condition codes without jumping and instructions that
2476 handle conditional moves (if this machine has either one). */
2477
2478 if (cc_status.flags != 0
2479 && set != 0)
2480 {
2481 rtx cond_rtx, then_rtx, else_rtx;
2482
2483 if (!JUMP_P (insn)
2484 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2485 {
2486 cond_rtx = XEXP (SET_SRC (set), 0);
2487 then_rtx = XEXP (SET_SRC (set), 1);
2488 else_rtx = XEXP (SET_SRC (set), 2);
2489 }
2490 else
2491 {
2492 cond_rtx = SET_SRC (set);
2493 then_rtx = const_true_rtx;
2494 else_rtx = const0_rtx;
2495 }
2496
2497 switch (GET_CODE (cond_rtx))
2498 {
2499 case GTU:
2500 case GT:
2501 case LTU:
2502 case LT:
2503 case GEU:
2504 case GE:
2505 case LEU:
2506 case LE:
2507 case EQ:
2508 case NE:
2509 {
2510 int result;
2511 if (XEXP (cond_rtx, 0) != cc0_rtx)
2512 break;
2513 result = alter_cond (cond_rtx);
2514 if (result == 1)
2515 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2516 else if (result == -1)
2517 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2518 else if (result == 2)
2519 INSN_CODE (insn) = -1;
2520 if (SET_DEST (set) == SET_SRC (set))
2521 delete_insn (insn);
2522 }
2523 break;
2524
2525 default:
2526 break;
2527 }
2528 }
2529
2530 #endif
2531
2532 #ifdef HAVE_peephole
2533 /* Do machine-specific peephole optimizations if desired. */
2534
2535 if (optimize && !flag_no_peephole && !nopeepholes)
2536 {
2537 rtx next = peephole (insn);
2538 /* When peepholing, if there were notes within the peephole,
2539 emit them before the peephole. */
2540 if (next != 0 && next != NEXT_INSN (insn))
2541 {
2542 rtx note, prev = PREV_INSN (insn);
2543
2544 for (note = NEXT_INSN (insn); note != next;
2545 note = NEXT_INSN (note))
2546 final_scan_insn (note, file, optimize, nopeepholes, seen);
2547
2548 /* Put the notes in the proper position for a later
2549 rescan. For example, the SH target can do this
2550 when generating a far jump in a delayed branch
2551 sequence. */
2552 note = NEXT_INSN (insn);
2553 PREV_INSN (note) = prev;
2554 NEXT_INSN (prev) = note;
2555 NEXT_INSN (PREV_INSN (next)) = insn;
2556 PREV_INSN (insn) = PREV_INSN (next);
2557 NEXT_INSN (insn) = next;
2558 PREV_INSN (next) = insn;
2559 }
2560
2561 /* PEEPHOLE might have changed this. */
2562 body = PATTERN (insn);
2563 }
2564 #endif
2565
2566 /* Try to recognize the instruction.
2567 If successful, verify that the operands satisfy the
2568 constraints for the instruction. Crash if they don't,
2569 since `reload' should have changed them so that they do. */
2570
2571 insn_code_number = recog_memoized (insn);
2572 cleanup_subreg_operands (insn);
2573
2574 /* Dump the insn in the assembly for debugging. */
2575 if (flag_dump_rtl_in_asm)
2576 {
2577 print_rtx_head = ASM_COMMENT_START;
2578 print_rtl_single (asm_out_file, insn);
2579 print_rtx_head = "";
2580 }
2581
2582 if (! constrain_operands_cached (1))
2583 fatal_insn_not_found (insn);
2584
2585 /* Some target machines need to prescan each insn before
2586 it is output. */
2587
2588 #ifdef FINAL_PRESCAN_INSN
2589 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2590 #endif
2591
2592 if (targetm.have_conditional_execution ()
2593 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2594 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2595
2596 #ifdef HAVE_cc0
2597 cc_prev_status = cc_status;
2598
2599 /* Update `cc_status' for this instruction.
2600 The instruction's output routine may change it further.
2601 If the output routine for a jump insn needs to depend
2602 on the cc status, it should look at cc_prev_status. */
2603
2604 NOTICE_UPDATE_CC (body, insn);
2605 #endif
2606
2607 current_output_insn = debug_insn = insn;
2608
2609 #if defined (DWARF2_UNWIND_INFO)
2610 if (CALL_P (insn) && dwarf2out_do_frame ())
2611 dwarf2out_frame_debug (insn, false);
2612 #endif
2613
2614 /* Find the proper template for this insn. */
2615 templ = get_insn_template (insn_code_number, insn);
2616
2617 /* If the C code returns 0, it means that it is a jump insn
2618 which follows a deleted test insn, and that test insn
2619 needs to be reinserted. */
2620 if (templ == 0)
2621 {
2622 rtx prev;
2623
2624 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2625
2626 /* We have already processed the notes between the setter and
2627 the user. Make sure we don't process them again, this is
2628 particularly important if one of the notes is a block
2629 scope note or an EH note. */
2630 for (prev = insn;
2631 prev != last_ignored_compare;
2632 prev = PREV_INSN (prev))
2633 {
2634 if (NOTE_P (prev))
2635 delete_insn (prev); /* Use delete_note. */
2636 }
2637
2638 return prev;
2639 }
2640
2641 /* If the template is the string "#", it means that this insn must
2642 be split. */
2643 if (templ[0] == '#' && templ[1] == '\0')
2644 {
2645 rtx new_rtx = try_split (body, insn, 0);
2646
2647 /* If we didn't split the insn, go away. */
2648 if (new_rtx == insn && PATTERN (new_rtx) == body)
2649 fatal_insn ("could not split insn", insn);
2650
2651 #ifdef HAVE_ATTR_length
2652 /* This instruction should have been split in shorten_branches,
2653 to ensure that we would have valid length info for the
2654 splitees. */
2655 gcc_unreachable ();
2656 #endif
2657
2658 return new_rtx;
2659 }
2660
2661 #ifdef TARGET_UNWIND_INFO
2662 /* ??? This will put the directives in the wrong place if
2663 get_insn_template outputs assembly directly. However, calling it
2664 before get_insn_template breaks if the insn is split. */
2665 targetm.asm_out.unwind_emit (asm_out_file, insn);
2666 #endif
2667
2668 if (CALL_P (insn))
2669 {
2670 rtx x = call_from_call_insn (insn);
2671 x = XEXP (x, 0);
2672 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2673 {
2674 tree t;
2675 x = XEXP (x, 0);
2676 t = SYMBOL_REF_DECL (x);
2677 if (t)
2678 assemble_external (t);
2679 }
2680 }
2681
2682 /* Output assembler code from the template. */
2683 output_asm_insn (templ, recog_data.operand);
2684
2685 /* Record point-of-call information for ICF debugging. */
2686 if (flag_enable_icf_debug && CALL_P (insn))
2687 {
2688 rtx x = call_from_call_insn (insn);
2689 x = XEXP (x, 0);
2690 if (x && MEM_P (x))
2691 {
2692 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2693 {
2694 tree t;
2695 x = XEXP (x, 0);
2696 t = SYMBOL_REF_DECL (x);
2697 if (t)
2698 (*debug_hooks->direct_call) (t);
2699 }
2700 else
2701 (*debug_hooks->virtual_call) (INSN_UID (insn));
2702 }
2703 }
2704
2705 /* Some target machines need to postscan each insn after
2706 it is output. */
2707 if (targetm.asm_out.final_postscan_insn)
2708 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2709 recog_data.n_operands);
2710
2711 /* If necessary, report the effect that the instruction has on
2712 the unwind info. We've already done this for delay slots
2713 and call instructions. */
2714 #if defined (DWARF2_UNWIND_INFO)
2715 if (final_sequence == 0
2716 #if !defined (HAVE_prologue)
2717 && !ACCUMULATE_OUTGOING_ARGS
2718 #endif
2719 && dwarf2out_do_frame ())
2720 dwarf2out_frame_debug (insn, true);
2721 #endif
2722
2723 current_output_insn = debug_insn = 0;
2724 }
2725 }
2726 return NEXT_INSN (insn);
2727 }
2728 \f
2729 /* Return whether a source line note needs to be emitted before INSN.
2730 Sets IS_STMT to TRUE if the line should be marked as a possible
2731 breakpoint location. */
2732
2733 static bool
2734 notice_source_line (rtx insn, bool *is_stmt)
2735 {
2736 const char *filename;
2737 int linenum;
2738
2739 if (override_filename)
2740 {
2741 filename = override_filename;
2742 linenum = override_linenum;
2743 }
2744 else
2745 {
2746 filename = insn_file (insn);
2747 linenum = insn_line (insn);
2748 }
2749
2750 if (filename == NULL)
2751 return false;
2752
2753 if (force_source_line
2754 || filename != last_filename
2755 || last_linenum != linenum)
2756 {
2757 force_source_line = false;
2758 last_filename = filename;
2759 last_linenum = linenum;
2760 last_discriminator = discriminator;
2761 *is_stmt = true;
2762 high_block_linenum = MAX (last_linenum, high_block_linenum);
2763 high_function_linenum = MAX (last_linenum, high_function_linenum);
2764 return true;
2765 }
2766
2767 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
2768 {
2769 /* If the discriminator changed, but the line number did not,
2770 output the line table entry with is_stmt false so the
2771 debugger does not treat this as a breakpoint location. */
2772 last_discriminator = discriminator;
2773 *is_stmt = false;
2774 return true;
2775 }
2776
2777 return false;
2778 }
2779 \f
2780 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2781 directly to the desired hard register. */
2782
2783 void
2784 cleanup_subreg_operands (rtx insn)
2785 {
2786 int i;
2787 bool changed = false;
2788 extract_insn_cached (insn);
2789 for (i = 0; i < recog_data.n_operands; i++)
2790 {
2791 /* The following test cannot use recog_data.operand when testing
2792 for a SUBREG: the underlying object might have been changed
2793 already if we are inside a match_operator expression that
2794 matches the else clause. Instead we test the underlying
2795 expression directly. */
2796 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2797 {
2798 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2799 changed = true;
2800 }
2801 else if (GET_CODE (recog_data.operand[i]) == PLUS
2802 || GET_CODE (recog_data.operand[i]) == MULT
2803 || MEM_P (recog_data.operand[i]))
2804 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2805 }
2806
2807 for (i = 0; i < recog_data.n_dups; i++)
2808 {
2809 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2810 {
2811 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2812 changed = true;
2813 }
2814 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2815 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2816 || MEM_P (*recog_data.dup_loc[i]))
2817 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2818 }
2819 if (changed)
2820 df_insn_rescan (insn);
2821 }
2822
2823 /* If X is a SUBREG, replace it with a REG or a MEM,
2824 based on the thing it is a subreg of. */
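/* For example, (subreg:QI (mem:SI X) 3) becomes a QImode MEM at byte
   offset 3 from X, while (subreg:SI (reg:DI n) 0) becomes the SImode
   hard register that overlaps byte 0 of register n; which hard register
   that is depends on the target's word order and register layout.  */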
2825
2826 rtx
2827 alter_subreg (rtx *xp)
2828 {
2829 rtx x = *xp;
2830 rtx y = SUBREG_REG (x);
2831
2832 /* simplify_subreg does not remove subreg from volatile references.
2833 We are required to. */
2834 if (MEM_P (y))
2835 {
2836 int offset = SUBREG_BYTE (x);
2837
2838 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2839 contains 0 instead of the proper offset. See simplify_subreg. */
2840 if (offset == 0
2841 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2842 {
2843 int difference = GET_MODE_SIZE (GET_MODE (y))
2844 - GET_MODE_SIZE (GET_MODE (x));
2845 if (WORDS_BIG_ENDIAN)
2846 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2847 if (BYTES_BIG_ENDIAN)
2848 offset += difference % UNITS_PER_WORD;
2849 }
2850
2851 *xp = adjust_address (y, GET_MODE (x), offset);
2852 }
2853 else
2854 {
2855 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2856 SUBREG_BYTE (x));
2857
2858 if (new_rtx != 0)
2859 *xp = new_rtx;
2860 else if (REG_P (y))
2861 {
2862 /* Simplify_subreg can't handle some REG cases, but we have to. */
2863 unsigned int regno;
2864 HOST_WIDE_INT offset;
2865
2866 regno = subreg_regno (x);
2867 if (subreg_lowpart_p (x))
2868 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
2869 else
2870 offset = SUBREG_BYTE (x);
2871 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
2872 }
2873 }
2874
2875 return *xp;
2876 }
2877
2878 /* Do alter_subreg on all the SUBREGs contained in X. */
2879
2880 static rtx
2881 walk_alter_subreg (rtx *xp, bool *changed)
2882 {
2883 rtx x = *xp;
2884 switch (GET_CODE (x))
2885 {
2886 case PLUS:
2887 case MULT:
2888 case AND:
2889 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2890 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
2891 break;
2892
2893 case MEM:
2894 case ZERO_EXTEND:
2895 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2896 break;
2897
2898 case SUBREG:
2899 *changed = true;
2900 return alter_subreg (xp);
2901
2902 default:
2903 break;
2904 }
2905
2906 return *xp;
2907 }
2908 \f
2909 #ifdef HAVE_cc0
2910
2911 /* Given BODY, the body of a jump instruction, alter the jump condition
2912 as required by the bits that are set in cc_status.flags.
2913 Not all of the bits there can be handled at this level in all cases.
2914
2915 The value is normally 0.
2916 1 means that the condition has become always true.
2917 -1 means that the condition has become always false.
2918 2 means that COND has been altered. */
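/* For instance, with CC_REVERSED set, a test such as
   (gt (cc0) (const_int 0)) is rewritten as (lt ...) and 2 is returned,
   while with CC_NOT_NEGATIVE set a (ge ...) test is known to always
   succeed, so 1 is returned.  */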
2919
2920 static int
2921 alter_cond (rtx cond)
2922 {
2923 int value = 0;
2924
2925 if (cc_status.flags & CC_REVERSED)
2926 {
2927 value = 2;
2928 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2929 }
2930
2931 if (cc_status.flags & CC_INVERTED)
2932 {
2933 value = 2;
2934 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2935 }
2936
2937 if (cc_status.flags & CC_NOT_POSITIVE)
2938 switch (GET_CODE (cond))
2939 {
2940 case LE:
2941 case LEU:
2942 case GEU:
2943 /* Jump becomes unconditional. */
2944 return 1;
2945
2946 case GT:
2947 case GTU:
2948 case LTU:
2949 /* Jump becomes no-op. */
2950 return -1;
2951
2952 case GE:
2953 PUT_CODE (cond, EQ);
2954 value = 2;
2955 break;
2956
2957 case LT:
2958 PUT_CODE (cond, NE);
2959 value = 2;
2960 break;
2961
2962 default:
2963 break;
2964 }
2965
2966 if (cc_status.flags & CC_NOT_NEGATIVE)
2967 switch (GET_CODE (cond))
2968 {
2969 case GE:
2970 case GEU:
2971 /* Jump becomes unconditional. */
2972 return 1;
2973
2974 case LT:
2975 case LTU:
2976 /* Jump becomes no-op. */
2977 return -1;
2978
2979 case LE:
2980 case LEU:
2981 PUT_CODE (cond, EQ);
2982 value = 2;
2983 break;
2984
2985 case GT:
2986 case GTU:
2987 PUT_CODE (cond, NE);
2988 value = 2;
2989 break;
2990
2991 default:
2992 break;
2993 }
2994
2995 if (cc_status.flags & CC_NO_OVERFLOW)
2996 switch (GET_CODE (cond))
2997 {
2998 case GEU:
2999 /* Jump becomes unconditional. */
3000 return 1;
3001
3002 case LEU:
3003 PUT_CODE (cond, EQ);
3004 value = 2;
3005 break;
3006
3007 case GTU:
3008 PUT_CODE (cond, NE);
3009 value = 2;
3010 break;
3011
3012 case LTU:
3013 /* Jump becomes no-op. */
3014 return -1;
3015
3016 default:
3017 break;
3018 }
3019
3020 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3021 switch (GET_CODE (cond))
3022 {
3023 default:
3024 gcc_unreachable ();
3025
3026 case NE:
3027 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3028 value = 2;
3029 break;
3030
3031 case EQ:
3032 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3033 value = 2;
3034 break;
3035 }
3036
3037 if (cc_status.flags & CC_NOT_SIGNED)
3038 /* The flags are valid if signed condition operators are converted
3039 to unsigned. */
3040 switch (GET_CODE (cond))
3041 {
3042 case LE:
3043 PUT_CODE (cond, LEU);
3044 value = 2;
3045 break;
3046
3047 case LT:
3048 PUT_CODE (cond, LTU);
3049 value = 2;
3050 break;
3051
3052 case GT:
3053 PUT_CODE (cond, GTU);
3054 value = 2;
3055 break;
3056
3057 case GE:
3058 PUT_CODE (cond, GEU);
3059 value = 2;
3060 break;
3061
3062 default:
3063 break;
3064 }
3065
3066 return value;
3067 }
3068 #endif
3069 \f
3070 /* Report inconsistency between the assembler template and the operands.
3071 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3072
3073 void
3074 output_operand_lossage (const char *cmsgid, ...)
3075 {
3076 char *fmt_string;
3077 char *new_message;
3078 const char *pfx_str;
3079 va_list ap;
3080
3081 va_start (ap, cmsgid);
3082
3083 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3084 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3085 vasprintf (&new_message, fmt_string, ap);
3086
3087 if (this_is_asm_operands)
3088 error_for_asm (this_is_asm_operands, "%s", new_message);
3089 else
3090 internal_error ("%s", new_message);
3091
3092 free (fmt_string);
3093 free (new_message);
3094 va_end (ap);
3095 }
3096 \f
3097 /* Output of assembler code from a template, and its subroutines. */
3098
3099 /* Annotate the assembly with a comment describing the pattern and
3100 alternative used. */
3101
3102 static void
3103 output_asm_name (void)
3104 {
3105 if (debug_insn)
3106 {
3107 int num = INSN_CODE (debug_insn);
3108 fprintf (asm_out_file, "\t%s %d\t%s",
3109 ASM_COMMENT_START, INSN_UID (debug_insn),
3110 insn_data[num].name);
3111 if (insn_data[num].n_alternatives > 1)
3112 fprintf (asm_out_file, "/%d", which_alternative + 1);
3113 #ifdef HAVE_ATTR_length
3114 fprintf (asm_out_file, "\t[length = %d]",
3115 get_attr_length (debug_insn));
3116 #endif
3117 /* Clear this so only the first assembler insn
3118 of any rtl insn will get the special comment for -dp. */
3119 debug_insn = 0;
3120 }
3121 }
3122
3123 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3124 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3125 corresponds to the address of the object and 0 if to the object. */
3126
3127 static tree
3128 get_mem_expr_from_op (rtx op, int *paddressp)
3129 {
3130 tree expr;
3131 int inner_addressp;
3132
3133 *paddressp = 0;
3134
3135 if (REG_P (op))
3136 return REG_EXPR (op);
3137 else if (!MEM_P (op))
3138 return 0;
3139
3140 if (MEM_EXPR (op) != 0)
3141 return MEM_EXPR (op);
3142
3143 /* Otherwise we have an address, so indicate it and look at the address. */
3144 *paddressp = 1;
3145 op = XEXP (op, 0);
3146
3147 /* First check if we have a decl for the address, then look at the right side
3148 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3149 But don't allow the address itself to be indirect. */
3150 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3151 return expr;
3152 else if (GET_CODE (op) == PLUS
3153 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3154 return expr;
3155
3156 while (UNARY_P (op)
3157 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3158 op = XEXP (op, 0);
3159
3160 expr = get_mem_expr_from_op (op, &inner_addressp);
3161 return inner_addressp ? 0 : expr;
3162 }
3163
3164 /* Output operand names for assembler instructions. OPERANDS is the
3165 operand vector, OPORDER is the order to write the operands, and NOPS
3166 is the number of operands to write. */
3167
3168 static void
3169 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3170 {
3171 int wrote = 0;
3172 int i;
3173
3174 for (i = 0; i < nops; i++)
3175 {
3176 int addressp;
3177 rtx op = operands[oporder[i]];
3178 tree expr = get_mem_expr_from_op (op, &addressp);
3179
3180 fprintf (asm_out_file, "%c%s",
3181 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3182 wrote = 1;
3183 if (expr)
3184 {
3185 fprintf (asm_out_file, "%s",
3186 addressp ? "*" : "");
3187 print_mem_expr (asm_out_file, expr);
3188 wrote = 1;
3189 }
3190 else if (REG_P (op) && ORIGINAL_REGNO (op)
3191 && ORIGINAL_REGNO (op) != REGNO (op))
3192 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3193 }
3194 }
3195
3196 /* Output text from TEMPLATE to the assembler output file,
3197 obeying %-directions to substitute operands taken from
3198 the vector OPERANDS.
3199
3200 %N (for N a digit) means print operand N in usual manner.
3201 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3202 and print the label name with no punctuation.
3203 %cN means require operand N to be a constant
3204 and print the constant expression with no punctuation.
3205 %aN means expect operand N to be a memory address
3206 (not a memory reference!) and print a reference
3207 to that address.
3208 %nN means expect operand N to be a constant
3209 and print a constant expression for minus the value
3210 of the operand, with no other punctuation. */
3211
3212 void
3213 output_asm_insn (const char *templ, rtx *operands)
3214 {
3215 const char *p;
3216 int c;
3217 #ifdef ASSEMBLER_DIALECT
3218 int dialect = 0;
3219 #endif
3220 int oporder[MAX_RECOG_OPERANDS];
3221 char opoutput[MAX_RECOG_OPERANDS];
3222 int ops = 0;
3223
3224 /* An insn may return a null string template
3225 in a case where no assembler code is needed. */
3226 if (*templ == 0)
3227 return;
3228
3229 memset (opoutput, 0, sizeof opoutput);
3230 p = templ;
3231 putc ('\t', asm_out_file);
3232
3233 #ifdef ASM_OUTPUT_OPCODE
3234 ASM_OUTPUT_OPCODE (asm_out_file, p);
3235 #endif
3236
3237 while ((c = *p++))
3238 switch (c)
3239 {
3240 case '\n':
3241 if (flag_verbose_asm)
3242 output_asm_operand_names (operands, oporder, ops);
3243 if (flag_print_asm_name)
3244 output_asm_name ();
3245
3246 ops = 0;
3247 memset (opoutput, 0, sizeof opoutput);
3248
3249 putc (c, asm_out_file);
3250 #ifdef ASM_OUTPUT_OPCODE
3251 while ((c = *p) == '\t')
3252 {
3253 putc (c, asm_out_file);
3254 p++;
3255 }
3256 ASM_OUTPUT_OPCODE (asm_out_file, p);
3257 #endif
3258 break;
3259
3260 #ifdef ASSEMBLER_DIALECT
3261 case '{':
3262 {
3263 int i;
3264
3265 if (dialect)
3266 output_operand_lossage ("nested assembly dialect alternatives");
3267 else
3268 dialect = 1;
3269
3270 /* If we want the first dialect, do nothing. Otherwise, skip
3271 DIALECT_NUMBER of strings ending with '|'. */
3272 for (i = 0; i < dialect_number; i++)
3273 {
3274 while (*p && *p != '}' && *p++ != '|')
3275 ;
3276 if (*p == '}')
3277 break;
3278 if (*p == '|')
3279 p++;
3280 }
3281
3282 if (*p == '\0')
3283 output_operand_lossage ("unterminated assembly dialect alternative");
3284 }
3285 break;
3286
3287 case '|':
3288 if (dialect)
3289 {
3290 /* Skip to close brace. */
3291 do
3292 {
3293 if (*p == '\0')
3294 {
3295 output_operand_lossage ("unterminated assembly dialect alternative");
3296 break;
3297 }
3298 }
3299 while (*p++ != '}');
3300 dialect = 0;
3301 }
3302 else
3303 putc (c, asm_out_file);
3304 break;
3305
3306 case '}':
3307 if (! dialect)
3308 putc (c, asm_out_file);
3309 dialect = 0;
3310 break;
3311 #endif
3312
3313 case '%':
3314 /* %% outputs a single %. */
3315 if (*p == '%')
3316 {
3317 p++;
3318 putc (c, asm_out_file);
3319 }
3320 /* %= outputs a number which is unique to each insn in the entire
3321 compilation. This is useful for making local labels that are
3322 referred to more than once in a given insn. */
3323 else if (*p == '=')
3324 {
3325 p++;
3326 fprintf (asm_out_file, "%d", insn_counter);
3327 }
3328 /* % followed by a letter and some digits
3329 outputs an operand in a special way depending on the letter.
3330 Letters `acln' are implemented directly.
3331 Other letters are passed to `output_operand' so that
3332 the TARGET_PRINT_OPERAND hook can define them. */
3333 else if (ISALPHA (*p))
3334 {
3335 int letter = *p++;
3336 unsigned long opnum;
3337 char *endptr;
3338
3339 opnum = strtoul (p, &endptr, 10);
3340
3341 if (endptr == p)
3342 output_operand_lossage ("operand number missing "
3343 "after %%-letter");
3344 else if (this_is_asm_operands && opnum >= insn_noperands)
3345 output_operand_lossage ("operand number out of range");
3346 else if (letter == 'l')
3347 output_asm_label (operands[opnum]);
3348 else if (letter == 'a')
3349 output_address (operands[opnum]);
3350 else if (letter == 'c')
3351 {
3352 if (CONSTANT_ADDRESS_P (operands[opnum]))
3353 output_addr_const (asm_out_file, operands[opnum]);
3354 else
3355 output_operand (operands[opnum], 'c');
3356 }
3357 else if (letter == 'n')
3358 {
3359 if (CONST_INT_P (operands[opnum]))
3360 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3361 - INTVAL (operands[opnum]));
3362 else
3363 {
3364 putc ('-', asm_out_file);
3365 output_addr_const (asm_out_file, operands[opnum]);
3366 }
3367 }
3368 else
3369 output_operand (operands[opnum], letter);
3370
3371 if (!opoutput[opnum])
3372 oporder[ops++] = opnum;
3373 opoutput[opnum] = 1;
3374
3375 p = endptr;
3376 c = *p;
3377 }
3378 /* % followed by a digit outputs an operand the default way. */
3379 else if (ISDIGIT (*p))
3380 {
3381 unsigned long opnum;
3382 char *endptr;
3383
3384 opnum = strtoul (p, &endptr, 10);
3385 if (this_is_asm_operands && opnum >= insn_noperands)
3386 output_operand_lossage ("operand number out of range");
3387 else
3388 output_operand (operands[opnum], 0);
3389
3390 if (!opoutput[opnum])
3391 oporder[ops++] = opnum;
3392 opoutput[opnum] = 1;
3393
3394 p = endptr;
3395 c = *p;
3396 }
3397 /* % followed by punctuation: output something for that
3398 punctuation character alone, with no operand. The
3399 TARGET_PRINT_OPERAND hook decides what is actually done. */
3400 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3401 output_operand (NULL_RTX, *p++);
3402 else
3403 output_operand_lossage ("invalid %%-code");
3404 break;
3405
3406 default:
3407 putc (c, asm_out_file);
3408 }
3409
3410 /* Write out the variable names for operands, if we know them. */
3411 if (flag_verbose_asm)
3412 output_asm_operand_names (operands, oporder, ops);
3413 if (flag_print_asm_name)
3414 output_asm_name ();
3415
3416 putc ('\n', asm_out_file);
3417 }
3418 \f
3419 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3420
3421 void
3422 output_asm_label (rtx x)
3423 {
3424 char buf[256];
3425
3426 if (GET_CODE (x) == LABEL_REF)
3427 x = XEXP (x, 0);
3428 if (LABEL_P (x)
3429 || (NOTE_P (x)
3430 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3431 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3432 else
3433 output_operand_lossage ("'%%l' operand isn't a label");
3434
3435 assemble_name (asm_out_file, buf);
3436 }
3437
3438 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3439 output_operand. Marks SYMBOL_REFs as referenced through use of
3440 assemble_external. */
3441
3442 static int
3443 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3444 {
3445 rtx x = *xp;
3446
3447 /* If we have a used symbol, we may have to emit assembly
3448 annotations corresponding to whether the symbol is external, weak
3449 or has non-default visibility. */
3450 if (GET_CODE (x) == SYMBOL_REF)
3451 {
3452 tree t;
3453
3454 t = SYMBOL_REF_DECL (x);
3455 if (t)
3456 assemble_external (t);
3457
3458 return -1;
3459 }
3460
3461 return 0;
3462 }
3463
3464 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3465
3466 void
3467 mark_symbol_refs_as_used (rtx x)
3468 {
3469 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3470 }
3471
3472 /* Print operand X using machine-dependent assembler syntax.
3473 CODE is a non-digit that preceded the operand-number in the % spec,
3474 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3475 between the % and the digits.
3476 When CODE is a non-letter, X is 0.
3477
3478 The meanings of the letters are machine-dependent and controlled
3479 by TARGET_PRINT_OPERAND. */
3480
3481 void
3482 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3483 {
3484 if (x && GET_CODE (x) == SUBREG)
3485 x = alter_subreg (&x);
3486
3487 /* X must not be a pseudo reg. */
3488 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3489
3490 targetm.asm_out.print_operand (asm_out_file, x, code);
3491
3492 if (x == NULL_RTX)
3493 return;
3494
3495 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3496 }
3497
3498 /* Print a memory reference operand for address X using
3499 machine-dependent assembler syntax. */
3500
3501 void
3502 output_address (rtx x)
3503 {
3504 bool changed = false;
3505 walk_alter_subreg (&x, &changed);
3506 targetm.asm_out.print_operand_address (asm_out_file, x);
3507 }
3508 \f
3509 /* Print an integer constant expression in assembler syntax.
3510 Addition and subtraction are the only arithmetic
3511 that may appear in these expressions. */
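/* For example, (const (plus (symbol_ref "x") (const_int 4))) comes out
   as "x+4", and (minus (symbol_ref "a") (symbol_ref "b")) as "a-b";
   label references are printed using the internal label syntax produced
   by ASM_GENERATE_INTERNAL_LABEL.  */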
3512
3513 void
3514 output_addr_const (FILE *file, rtx x)
3515 {
3516 char buf[256];
3517
3518 restart:
3519 switch (GET_CODE (x))
3520 {
3521 case PC:
3522 putc ('.', file);
3523 break;
3524
3525 case SYMBOL_REF:
3526 if (SYMBOL_REF_DECL (x))
3527 assemble_external (SYMBOL_REF_DECL (x));
3528 #ifdef ASM_OUTPUT_SYMBOL_REF
3529 ASM_OUTPUT_SYMBOL_REF (file, x);
3530 #else
3531 assemble_name (file, XSTR (x, 0));
3532 #endif
3533 break;
3534
3535 case LABEL_REF:
3536 x = XEXP (x, 0);
3537 /* Fall through. */
3538 case CODE_LABEL:
3539 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3540 #ifdef ASM_OUTPUT_LABEL_REF
3541 ASM_OUTPUT_LABEL_REF (file, buf);
3542 #else
3543 assemble_name (file, buf);
3544 #endif
3545 break;
3546
3547 case CONST_INT:
3548 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3549 break;
3550
3551 case CONST:
3552 /* This used to output parentheses around the expression,
3553 but that does not work on the 386 (either ATT or BSD assembler). */
3554 output_addr_const (file, XEXP (x, 0));
3555 break;
3556
3557 case CONST_DOUBLE:
3558 if (GET_MODE (x) == VOIDmode)
3559 {
3560 /* We can use %d if the number is one word and positive. */
3561 if (CONST_DOUBLE_HIGH (x))
3562 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3563 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3564 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3565 else if (CONST_DOUBLE_LOW (x) < 0)
3566 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3567 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3568 else
3569 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3570 }
3571 else
3572 /* We can't handle floating point constants;
3573 PRINT_OPERAND must handle them. */
3574 output_operand_lossage ("floating constant misused");
3575 break;
3576
3577 case CONST_FIXED:
3578 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3579 (unsigned HOST_WIDE_INT) CONST_FIXED_VALUE_LOW (x));
3580 break;
3581
3582 case PLUS:
3583 /* Some assemblers need integer constants to appear last (e.g. masm). */
3584 if (CONST_INT_P (XEXP (x, 0)))
3585 {
3586 output_addr_const (file, XEXP (x, 1));
3587 if (INTVAL (XEXP (x, 0)) >= 0)
3588 fprintf (file, "+");
3589 output_addr_const (file, XEXP (x, 0));
3590 }
3591 else
3592 {
3593 output_addr_const (file, XEXP (x, 0));
3594 if (!CONST_INT_P (XEXP (x, 1))
3595 || INTVAL (XEXP (x, 1)) >= 0)
3596 fprintf (file, "+");
3597 output_addr_const (file, XEXP (x, 1));
3598 }
3599 break;
3600
3601 case MINUS:
3602 /* Avoid outputting things like x-x or x+5-x,
3603 since some assemblers can't handle that. */
3604 x = simplify_subtraction (x);
3605 if (GET_CODE (x) != MINUS)
3606 goto restart;
3607
3608 output_addr_const (file, XEXP (x, 0));
3609 fprintf (file, "-");
3610 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3611 || GET_CODE (XEXP (x, 1)) == PC
3612 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3613 output_addr_const (file, XEXP (x, 1));
3614 else
3615 {
3616 fputs (targetm.asm_out.open_paren, file);
3617 output_addr_const (file, XEXP (x, 1));
3618 fputs (targetm.asm_out.close_paren, file);
3619 }
3620 break;
3621
3622 case ZERO_EXTEND:
3623 case SIGN_EXTEND:
3624 case SUBREG:
3625 case TRUNCATE:
3626 output_addr_const (file, XEXP (x, 0));
3627 break;
3628
3629 default:
3630 #ifdef OUTPUT_ADDR_CONST_EXTRA
3631 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3632 break;
3633
3634 fail:
3635 #endif
3636 output_operand_lossage ("invalid expression as operand");
3637 }
3638 }
3639 \f
3640 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3641 %R prints the value of REGISTER_PREFIX.
3642 %L prints the value of LOCAL_LABEL_PREFIX.
3643 %U prints the value of USER_LABEL_PREFIX.
3644 %I prints the value of IMMEDIATE_PREFIX.
3645 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3646 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3647
3648 We handle alternate assembler dialects here, just like output_asm_insn. */
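/* For example (a sketch, assuming REGNO is an in-range hard register
   number and DELTA a HOST_WIDE_INT),
     asm_fprintf (file, "\tmov\t%R%s, %I%wd\n", reg_names[regno], delta);
   writes the target's register prefix before the register name and its
   immediate prefix before the constant; the prefixes expand to nothing
   on targets that do not define them.  */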
3649
3650 void
3651 asm_fprintf (FILE *file, const char *p, ...)
3652 {
3653 char buf[10];
3654 char *q, c;
3655 va_list argptr;
3656
3657 va_start (argptr, p);
3658
3659 buf[0] = '%';
3660
3661 while ((c = *p++))
3662 switch (c)
3663 {
3664 #ifdef ASSEMBLER_DIALECT
3665 case '{':
3666 {
3667 int i;
3668
3669 /* If we want the first dialect, do nothing. Otherwise, skip
3670 DIALECT_NUMBER of strings ending with '|'. */
3671 for (i = 0; i < dialect_number; i++)
3672 {
3673 while (*p && *p++ != '|')
3674 ;
3675
3676 if (*p == '|')
3677 p++;
3678 }
3679 }
3680 break;
3681
3682 case '|':
3683 /* Skip to close brace. */
3684 while (*p && *p++ != '}')
3685 ;
3686 break;
3687
3688 case '}':
3689 break;
3690 #endif
3691
3692 case '%':
3693 c = *p++;
3694 q = &buf[1];
3695 while (strchr ("-+ #0", c))
3696 {
3697 *q++ = c;
3698 c = *p++;
3699 }
3700 while (ISDIGIT (c) || c == '.')
3701 {
3702 *q++ = c;
3703 c = *p++;
3704 }
3705 switch (c)
3706 {
3707 case '%':
3708 putc ('%', file);
3709 break;
3710
3711 case 'd': case 'i': case 'u':
3712 case 'x': case 'X': case 'o':
3713 case 'c':
3714 *q++ = c;
3715 *q = 0;
3716 fprintf (file, buf, va_arg (argptr, int));
3717 break;
3718
3719 case 'w':
3720 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3721 'o' cases, but we do not check for those cases. It
3722 means that the value is a HOST_WIDE_INT, which may be
3723 either `long' or `long long'. */
3724 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3725 q += strlen (HOST_WIDE_INT_PRINT);
3726 *q++ = *p++;
3727 *q = 0;
3728 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3729 break;
3730
3731 case 'l':
3732 *q++ = c;
3733 #ifdef HAVE_LONG_LONG
3734 if (*p == 'l')
3735 {
3736 *q++ = *p++;
3737 *q++ = *p++;
3738 *q = 0;
3739 fprintf (file, buf, va_arg (argptr, long long));
3740 }
3741 else
3742 #endif
3743 {
3744 *q++ = *p++;
3745 *q = 0;
3746 fprintf (file, buf, va_arg (argptr, long));
3747 }
3748
3749 break;
3750
3751 case 's':
3752 *q++ = c;
3753 *q = 0;
3754 fprintf (file, buf, va_arg (argptr, char *));
3755 break;
3756
3757 case 'O':
3758 #ifdef ASM_OUTPUT_OPCODE
3759 ASM_OUTPUT_OPCODE (asm_out_file, p);
3760 #endif
3761 break;
3762
3763 case 'R':
3764 #ifdef REGISTER_PREFIX
3765 fprintf (file, "%s", REGISTER_PREFIX);
3766 #endif
3767 break;
3768
3769 case 'I':
3770 #ifdef IMMEDIATE_PREFIX
3771 fprintf (file, "%s", IMMEDIATE_PREFIX);
3772 #endif
3773 break;
3774
3775 case 'L':
3776 #ifdef LOCAL_LABEL_PREFIX
3777 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3778 #endif
3779 break;
3780
3781 case 'U':
3782 fputs (user_label_prefix, file);
3783 break;
3784
3785 #ifdef ASM_FPRINTF_EXTENSIONS
3786 /* Uppercase letters are reserved for general use by asm_fprintf
3787 and so are not available to target specific code. In order to
3788 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
3789 they are defined here. As they get turned into real extensions
3790 to asm_fprintf they should be removed from this list. */
3791 case 'A': case 'B': case 'C': case 'D': case 'E':
3792 case 'F': case 'G': case 'H': case 'J': case 'K':
3793 case 'M': case 'N': case 'P': case 'Q': case 'S':
3794 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3795 break;
3796
3797 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3798 #endif
3799 default:
3800 gcc_unreachable ();
3801 }
3802 break;
3803
3804 default:
3805 putc (c, file);
3806 }
3807 va_end (argptr);
3808 }
3809 \f
3810 /* Split up a CONST_DOUBLE or integer constant rtx
3811 into two rtx's for single words,
3812 storing in *FIRST the word that comes first in memory in the target
3813 and in *SECOND the other. */
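/* For example, on a 32-bit target with little-endian word order and a
   64-bit HOST_WIDE_INT, splitting (const_int 0x100000002) gives
   *FIRST = (const_int 2) and *SECOND = (const_int 1); with
   WORDS_BIG_ENDIAN the two results are swapped.  */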
3814
3815 void
3816 split_double (rtx value, rtx *first, rtx *second)
3817 {
3818 if (CONST_INT_P (value))
3819 {
3820 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3821 {
3822 /* In this case the CONST_INT holds both target words.
3823 Extract the bits from it into two word-sized pieces.
3824 Sign extend each half to HOST_WIDE_INT. */
3825 unsigned HOST_WIDE_INT low, high;
3826 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3827
3828 /* Set sign_bit to the most significant bit of a word. */
3829 sign_bit = 1;
3830 sign_bit <<= BITS_PER_WORD - 1;
3831
3832 /* Set mask so that all bits of the word are set. We could
3833 have used 1 << BITS_PER_WORD instead of basing the
3834 calculation on sign_bit. However, on machines where
3835 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3836 compiler warning, even though the code would never be
3837 executed. */
3838 mask = sign_bit << 1;
3839 mask--;
3840
3841 /* Set sign_extend as any remaining bits. */
3842 sign_extend = ~mask;
3843
3844 /* Pick the lower word and sign-extend it. */
3845 low = INTVAL (value);
3846 low &= mask;
3847 if (low & sign_bit)
3848 low |= sign_extend;
3849
3850 /* Pick the higher word, shifted to the least significant
3851 bits, and sign-extend it. */
3852 high = INTVAL (value);
3853 high >>= BITS_PER_WORD - 1;
3854 high >>= 1;
3855 high &= mask;
3856 if (high & sign_bit)
3857 high |= sign_extend;
3858
3859 /* Store the words in the target machine order. */
3860 if (WORDS_BIG_ENDIAN)
3861 {
3862 *first = GEN_INT (high);
3863 *second = GEN_INT (low);
3864 }
3865 else
3866 {
3867 *first = GEN_INT (low);
3868 *second = GEN_INT (high);
3869 }
3870 }
3871 else
3872 {
3873 /* The rule for using CONST_INT for a wider mode
3874 is that we regard the value as signed.
3875 So sign-extend it. */
3876 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3877 if (WORDS_BIG_ENDIAN)
3878 {
3879 *first = high;
3880 *second = value;
3881 }
3882 else
3883 {
3884 *first = value;
3885 *second = high;
3886 }
3887 }
3888 }
3889 else if (GET_CODE (value) != CONST_DOUBLE)
3890 {
3891 if (WORDS_BIG_ENDIAN)
3892 {
3893 *first = const0_rtx;
3894 *second = value;
3895 }
3896 else
3897 {
3898 *first = value;
3899 *second = const0_rtx;
3900 }
3901 }
3902 else if (GET_MODE (value) == VOIDmode
3903 /* This is the old way we did CONST_DOUBLE integers. */
3904 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3905 {
3906 /* In an integer, the words are defined as most and least significant.
3907 So order them by the target's convention. */
3908 if (WORDS_BIG_ENDIAN)
3909 {
3910 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3911 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3912 }
3913 else
3914 {
3915 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3916 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3917 }
3918 }
3919 else
3920 {
3921 REAL_VALUE_TYPE r;
3922 long l[2];
3923 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3924
3925 /* Note, this converts the REAL_VALUE_TYPE to the target's
3926 format, splits up the floating point double and outputs
3927 exactly 32 bits of it into each of l[0] and l[1] --
3928 not necessarily BITS_PER_WORD bits. */
3929 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3930
3931 /* If 32 bits is an entire word for the target, but not for the host,
3932 then sign-extend on the host so that the number will look the same
3933 way on the host that it would on the target. See for instance
3934 simplify_unary_operation. The #if is needed to avoid compiler
3935 warnings. */
3936
3937 #if HOST_BITS_PER_LONG > 32
3938 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3939 {
3940 if (l[0] & ((long) 1 << 31))
3941 l[0] |= ((long) (-1) << 32);
3942 if (l[1] & ((long) 1 << 31))
3943 l[1] |= ((long) (-1) << 32);
3944 }
3945 #endif
3946
3947 *first = GEN_INT (l[0]);
3948 *second = GEN_INT (l[1]);
3949 }
3950 }
3951 \f
3952 /* Return nonzero if this function has no function calls. */
3953
3954 int
3955 leaf_function_p (void)
3956 {
3957 rtx insn;
3958 rtx link;
3959
3960 if (crtl->profile || profile_arc_flag)
3961 return 0;
3962
3963 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3964 {
3965 if (CALL_P (insn)
3966 && ! SIBLING_CALL_P (insn))
3967 return 0;
3968 if (NONJUMP_INSN_P (insn)
3969 && GET_CODE (PATTERN (insn)) == SEQUENCE
3970 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3971 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3972 return 0;
3973 }
3974 for (link = crtl->epilogue_delay_list;
3975 link;
3976 link = XEXP (link, 1))
3977 {
3978 insn = XEXP (link, 0);
3979
3980 if (CALL_P (insn)
3981 && ! SIBLING_CALL_P (insn))
3982 return 0;
3983 if (NONJUMP_INSN_P (insn)
3984 && GET_CODE (PATTERN (insn)) == SEQUENCE
3985 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3986 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3987 return 0;
3988 }
3989
3990 return 1;
3991 }
3992
3993 /* Return 1 if the branch is a forward branch.
3994    Uses the insn_shuid array, so it works only in the final pass.  May be used
3995    by output templates to add branch prediction hints.
3996  */
3997 int
3998 final_forward_branch_p (rtx insn)
3999 {
4000 int insn_id, label_id;
4001
4002 gcc_assert (uid_shuid);
4003 insn_id = INSN_SHUID (insn);
4004 label_id = INSN_SHUID (JUMP_LABEL (insn));
4005   /* Both the insn and its target label must have ID information available.  */
4006 gcc_assert (insn_id && label_id);
4007 return insn_id < label_id;
4008 }
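/* A hedged sketch of the intended use (the operand codes below are invented
   for illustration, not any port's actual template): under the usual
   backward-taken / forward-not-taken heuristic, a conditional branch output
   routine on a target with SPARC-style prediction suffixes might do

     return final_forward_branch_p (insn) ? "b%C1,pn\t%l0" : "b%C1,pt\t%l0";

   so that forward branches are hinted as not taken.  */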
4009
4010 /* On some machines, a function with no call insns
4011 can run faster if it doesn't create its own register window.
4012 When output, the leaf function should use only the "output"
4013 registers. Ordinarily, the function would be compiled to use
4014 the "input" registers to find its arguments; it is a candidate
4015 for leaf treatment if it uses only the "input" registers.
4016 Leaf function treatment means renumbering so the function
4017 uses the "output" registers instead. */
4018
4019 #ifdef LEAF_REGISTERS
4020
4021 /* Return 1 if this function uses only the registers that can be
4022 safely renumbered. */
4023
4024 int
4025 only_leaf_regs_used (void)
4026 {
4027 int i;
4028 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4029
4030 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4031 if ((df_regs_ever_live_p (i) || global_regs[i])
4032 && ! permitted_reg_in_leaf_functions[i])
4033 return 0;
4034
4035 if (crtl->uses_pic_offset_table
4036 && pic_offset_table_rtx != 0
4037 && REG_P (pic_offset_table_rtx)
4038 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4039 return 0;
4040
4041 return 1;
4042 }
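/* For illustration, paraphrasing the code above rather than any port's
   actual definition: LEAF_REGISTERS expands to a per-hard-register vector
   that is nonzero for each register a leaf function may use, and
   LEAF_REG_REMAP (REGNO) yields the register REGNO should become when the
   function receives leaf treatment.  leaf_renumber_regs below walks the insn
   chain and applies that remapping to every pattern.  */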
4043
4044 /* Scan all instructions and renumber all registers into those
4045 available in leaf functions. */
4046
4047 static void
4048 leaf_renumber_regs (rtx first)
4049 {
4050 rtx insn;
4051
4052 /* Renumber only the actual patterns.
4053 The reg-notes can contain frame pointer refs,
4054 and renumbering them could crash, and should not be needed. */
4055 for (insn = first; insn; insn = NEXT_INSN (insn))
4056 if (INSN_P (insn))
4057 leaf_renumber_regs_insn (PATTERN (insn));
4058 for (insn = crtl->epilogue_delay_list;
4059 insn;
4060 insn = XEXP (insn, 1))
4061 if (INSN_P (XEXP (insn, 0)))
4062 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
4063 }
4064
4065 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4066 available in leaf functions. */
4067
4068 void
4069 leaf_renumber_regs_insn (rtx in_rtx)
4070 {
4071 int i, j;
4072 const char *format_ptr;
4073
4074 if (in_rtx == 0)
4075 return;
4076
4077   /* Renumber all input-registers into output-registers.
4078      The rtx USED flag marks registers that have already been renumbered,
4079      so each register is renumbered at most once.  */
4080
4081 if (REG_P (in_rtx))
4082 {
4083 int newreg;
4084
4085 /* Don't renumber the same reg twice. */
4086 if (in_rtx->used)
4087 return;
4088
4089 newreg = REGNO (in_rtx);
4090 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4091 to reach here as part of a REG_NOTE. */
4092 if (newreg >= FIRST_PSEUDO_REGISTER)
4093 {
4094 in_rtx->used = 1;
4095 return;
4096 }
4097 newreg = LEAF_REG_REMAP (newreg);
4098 gcc_assert (newreg >= 0);
4099 df_set_regs_ever_live (REGNO (in_rtx), false);
4100 df_set_regs_ever_live (newreg, true);
4101 SET_REGNO (in_rtx, newreg);
4102 in_rtx->used = 1;
4103 }
4104
4105 if (INSN_P (in_rtx))
4106 {
4107 /* Inside a SEQUENCE, we find insns.
4108 Renumber just the patterns of these insns,
4109 just as we do for the top-level insns. */
4110 leaf_renumber_regs_insn (PATTERN (in_rtx));
4111 return;
4112 }
4113
4114 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4115
4116 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4117 switch (*format_ptr++)
4118 {
4119 case 'e':
4120 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4121 break;
4122
4123 case 'E':
4124 if (NULL != XVEC (in_rtx, i))
4125 {
4126 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4127 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4128 }
4129 break;
4130
4131 case 'S':
4132 case 's':
4133 case '0':
4134 case 'i':
4135 case 'w':
4136 case 'n':
4137 case 'u':
4138 break;
4139
4140 default:
4141 gcc_unreachable ();
4142 }
4143 }
4144 #endif
4145
4146
4147 /* When -gused is given, emit debug info only for used symbols.  But in
4148    addition to the standard intercepted debug_hooks there are some direct
4149    calls to the dbxout routines dbxout_symbol, dbxout_parms, and
4150    dbxout_reg_params.  Those routines may also be called from a higher-level
4151    intercepted routine, so to prevent recording data for an inner call to one
4152    of these on behalf of an intercept, we maintain an intercept nesting
4153    counter (debug_nesting).  We only save the intercepted arguments if the nesting is 1.  */
4154 int debug_nesting = 0;
4155
4156 static tree *symbol_queue;
4157 int symbol_queue_index = 0;
4158 static int symbol_queue_size = 0;
4159
4160 /* Generate the symbols for any queued up type symbols we encountered
4161 while generating the type info for some originally used symbol.
4162 This might generate additional entries in the queue. Only when
4163 the nesting depth goes to 0 is this routine called. */
4164
4165 void
4166 debug_flush_symbol_queue (void)
4167 {
4168 int i;
4169
4170 /* Make sure that additionally queued items are not flushed
4171 prematurely. */
4172
4173 ++debug_nesting;
4174
4175 for (i = 0; i < symbol_queue_index; ++i)
4176 {
4177 /* If we pushed queued symbols then such symbols must be
4178 output no matter what anyone else says. Specifically,
4179 we need to make sure dbxout_symbol() thinks the symbol was
4180 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
4181 which may be set for outside reasons. */
4182 int saved_tree_used = TREE_USED (symbol_queue[i]);
4183 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
4184 TREE_USED (symbol_queue[i]) = 1;
4185 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
4186
4187 #ifdef DBX_DEBUGGING_INFO
4188 dbxout_symbol (symbol_queue[i], 0);
4189 #endif
4190
4191 TREE_USED (symbol_queue[i]) = saved_tree_used;
4192 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
4193 }
4194
4195 symbol_queue_index = 0;
4196 --debug_nesting;
4197 }
4198
4199 /* Queue a type symbol needed as part of the definition of a decl
4200 symbol. These symbols are generated when debug_flush_symbol_queue()
4201 is called. */
4202
4203 void
4204 debug_queue_symbol (tree decl)
4205 {
4206 if (symbol_queue_index >= symbol_queue_size)
4207 {
4208 symbol_queue_size += 10;
4209 symbol_queue = XRESIZEVEC (tree, symbol_queue, symbol_queue_size);
4210 }
4211
4212 symbol_queue[symbol_queue_index++] = decl;
4213 }
4214
4215 /* Free symbol queue. */
4216 void
4217 debug_free_queue (void)
4218 {
4219 if (symbol_queue)
4220 {
4221 free (symbol_queue);
4222 symbol_queue = NULL;
4223 symbol_queue_size = 0;
4224 }
4225 }
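/* A hedged sketch of the intended calling pattern (the real intercept
   wrappers live in the dbx output code and may differ in detail):

     ++debug_nesting;
     ... emit the symbol, possibly calling debug_queue_symbol () ...
     if (--debug_nesting == 0)
       debug_flush_symbol_queue ();

   Only when the outermost intercept finishes does the queue get drained.  */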
4226 \f
4227 /* Turn the RTL into assembly. */
4228 static unsigned int
4229 rest_of_handle_final (void)
4230 {
4231 rtx x;
4232 const char *fnname;
4233
4234 /* Get the function's name, as described by its RTL. This may be
4235 different from the DECL_NAME name used in the source file. */
4236
4237 x = DECL_RTL (current_function_decl);
4238 gcc_assert (MEM_P (x));
4239 x = XEXP (x, 0);
4240 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4241 fnname = XSTR (x, 0);
4242
4243 assemble_start_function (current_function_decl, fnname);
4244 final_start_function (get_insns (), asm_out_file, optimize);
4245 final (get_insns (), asm_out_file, optimize);
4246 final_end_function ();
4247
4248 #ifdef TARGET_UNWIND_INFO
4249 /* ??? The IA-64 ".handlerdata" directive must be issued before
4250 the ".endp" directive that closes the procedure descriptor. */
4251 output_function_exception_table (fnname);
4252 #endif
4253
4254 assemble_end_function (current_function_decl, fnname);
4255
4256 #ifndef TARGET_UNWIND_INFO
4257 /* Otherwise, it feels unclean to switch sections in the middle. */
4258 output_function_exception_table (fnname);
4259 #endif
4260
4261 user_defined_section_attribute = false;
4262
4263 /* Free up reg info memory. */
4264 free_reg_info ();
4265
4266 if (! quiet_flag)
4267 fflush (asm_out_file);
4268
4269 /* Write DBX symbols if requested. */
4270
4271 /* Note that for those inline functions where we don't initially
4272 know for certain that we will be generating an out-of-line copy,
4273 the first invocation of this routine (rest_of_compilation) will
4274 skip over this code by doing a `goto exit_rest_of_compilation;'.
4275 Later on, wrapup_global_declarations will (indirectly) call
4276 rest_of_compilation again for those inline functions that need
4277 to have out-of-line copies generated. During that call, we
4278 *will* be routed past here. */
4279
4280 timevar_push (TV_SYMOUT);
4281 if (!DECL_IGNORED_P (current_function_decl))
4282 debug_hooks->function_decl (current_function_decl);
4283 timevar_pop (TV_SYMOUT);
4284
4285 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4286 DECL_INITIAL (current_function_decl) = error_mark_node;
4287
4288 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4289 && targetm.have_ctors_dtors)
4290 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4291 decl_init_priority_lookup
4292 (current_function_decl));
4293 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4294 && targetm.have_ctors_dtors)
4295 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4296 decl_fini_priority_lookup
4297 (current_function_decl));
4298 return 0;
4299 }
4300
4301 struct rtl_opt_pass pass_final =
4302 {
4303 {
4304 RTL_PASS,
4305 "final", /* name */
4306 NULL, /* gate */
4307 rest_of_handle_final, /* execute */
4308 NULL, /* sub */
4309 NULL, /* next */
4310 0, /* static_pass_number */
4311 TV_FINAL, /* tv_id */
4312 0, /* properties_required */
4313 0, /* properties_provided */
4314 0, /* properties_destroyed */
4315 0, /* todo_flags_start */
4316 TODO_ggc_collect /* todo_flags_finish */
4317 }
4318 };
4319
4320
4321 static unsigned int
4322 rest_of_handle_shorten_branches (void)
4323 {
4324 /* Shorten branches. */
4325 shorten_branches (get_insns ());
4326 return 0;
4327 }
4328
4329 struct rtl_opt_pass pass_shorten_branches =
4330 {
4331 {
4332 RTL_PASS,
4333 "shorten", /* name */
4334 NULL, /* gate */
4335 rest_of_handle_shorten_branches, /* execute */
4336 NULL, /* sub */
4337 NULL, /* next */
4338 0, /* static_pass_number */
4339 TV_FINAL, /* tv_id */
4340 0, /* properties_required */
4341 0, /* properties_provided */
4342 0, /* properties_destroyed */
4343 0, /* todo_flags_start */
4344 TODO_dump_func /* todo_flags_finish */
4345 }
4346 };
4347
4348
4349 static unsigned int
4350 rest_of_clean_state (void)
4351 {
4352 rtx insn, next;
4353 FILE *final_output = NULL;
4354 int save_unnumbered = flag_dump_unnumbered;
4355 int save_noaddr = flag_dump_noaddr;
4356
4357 if (flag_dump_final_insns)
4358 {
4359 final_output = fopen (flag_dump_final_insns, "a");
4360 if (!final_output)
4361 {
4362 error ("could not open final insn dump file %qs: %m",
4363 flag_dump_final_insns);
4364 flag_dump_final_insns = NULL;
4365 }
4366 else
4367 {
4368 const char *aname;
4369 struct cgraph_node *node = cgraph_node (current_function_decl);
4370
4371 aname = (IDENTIFIER_POINTER
4372 (DECL_ASSEMBLER_NAME (current_function_decl)));
4373 fprintf (final_output, "\n;; Function (%s) %s\n\n", aname,
4374 node->frequency == NODE_FREQUENCY_HOT
4375 ? " (hot)"
4376 : node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
4377 ? " (unlikely executed)"
4378 : node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
4379 ? " (executed once)"
4380 : "");
4381
4382 flag_dump_noaddr = flag_dump_unnumbered = 1;
4383 if (flag_compare_debug_opt || flag_compare_debug)
4384 dump_flags |= TDF_NOUID;
4385 final_insns_dump_p = true;
4386
4387 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4388 if (LABEL_P (insn))
4389 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4390 else
4391 INSN_UID (insn) = 0;
4392 }
4393 }
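  /* Informally: this dump is what the -fcompare-debug machinery consumes.
     The compilation is run twice, with and without debug info, each run
     appending its final insn stream (UIDs and addresses suppressed as above)
     to the same file so that the two streams can be compared.  */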
4394
4395   /* It is very important to decompose the RTL instruction chain here:
4396      debug information keeps pointing into CODE_LABEL insns inside the function
4397      body.  If these remain linked to the other insns, we end up preserving the
4398      whole RTL chain and the attached detailed debug info in memory.  */
4399 for (insn = get_insns (); insn; insn = next)
4400 {
4401 next = NEXT_INSN (insn);
4402 NEXT_INSN (insn) = NULL;
4403 PREV_INSN (insn) = NULL;
4404
4405 if (final_output
4406 && (!NOTE_P (insn) ||
4407 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4408 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4409 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4410 && NOTE_KIND (insn) != NOTE_INSN_CFA_RESTORE_STATE)))
4411 print_rtl_single (final_output, insn);
4412
4413 }
4414
4415 if (final_output)
4416 {
4417 flag_dump_noaddr = save_noaddr;
4418 flag_dump_unnumbered = save_unnumbered;
4419 final_insns_dump_p = false;
4420
4421 if (fclose (final_output))
4422 {
4423 error ("could not close final insn dump file %qs: %m",
4424 flag_dump_final_insns);
4425 flag_dump_final_insns = NULL;
4426 }
4427 }
4428
4429 /* In case the function was not output,
4430 don't leave any temporary anonymous types
4431 queued up for sdb output. */
4432 #ifdef SDB_DEBUGGING_INFO
4433 if (write_symbols == SDB_DEBUG)
4434 sdbout_types (NULL_TREE);
4435 #endif
4436
4437 flag_rerun_cse_after_global_opts = 0;
4438 reload_completed = 0;
4439 epilogue_completed = 0;
4440 #ifdef STACK_REGS
4441 regstack_completed = 0;
4442 #endif
4443
4444 /* Clear out the insn_length contents now that they are no
4445 longer valid. */
4446 init_insn_lengths ();
4447
4448 /* Show no temporary slots allocated. */
4449 init_temp_slots ();
4450
4451 free_bb_for_insn ();
4452
4453 delete_tree_ssa ();
4454
4455 if (targetm.binds_local_p (current_function_decl))
4456 {
4457 unsigned int pref = crtl->preferred_stack_boundary;
4458 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4459 pref = crtl->stack_alignment_needed;
4460 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4461 = pref;
4462 }
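  /* Presumably this lets callers elsewhere in the translation unit see, via
     cgraph_rtl_info, how much stack alignment this function expects on
     entry; it is recorded only when the function binds locally and thus
     cannot be overridden at link time.  */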
4463
4464 /* Make sure volatile mem refs aren't considered valid operands for
4465 arithmetic insns. We must call this here if this is a nested inline
4466 function, since the above code leaves us in the init_recog state,
4467 and the function context push/pop code does not save/restore volatile_ok.
4468
4469 ??? Maybe it isn't necessary for expand_start_function to call this
4470 anymore if we do it here? */
4471
4472 init_recog_no_volatile ();
4473
4474 /* We're done with this function. Free up memory if we can. */
4475 free_after_parsing (cfun);
4476 free_after_compilation (cfun);
4477 return 0;
4478 }
4479
4480 struct rtl_opt_pass pass_clean_state =
4481 {
4482 {
4483 RTL_PASS,
4484 "*clean_state", /* name */
4485 NULL, /* gate */
4486 rest_of_clean_state, /* execute */
4487 NULL, /* sub */
4488 NULL, /* next */
4489 0, /* static_pass_number */
4490 TV_FINAL, /* tv_id */
4491 0, /* properties_required */
4492 0, /* properties_provided */
4493 PROP_rtl, /* properties_destroyed */
4494 0, /* todo_flags_start */
4495 0 /* todo_flags_finish */
4496 }
4497 };