1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
24
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
30
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
35
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
38
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
42
43 The code for the function prologue and epilogue is generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
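/* A minimal sketch, not part of this file, of the calling sequence the
   comment above describes.  `emit_one_function', FILE and INSNS are
   hypothetical names standing in for whatever the real caller uses.  */
#if 0
static void
emit_one_function (FILE *file, rtx insns, int optimize)
{
  /* Output the assembler code for function entry.  */
  final_start_function (insns, file, optimize);

  /* Output assembler code for the RTL; a function compiled in several
     pieces would call `final' once per piece.  */
  final (insns, file, optimize, 0);

  /* Output the assembler code for function exit.  */
  final_end_function ();
}
#endif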
46
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74
75 #ifdef XCOFF_DEBUGGING_INFO
76 #include "xcoffout.h" /* Needed for external data
77 declarations for e.g. AIX 4.x. */
78 #endif
79
80 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
81 #include "dwarf2out.h"
82 #endif
83
84 #ifdef DBX_DEBUGGING_INFO
85 #include "dbxout.h"
86 #endif
87
88 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
89 null default for it to save conditionalization later. */
90 #ifndef CC_STATUS_INIT
91 #define CC_STATUS_INIT
92 #endif
93
94 /* How to start an assembler comment. */
95 #ifndef ASM_COMMENT_START
96 #define ASM_COMMENT_START ";#"
97 #endif
98
99 /* Is the given character a logical line separator for the assembler? */
100 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
101 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
102 #endif
103
104 #ifndef JUMP_TABLES_IN_TEXT_SECTION
105 #define JUMP_TABLES_IN_TEXT_SECTION 0
106 #endif
107
108 #if defined(READONLY_DATA_SECTION) || defined(READONLY_DATA_SECTION_ASM_OP)
109 #define HAVE_READONLY_DATA_SECTION 1
110 #else
111 #define HAVE_READONLY_DATA_SECTION 0
112 #endif
113
114 /* Last insn processed by final_scan_insn. */
115 static rtx debug_insn;
116 rtx current_output_insn;
117
118 /* Line number of last NOTE. */
119 static int last_linenum;
120
121 /* Highest line number in current block. */
122 static int high_block_linenum;
123
124 /* Likewise for function. */
125 static int high_function_linenum;
126
127 /* Filename of last NOTE. */
128 static const char *last_filename;
129
130 extern int length_unit_log; /* This is defined in insn-attrtab.c. */
131
132 /* Nonzero while outputting an `asm' with operands.
133 This means that inconsistencies are the user's fault, so don't abort.
134 The precise value is the insn being output, to pass to error_for_asm. */
135 rtx this_is_asm_operands;
136
137 /* Number of operands of this insn, for an `asm' with operands. */
138 static unsigned int insn_noperands;
139
140 /* Compare optimization flag. */
141
142 static rtx last_ignored_compare = 0;
143
144 /* Assign a unique number to each insn that is output.
145 This can be used to generate unique local labels. */
146
147 static int insn_counter = 0;
148
149 #ifdef HAVE_cc0
150 /* This variable contains machine-dependent flags (defined in tm.h)
151 set and examined by output routines
152 that describe how to interpret the condition codes properly. */
153
154 CC_STATUS cc_status;
155
156 /* During output of an insn, this contains a copy of cc_status
157 from before the insn. */
158
159 CC_STATUS cc_prev_status;
160 #endif
161
162 /* Indexed by hardware reg number, is 1 if that register is ever
163 used in the current function.
164
165 In life_analysis, or in stupid_life_analysis, this is set
166 up to record the hard regs used explicitly. Reload adds
167 in the hard regs used for holding pseudo regs. Final uses
168 it to generate the code in the function prologue and epilogue
169 to save and restore registers as needed. */
170
171 char regs_ever_live[FIRST_PSEUDO_REGISTER];
172
173 /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
174 Unlike regs_ever_live, elements of this array corresponding to
175 eliminable regs like the frame pointer are set if an asm sets them. */
176
177 char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];
178
179 /* Nonzero means current function must be given a frame pointer.
180 Initialized in function.c to 0. Set only in reload1.c as per
181 the needs of the function. */
182
183 int frame_pointer_needed;
184
185 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
186
187 static int block_depth;
188
189 /* Nonzero if have enabled APP processing of our assembler output. */
190
191 static int app_on;
192
193 /* If we are outputting an insn sequence, this contains the sequence rtx.
194 Zero otherwise. */
195
196 rtx final_sequence;
197
198 #ifdef ASSEMBLER_DIALECT
199
200 /* Number of the assembler dialect to use, starting at 0. */
201 static int dialect_number;
202 #endif
203
204 /* Indexed by line number, nonzero if there is a note for that line. */
205
206 static char *line_note_exists;
207
208 #ifdef HAVE_conditional_execution
209 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
210 rtx current_insn_predicate;
211 #endif
212
213 #ifdef HAVE_ATTR_length
214 static int asm_insn_count (rtx);
215 #endif
216 static void profile_function (FILE *);
217 static void profile_after_prologue (FILE *);
218 static bool notice_source_line (rtx);
219 static rtx walk_alter_subreg (rtx *);
220 static void output_asm_name (void);
221 static void output_alternate_entry_point (FILE *, rtx);
222 static tree get_mem_expr_from_op (rtx, int *);
223 static void output_asm_operand_names (rtx *, int *, int);
224 static void output_operand (rtx, int);
225 #ifdef LEAF_REGISTERS
226 static void leaf_renumber_regs (rtx);
227 #endif
228 #ifdef HAVE_cc0
229 static int alter_cond (rtx);
230 #endif
231 #ifndef ADDR_VEC_ALIGN
232 static int final_addr_vec_align (rtx);
233 #endif
234 #ifdef HAVE_ATTR_length
235 static int align_fuzz (rtx, rtx, int, unsigned);
236 #endif
237 \f
238 /* Initialize data in final at the beginning of a compilation. */
239
240 void
241 init_final (const char *filename ATTRIBUTE_UNUSED)
242 {
243 app_on = 0;
244 final_sequence = 0;
245
246 #ifdef ASSEMBLER_DIALECT
247 dialect_number = ASSEMBLER_DIALECT;
248 #endif
249 }
250
251 /* Default target function prologue and epilogue assembler output.
252
253 If not overridden for epilogue code, then the function body itself
254 contains return instructions wherever needed. */
255 void
256 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
257 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
258 {
259 }
260
261 /* Default target hook that outputs nothing to a stream. */
262 void
263 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
264 {
265 }
266
267 /* Enable APP processing of subsequent output.
268 Used before the output from an `asm' statement. */
269
270 void
271 app_enable (void)
272 {
273 if (! app_on)
274 {
275 fputs (ASM_APP_ON, asm_out_file);
276 app_on = 1;
277 }
278 }
279
280 /* Disable APP processing of subsequent output.
281 Called from varasm.c before most kinds of output. */
282
283 void
284 app_disable (void)
285 {
286 if (app_on)
287 {
288 fputs (ASM_APP_OFF, asm_out_file);
289 app_on = 0;
290 }
291 }
292 \f
293 /* Return the number of slots filled in the current
294 delayed branch sequence (we don't count the insn needing the
295 delay slot). Zero if not in a delayed branch sequence. */
296
297 #ifdef DELAY_SLOTS
298 int
299 dbr_sequence_length (void)
300 {
301 if (final_sequence != 0)
302 return XVECLEN (final_sequence, 0) - 1;
303 else
304 return 0;
305 }
306 #endif
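/* For example (illustration only): on a port with a single delay slot, a
   filled delay branch is represented as a SEQUENCE whose vector holds the
   branch plus one delay insn, so XVECLEN is 2 and dbr_sequence_length
   returns 1; outside a delay sequence it returns 0.  */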
307 \f
308 /* The next two pages contain routines used to compute the length of an insn
309 and to shorten branches. */
310
311 /* Arrays for insn lengths, and addresses. The latter is referenced by
312 `insn_current_length'. */
313
314 static int *insn_lengths;
315
316 varray_type insn_addresses_;
317
318 /* Max uid for which the above arrays are valid. */
319 static int insn_lengths_max_uid;
320
321 /* Address of insn being processed. Used by `insn_current_length'. */
322 int insn_current_address;
323
324 /* Address of insn being processed in previous iteration. */
325 int insn_last_address;
326
327 /* known invariant alignment of insn being processed. */
328 int insn_current_align;
329
330 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
331 gives the next following alignment insn that increases the known
332 alignment, or NULL_RTX if there is no such insn.
333 For any alignment obtained this way, we can again index uid_align with
334 its uid to obtain the next following align that in turn increases the
335 alignment, till we reach NULL_RTX; the sequence obtained this way
336 for each insn we'll call the alignment chain of this insn in the following
337 comments. */
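/* Illustration only: a hypothetical helper, not part of this file, showing
   how the alignment chain described above would be walked.  */
#if 0
static void
walk_alignment_chain (rtx insn)
{
  rtx align_label;

  /* Each uid_align link leads to the next label that increases the known
     alignment; the chain ends at NULL_RTX.  */
  for (align_label = uid_align[INSN_UID (insn)];
       align_label != NULL_RTX;
       align_label = uid_align[INSN_UID (align_label)])
    {
      /* LABEL_TO_ALIGNMENT (align_label) is the log of the alignment this
         label guarantees; align_fuzz below walks the chain in exactly this
         way to bound the padding that can be inserted.  */
    }
}
#endif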
338
339 struct label_alignment
340 {
341 short alignment;
342 short max_skip;
343 };
344
345 static rtx *uid_align;
346 static int *uid_shuid;
347 static struct label_alignment *label_align;
348
349 /* Indicate that branch shortening hasn't yet been done. */
350
351 void
352 init_insn_lengths (void)
353 {
354 if (uid_shuid)
355 {
356 free (uid_shuid);
357 uid_shuid = 0;
358 }
359 if (insn_lengths)
360 {
361 free (insn_lengths);
362 insn_lengths = 0;
363 insn_lengths_max_uid = 0;
364 }
365 #ifdef HAVE_ATTR_length
366 INSN_ADDRESSES_FREE ();
367 #endif
368 if (uid_align)
369 {
370 free (uid_align);
371 uid_align = 0;
372 }
373 }
374
375 /* Obtain the current length of an insn. If branch shortening has been done,
376 get its actual length. Otherwise, get its maximum length. */
377
378 int
379 get_attr_length (rtx insn ATTRIBUTE_UNUSED)
380 {
381 #ifdef HAVE_ATTR_length
382 rtx body;
383 int i;
384 int length = 0;
385
386 if (insn_lengths_max_uid > INSN_UID (insn))
387 return insn_lengths[INSN_UID (insn)];
388 else
389 switch (GET_CODE (insn))
390 {
391 case NOTE:
392 case BARRIER:
393 case CODE_LABEL:
394 return 0;
395
396 case CALL_INSN:
397 length = insn_default_length (insn);
398 break;
399
400 case JUMP_INSN:
401 body = PATTERN (insn);
402 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
403 {
404 /* Alignment is machine-dependent and should be handled by
405 ADDR_VEC_ALIGN. */
406 }
407 else
408 length = insn_default_length (insn);
409 break;
410
411 case INSN:
412 body = PATTERN (insn);
413 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
414 return 0;
415
416 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
417 length = asm_insn_count (body) * insn_default_length (insn);
418 else if (GET_CODE (body) == SEQUENCE)
419 for (i = 0; i < XVECLEN (body, 0); i++)
420 length += get_attr_length (XVECEXP (body, 0, i));
421 else
422 length = insn_default_length (insn);
423 break;
424
425 default:
426 break;
427 }
428
429 #ifdef ADJUST_INSN_LENGTH
430 ADJUST_INSN_LENGTH (insn, length);
431 #endif
432 return length;
433 #else /* not HAVE_ATTR_length */
434 return 0;
435 #endif /* not HAVE_ATTR_length */
436 }
437 \f
438 /* Code to handle alignment inside shorten_branches. */
439
440 /* Here is an explanation of how the algorithm in align_fuzz can give
441 proper results:
442
443 Call a sequence of instructions beginning with alignment point X
444 and continuing until the next alignment point `block X'. When `X'
445 is used in an expression, it means the alignment value of the
446 alignment point.
447
448 Call the distance between the start of the first insn of block X, and
449 the end of the last insn of block X `IX', for the `inner size of X'.
450 This is clearly the sum of the instruction lengths.
451
452 Likewise with the next alignment-delimited block following X, which we
453 shall call block Y.
454
455 Call the distance between the start of the first insn of block X, and
456 the start of the first insn of block Y `OX', for the `outer size of X'.
457
458 The estimated padding is then OX - IX.
459
460 OX can be safely estimated as
461
462 if (X >= Y)
463 OX = round_up(IX, Y)
464 else
465 OX = round_up(IX, X) + Y - X
466
467 Clearly est(IX) >= real(IX), because that only depends on the
468 instruction lengths, and those being overestimated is a given.
469
470 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
471 we needn't worry about that when thinking about OX.
472
473 When X >= Y, the alignment provided by Y adds no uncertainty factor
474 for branch ranges starting before X, so we can just round what we have.
475 But when X < Y, we don't know anything about the, so to speak,
476 `middle bits', so we have to assume the worst when aligning up from an
477 address mod X to one mod Y, which is Y - X. */
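/* A worked instance of the estimate above (numbers chosen purely for
   illustration): let block X start with alignment X = 4, be followed by a
   label aligned to Y = 8, and have IX = 10 bytes of instructions.  Since
   X < Y, OX = round_up (10, 4) + 8 - 4 = 16, so the padding OX - IX is at
   most 6 bytes whatever address (mod 8) block X really starts on.  If
   instead X = 8 and Y = 4, then X >= Y and OX = round_up (10, 4) = 12,
   i.e. at most 2 bytes of padding.  */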
478
479 #ifndef LABEL_ALIGN
480 #define LABEL_ALIGN(LABEL) align_labels_log
481 #endif
482
483 #ifndef LABEL_ALIGN_MAX_SKIP
484 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
485 #endif
486
487 #ifndef LOOP_ALIGN
488 #define LOOP_ALIGN(LABEL) align_loops_log
489 #endif
490
491 #ifndef LOOP_ALIGN_MAX_SKIP
492 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
493 #endif
494
495 #ifndef LABEL_ALIGN_AFTER_BARRIER
496 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
497 #endif
498
499 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
500 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
501 #endif
502
503 #ifndef JUMP_ALIGN
504 #define JUMP_ALIGN(LABEL) align_jumps_log
505 #endif
506
507 #ifndef JUMP_ALIGN_MAX_SKIP
508 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
509 #endif
510
511 #ifndef ADDR_VEC_ALIGN
512 static int
513 final_addr_vec_align (rtx addr_vec)
514 {
515 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
516
517 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
518 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
519 return exact_log2 (align);
520
521 }
522
523 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
524 #endif
525
526 #ifndef INSN_LENGTH_ALIGNMENT
527 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
528 #endif
529
530 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
531
532 static int min_labelno, max_labelno;
533
534 #define LABEL_TO_ALIGNMENT(LABEL) \
535 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
536
537 #define LABEL_TO_MAX_SKIP(LABEL) \
538 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
539
540 /* For the benefit of port-specific code, do this also as a function. */
541
542 int
543 label_to_alignment (rtx label)
544 {
545 return LABEL_TO_ALIGNMENT (label);
546 }
547
548 #ifdef HAVE_ATTR_length
549 /* The differences in addresses
550 between a branch and its target might grow or shrink depending on
551 the alignment the start insn of the range (the branch for a forward
552 branch or the label for a backward branch) starts out on; if these
553 differences are used naively, they can even oscillate infinitely.
554 We therefore want to compute a 'worst case' address difference that
555 is independent of the alignment the start insn of the range ends
556 up on, and that is at least as large as the actual difference.
557 The function align_fuzz calculates the amount we have to add to the
558 naively computed difference, by traversing the part of the alignment
559 chain of the start insn of the range that is in front of the end insn
560 of the range, and considering for each alignment the maximum amount
561 that it might contribute to a size increase.
562
563 For casesi tables, we also want to know worst case minimum amounts of
564 address difference, in case a machine description wants to introduce
565 some common offset that is added to all offsets in a table.
566 For this purpose, align_fuzz with a growth argument of 0 computes the
567 appropriate adjustment. */
568
569 /* Compute the maximum delta by which the difference of the addresses of
570 START and END might grow / shrink due to a different address for start
571 which changes the size of alignment insns between START and END.
572 KNOWN_ALIGN_LOG is the alignment known for START.
573 GROWTH should be ~0 if the objective is to compute potential code size
574 increase, and 0 if the objective is to compute potential shrink.
575 The return value is undefined for any other value of GROWTH. */
576
577 static int
578 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
579 {
580 int uid = INSN_UID (start);
581 rtx align_label;
582 int known_align = 1 << known_align_log;
583 int end_shuid = INSN_SHUID (end);
584 int fuzz = 0;
585
586 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
587 {
588 int align_addr, new_align;
589
590 uid = INSN_UID (align_label);
591 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
592 if (uid_shuid[uid] > end_shuid)
593 break;
594 known_align_log = LABEL_TO_ALIGNMENT (align_label);
595 new_align = 1 << known_align_log;
596 if (new_align < known_align)
597 continue;
598 fuzz += (-align_addr ^ growth) & (new_align - known_align);
599 known_align = new_align;
600 }
601 return fuzz;
602 }
603
604 /* Compute a worst-case reference address of a branch so that it
605 can be safely used in the presence of aligned labels. Since the
606 size of the branch itself is unknown, the size of the branch is
607 not included in the range. I.e. for a forward branch, the reference
608 address is the end address of the branch as known from the previous
609 branch shortening pass, minus a value to account for possible size
610 increase due to alignment. For a backward branch, it is the start
611 address of the branch as known from the current pass, plus a value
612 to account for possible size increase due to alignment.
613 NB.: Therefore, the maximum offset allowed for backward branches needs
614 to exclude the branch size. */
615
616 int
617 insn_current_reference_address (rtx branch)
618 {
619 rtx dest, seq;
620 int seq_uid;
621
622 if (! INSN_ADDRESSES_SET_P ())
623 return 0;
624
625 seq = NEXT_INSN (PREV_INSN (branch));
626 seq_uid = INSN_UID (seq);
627 if (GET_CODE (branch) != JUMP_INSN)
628 /* This can happen for example on the PA; the objective is to know the
629 offset to address something in front of the start of the function.
630 Thus, we can treat it like a backward branch.
631 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
632 any alignment we'd encounter, so we skip the call to align_fuzz. */
633 return insn_current_address;
634 dest = JUMP_LABEL (branch);
635
636 /* BRANCH has no proper alignment chain set, so use SEQ.
637 BRANCH also has no INSN_SHUID. */
638 if (INSN_SHUID (seq) < INSN_SHUID (dest))
639 {
640 /* Forward branch. */
641 return (insn_last_address + insn_lengths[seq_uid]
642 - align_fuzz (seq, dest, length_unit_log, ~0));
643 }
644 else
645 {
646 /* Backward branch. */
647 return (insn_current_address
648 + align_fuzz (dest, seq, length_unit_log, ~0));
649 }
650 }
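/* A small worked example of the rule above (addresses are illustrative
   only): for a forward branch whose SEQ ended at address 100 on the
   previous pass, with at most 6 bytes of possible alignment growth between
   it and its target, the reference address used is 100 - 6 = 94.  For a
   backward branch now starting at address 200, with up to 4 bytes of
   possible growth back to its target, it is 200 + 4 = 204.  */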
651 #endif /* HAVE_ATTR_length */
652 \f
653 void
654 compute_alignments (void)
655 {
656 int log, max_skip, max_log;
657 basic_block bb;
658
659 if (label_align)
660 {
661 free (label_align);
662 label_align = 0;
663 }
664
665 max_labelno = max_label_num ();
666 min_labelno = get_first_label_num ();
667 label_align = xcalloc (max_labelno - min_labelno + 1,
668 sizeof (struct label_alignment));
669
670 /* If not optimizing or optimizing for size, don't assign any alignments. */
671 if (! optimize || optimize_size)
672 return;
673
674 FOR_EACH_BB (bb)
675 {
676 rtx label = BB_HEAD (bb);
677 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
678 edge e;
679
680 if (GET_CODE (label) != CODE_LABEL
681 || probably_never_executed_bb_p (bb))
682 continue;
683 max_log = LABEL_ALIGN (label);
684 max_skip = LABEL_ALIGN_MAX_SKIP;
685
686 for (e = bb->pred; e; e = e->pred_next)
687 {
688 if (e->flags & EDGE_FALLTHRU)
689 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
690 else
691 branch_frequency += EDGE_FREQUENCY (e);
692 }
693
694 /* There are two purposes for aligning a block with no fallthru incoming edge:
695 1) to avoid fetch stalls when the branch destination is near a cache boundary
696 2) to improve cache efficiency in case the previous block is not executed
697 (so it does not need to be in the cache).
698
699 To catch the first case, we align frequently executed blocks.
700 To catch the second, we align blocks that are executed more frequently
701 than their predecessor and whose predecessor is likely not to be executed
702 when the function is called. */
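/* Purely illustrative numbers, assuming the usual BB_FREQ_MAX of 10000:
   a block with no fallthru edge and an incoming branch frequency of 1500
   (> BB_FREQ_MAX / 10) is aligned; so is one with bb->frequency == 400
   whose predecessor has frequency 30 while the entry block has frequency
   1000, since 400 > 30 * 10 and 30 <= 1000 / 2.  */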
703
704 if (!has_fallthru
705 && (branch_frequency > BB_FREQ_MAX / 10
706 || (bb->frequency > bb->prev_bb->frequency * 10
707 && (bb->prev_bb->frequency
708 <= ENTRY_BLOCK_PTR->frequency / 2))))
709 {
710 log = JUMP_ALIGN (label);
711 if (max_log < log)
712 {
713 max_log = log;
714 max_skip = JUMP_ALIGN_MAX_SKIP;
715 }
716 }
717 /* In case the block is frequent and reached mostly by non-fallthru edges,
718 align it. It is most likely the first block of a loop. */
719 if (has_fallthru
720 && maybe_hot_bb_p (bb)
721 && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
722 && branch_frequency > fallthru_frequency * 2)
723 {
724 log = LOOP_ALIGN (label);
725 if (max_log < log)
726 {
727 max_log = log;
728 max_skip = LOOP_ALIGN_MAX_SKIP;
729 }
730 }
731 LABEL_TO_ALIGNMENT (label) = max_log;
732 LABEL_TO_MAX_SKIP (label) = max_skip;
733 }
734 }
735 \f
736 /* Make a pass over all insns and compute their actual lengths by shortening
737 any branches of variable length if possible. */
738
739 /* shorten_branches might be called multiple times: for example, the SH
740 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
741 In order to do this, it needs proper length information, which it obtains
742 by calling shorten_branches. This cannot be collapsed with
743 shorten_branches itself into a single pass unless we also want to integrate
744 reorg.c, since the branch splitting exposes new instructions with delay
745 slots. */
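/* A hedged sketch of the pattern described above; this hypothetical port
   hook is not part of this file.  */
#if 0
static void
example_machine_dependent_reorg (void)
{
  /* Recompute lengths and addresses for the current insn stream ...  */
  shorten_branches (get_insns ());

  /* ... so that INSN_ADDRESSES () and get_attr_length () are up to date
     and out-of-range branches can be found and split here.  final ()
     will run shorten_branches again afterwards.  */
}
#endif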
746
747 void
748 shorten_branches (rtx first ATTRIBUTE_UNUSED)
749 {
750 rtx insn;
751 int max_uid;
752 int i;
753 int max_log;
754 int max_skip;
755 #ifdef HAVE_ATTR_length
756 #define MAX_CODE_ALIGN 16
757 rtx seq;
758 int something_changed = 1;
759 char *varying_length;
760 rtx body;
761 int uid;
762 rtx align_tab[MAX_CODE_ALIGN];
763
764 #endif
765
766 /* Compute maximum UID and allocate label_align / uid_shuid. */
767 max_uid = get_max_uid ();
768
769 uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);
770
771 if (max_labelno != max_label_num ())
772 {
773 int old = max_labelno;
774 int n_labels;
775 int n_old_labels;
776
777 max_labelno = max_label_num ();
778
779 n_labels = max_labelno - min_labelno + 1;
780 n_old_labels = old - min_labelno + 1;
781
782 label_align = xrealloc (label_align,
783 n_labels * sizeof (struct label_alignment));
784
785 /* The range of labels grows monotonically in the function. An abort here
786 means that the initialization of the array got lost. */
787 if (n_old_labels > n_labels)
788 abort ();
789
790 memset (label_align + n_old_labels, 0,
791 (n_labels - n_old_labels) * sizeof (struct label_alignment));
792 }
793
794 /* Initialize label_align and set up uid_shuid to be strictly
795 monotonically rising with insn order. */
796 /* We use max_log here to keep track of the maximum alignment we want to
797 impose on the next CODE_LABEL (or the current one if we are processing
798 the CODE_LABEL itself). */
799
800 max_log = 0;
801 max_skip = 0;
802
803 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
804 {
805 int log;
806
807 INSN_SHUID (insn) = i++;
808 if (INSN_P (insn))
809 {
810 /* reorg might make the first insn of a loop be run only once,
811 and delete the label in front of it. Then we want to apply
812 the loop alignment to the new label created by reorg, which
813 is separated from the NOTE_INSN_LOOP_BEG by the former
814 loop start insn. */
815 }
816 else if (GET_CODE (insn) == CODE_LABEL)
817 {
818 rtx next;
819
820 /* Merge in alignments computed by compute_alignments. */
821 log = LABEL_TO_ALIGNMENT (insn);
822 if (max_log < log)
823 {
824 max_log = log;
825 max_skip = LABEL_TO_MAX_SKIP (insn);
826 }
827
828 log = LABEL_ALIGN (insn);
829 if (max_log < log)
830 {
831 max_log = log;
832 max_skip = LABEL_ALIGN_MAX_SKIP;
833 }
834 next = NEXT_INSN (insn);
835 /* ADDR_VECs only take room if read-only data goes into the text
836 section. */
837 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
838 if (next && GET_CODE (next) == JUMP_INSN)
839 {
840 rtx nextbody = PATTERN (next);
841 if (GET_CODE (nextbody) == ADDR_VEC
842 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
843 {
844 log = ADDR_VEC_ALIGN (next);
845 if (max_log < log)
846 {
847 max_log = log;
848 max_skip = LABEL_ALIGN_MAX_SKIP;
849 }
850 }
851 }
852 LABEL_TO_ALIGNMENT (insn) = max_log;
853 LABEL_TO_MAX_SKIP (insn) = max_skip;
854 max_log = 0;
855 max_skip = 0;
856 }
857 else if (GET_CODE (insn) == BARRIER)
858 {
859 rtx label;
860
861 for (label = insn; label && ! INSN_P (label);
862 label = NEXT_INSN (label))
863 if (GET_CODE (label) == CODE_LABEL)
864 {
865 log = LABEL_ALIGN_AFTER_BARRIER (insn);
866 if (max_log < log)
867 {
868 max_log = log;
869 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
870 }
871 break;
872 }
873 }
874 }
875 #ifdef HAVE_ATTR_length
876
877 /* Allocate the rest of the arrays. */
878 insn_lengths = xmalloc (max_uid * sizeof (*insn_lengths));
879 insn_lengths_max_uid = max_uid;
880 /* Syntax errors can lead to labels being outside of the main insn stream.
881 Initialize insn_addresses, so that we get reproducible results. */
882 INSN_ADDRESSES_ALLOC (max_uid);
883
884 varying_length = xcalloc (max_uid, sizeof (char));
885
886 /* Initialize uid_align. We scan instructions
887 from end to start, and keep in align_tab[n] the last seen insn
888 that does an alignment of at least n+1, i.e. the successor
889 in the alignment chain for an insn that does / has a known
890 alignment of n. */
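/* Illustration only: if, scanning backwards, the only alignment label seen
   so far is L1 with LABEL_TO_ALIGNMENT (L1) == 3, then align_tab[0],
   align_tab[1] and align_tab[2] all point to L1 while align_tab[3] and
   above are still NULL_RTX; so an insn with a known alignment of 2 gets L1
   as its alignment-chain successor, whereas an insn already known to be
   aligned to 1 << 3 gets none.  */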
891 uid_align = xcalloc (max_uid, sizeof *uid_align);
892
893 for (i = MAX_CODE_ALIGN; --i >= 0;)
894 align_tab[i] = NULL_RTX;
895 seq = get_last_insn ();
896 for (; seq; seq = PREV_INSN (seq))
897 {
898 int uid = INSN_UID (seq);
899 int log;
900 log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
901 uid_align[uid] = align_tab[0];
902 if (log)
903 {
904 /* Found an alignment label. */
905 uid_align[uid] = align_tab[log];
906 for (i = log - 1; i >= 0; i--)
907 align_tab[i] = seq;
908 }
909 }
910 #ifdef CASE_VECTOR_SHORTEN_MODE
911 if (optimize)
912 {
913 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
914 label fields. */
915
916 int min_shuid = INSN_SHUID (get_insns ()) - 1;
917 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
918 int rel;
919
920 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
921 {
922 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
923 int len, i, min, max, insn_shuid;
924 int min_align;
925 addr_diff_vec_flags flags;
926
927 if (GET_CODE (insn) != JUMP_INSN
928 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
929 continue;
930 pat = PATTERN (insn);
931 len = XVECLEN (pat, 1);
932 if (len <= 0)
933 abort ();
934 min_align = MAX_CODE_ALIGN;
935 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
936 {
937 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
938 int shuid = INSN_SHUID (lab);
939 if (shuid < min)
940 {
941 min = shuid;
942 min_lab = lab;
943 }
944 if (shuid > max)
945 {
946 max = shuid;
947 max_lab = lab;
948 }
949 if (min_align > LABEL_TO_ALIGNMENT (lab))
950 min_align = LABEL_TO_ALIGNMENT (lab);
951 }
952 XEXP (pat, 2) = gen_rtx_LABEL_REF (VOIDmode, min_lab);
953 XEXP (pat, 3) = gen_rtx_LABEL_REF (VOIDmode, max_lab);
954 insn_shuid = INSN_SHUID (insn);
955 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
956 flags.min_align = min_align;
957 flags.base_after_vec = rel > insn_shuid;
958 flags.min_after_vec = min > insn_shuid;
959 flags.max_after_vec = max > insn_shuid;
960 flags.min_after_base = min > rel;
961 flags.max_after_base = max > rel;
962 ADDR_DIFF_VEC_FLAGS (pat) = flags;
963 }
964 }
965 #endif /* CASE_VECTOR_SHORTEN_MODE */
966
967 /* Compute initial lengths, addresses, and varying flags for each insn. */
968 for (insn_current_address = 0, insn = first;
969 insn != 0;
970 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
971 {
972 uid = INSN_UID (insn);
973
974 insn_lengths[uid] = 0;
975
976 if (GET_CODE (insn) == CODE_LABEL)
977 {
978 int log = LABEL_TO_ALIGNMENT (insn);
979 if (log)
980 {
981 int align = 1 << log;
982 int new_address = (insn_current_address + align - 1) & -align;
983 insn_lengths[uid] = new_address - insn_current_address;
984 }
985 }
986
987 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
988
989 if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
990 || GET_CODE (insn) == CODE_LABEL)
991 continue;
992 if (INSN_DELETED_P (insn))
993 continue;
994
995 body = PATTERN (insn);
996 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
997 {
998 /* This only takes room if read-only data goes into the text
999 section. */
1000 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1001 insn_lengths[uid] = (XVECLEN (body,
1002 GET_CODE (body) == ADDR_DIFF_VEC)
1003 * GET_MODE_SIZE (GET_MODE (body)));
1004 /* Alignment is handled by ADDR_VEC_ALIGN. */
1005 }
1006 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1007 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1008 else if (GET_CODE (body) == SEQUENCE)
1009 {
1010 int i;
1011 int const_delay_slots;
1012 #ifdef DELAY_SLOTS
1013 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1014 #else
1015 const_delay_slots = 0;
1016 #endif
1017 /* Inside a delay slot sequence, we do not do any branch shortening
1018 if the shortening could change the number of delay slots
1019 of the branch. */
1020 for (i = 0; i < XVECLEN (body, 0); i++)
1021 {
1022 rtx inner_insn = XVECEXP (body, 0, i);
1023 int inner_uid = INSN_UID (inner_insn);
1024 int inner_length;
1025
1026 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1027 || asm_noperands (PATTERN (inner_insn)) >= 0)
1028 inner_length = (asm_insn_count (PATTERN (inner_insn))
1029 * insn_default_length (inner_insn));
1030 else
1031 inner_length = insn_default_length (inner_insn);
1032
1033 insn_lengths[inner_uid] = inner_length;
1034 if (const_delay_slots)
1035 {
1036 if ((varying_length[inner_uid]
1037 = insn_variable_length_p (inner_insn)) != 0)
1038 varying_length[uid] = 1;
1039 INSN_ADDRESSES (inner_uid) = (insn_current_address
1040 + insn_lengths[uid]);
1041 }
1042 else
1043 varying_length[inner_uid] = 0;
1044 insn_lengths[uid] += inner_length;
1045 }
1046 }
1047 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1048 {
1049 insn_lengths[uid] = insn_default_length (insn);
1050 varying_length[uid] = insn_variable_length_p (insn);
1051 }
1052
1053 /* If needed, do any adjustment. */
1054 #ifdef ADJUST_INSN_LENGTH
1055 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1056 if (insn_lengths[uid] < 0)
1057 fatal_insn ("negative insn length", insn);
1058 #endif
1059 }
1060
1061 /* Now loop over all the insns finding varying length insns. For each,
1062 get the current insn length. If it has changed, reflect the change.
1063 When nothing changes for a full pass, we are done. */
1064
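/* As an illustration of why more than one pass can be needed (the numbers
   are only an example): suppose a conditional branch has a 2-byte form
   reaching +/- 128 bytes and a 6-byte long form.  The initial lengths
   assume the long form; once a pass shrinks the insns between a branch and
   its target under the limit, the branch itself shrinks on the next pass,
   which may bring yet more branches into range.  The loop stops when a
   whole pass changes nothing.  */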
1065 while (something_changed)
1066 {
1067 something_changed = 0;
1068 insn_current_align = MAX_CODE_ALIGN - 1;
1069 for (insn_current_address = 0, insn = first;
1070 insn != 0;
1071 insn = NEXT_INSN (insn))
1072 {
1073 int new_length;
1074 #ifdef ADJUST_INSN_LENGTH
1075 int tmp_length;
1076 #endif
1077 int length_align;
1078
1079 uid = INSN_UID (insn);
1080
1081 if (GET_CODE (insn) == CODE_LABEL)
1082 {
1083 int log = LABEL_TO_ALIGNMENT (insn);
1084 if (log > insn_current_align)
1085 {
1086 int align = 1 << log;
1087 int new_address= (insn_current_address + align - 1) & -align;
1088 insn_lengths[uid] = new_address - insn_current_address;
1089 insn_current_align = log;
1090 insn_current_address = new_address;
1091 }
1092 else
1093 insn_lengths[uid] = 0;
1094 INSN_ADDRESSES (uid) = insn_current_address;
1095 continue;
1096 }
1097
1098 length_align = INSN_LENGTH_ALIGNMENT (insn);
1099 if (length_align < insn_current_align)
1100 insn_current_align = length_align;
1101
1102 insn_last_address = INSN_ADDRESSES (uid);
1103 INSN_ADDRESSES (uid) = insn_current_address;
1104
1105 #ifdef CASE_VECTOR_SHORTEN_MODE
1106 if (optimize && GET_CODE (insn) == JUMP_INSN
1107 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1108 {
1109 rtx body = PATTERN (insn);
1110 int old_length = insn_lengths[uid];
1111 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1112 rtx min_lab = XEXP (XEXP (body, 2), 0);
1113 rtx max_lab = XEXP (XEXP (body, 3), 0);
1114 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1115 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1116 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1117 rtx prev;
1118 int rel_align = 0;
1119 addr_diff_vec_flags flags;
1120
1121 /* Avoid automatic aggregate initialization. */
1122 flags = ADDR_DIFF_VEC_FLAGS (body);
1123
1124 /* Try to find a known alignment for rel_lab. */
1125 for (prev = rel_lab;
1126 prev
1127 && ! insn_lengths[INSN_UID (prev)]
1128 && ! (varying_length[INSN_UID (prev)] & 1);
1129 prev = PREV_INSN (prev))
1130 if (varying_length[INSN_UID (prev)] & 2)
1131 {
1132 rel_align = LABEL_TO_ALIGNMENT (prev);
1133 break;
1134 }
1135
1136 /* See the comment on addr_diff_vec_flags in rtl.h for the
1137 meaning of the flags values. base: REL_LAB vec: INSN */
1138 /* Anything after INSN still has addresses from the last
1139 pass; adjust these so that they reflect our current
1140 estimate for this pass. */
1141 if (flags.base_after_vec)
1142 rel_addr += insn_current_address - insn_last_address;
1143 if (flags.min_after_vec)
1144 min_addr += insn_current_address - insn_last_address;
1145 if (flags.max_after_vec)
1146 max_addr += insn_current_address - insn_last_address;
1147 /* We want to know the worst case, i.e. lowest possible value
1148 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1149 its offset is positive, and we have to be wary of code shrink;
1150 otherwise, it is negative, and we have to be wary of code
1151 size increase. */
1152 if (flags.min_after_base)
1153 {
1154 /* If INSN is between REL_LAB and MIN_LAB, the size
1155 changes we are about to make can change the alignment
1156 within the observed offset, therefore we have to break
1157 it up into two parts that are independent. */
1158 if (! flags.base_after_vec && flags.min_after_vec)
1159 {
1160 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1161 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1162 }
1163 else
1164 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1165 }
1166 else
1167 {
1168 if (flags.base_after_vec && ! flags.min_after_vec)
1169 {
1170 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1171 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1172 }
1173 else
1174 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1175 }
1176 /* Likewise, determine the highest possible value
1177 for the offset of MAX_LAB. */
1178 if (flags.max_after_base)
1179 {
1180 if (! flags.base_after_vec && flags.max_after_vec)
1181 {
1182 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1183 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1184 }
1185 else
1186 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1187 }
1188 else
1189 {
1190 if (flags.base_after_vec && ! flags.max_after_vec)
1191 {
1192 max_addr += align_fuzz (max_lab, insn, 0, 0);
1193 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1194 }
1195 else
1196 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1197 }
1198 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1199 max_addr - rel_addr,
1200 body));
1201 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1202 {
1203 insn_lengths[uid]
1204 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1205 insn_current_address += insn_lengths[uid];
1206 if (insn_lengths[uid] != old_length)
1207 something_changed = 1;
1208 }
1209
1210 continue;
1211 }
1212 #endif /* CASE_VECTOR_SHORTEN_MODE */
1213
1214 if (! (varying_length[uid]))
1215 {
1216 if (GET_CODE (insn) == INSN
1217 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1218 {
1219 int i;
1220
1221 body = PATTERN (insn);
1222 for (i = 0; i < XVECLEN (body, 0); i++)
1223 {
1224 rtx inner_insn = XVECEXP (body, 0, i);
1225 int inner_uid = INSN_UID (inner_insn);
1226
1227 INSN_ADDRESSES (inner_uid) = insn_current_address;
1228
1229 insn_current_address += insn_lengths[inner_uid];
1230 }
1231 }
1232 else
1233 insn_current_address += insn_lengths[uid];
1234
1235 continue;
1236 }
1237
1238 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1239 {
1240 int i;
1241
1242 body = PATTERN (insn);
1243 new_length = 0;
1244 for (i = 0; i < XVECLEN (body, 0); i++)
1245 {
1246 rtx inner_insn = XVECEXP (body, 0, i);
1247 int inner_uid = INSN_UID (inner_insn);
1248 int inner_length;
1249
1250 INSN_ADDRESSES (inner_uid) = insn_current_address;
1251
1252 /* insn_current_length returns 0 for insns with a
1253 non-varying length. */
1254 if (! varying_length[inner_uid])
1255 inner_length = insn_lengths[inner_uid];
1256 else
1257 inner_length = insn_current_length (inner_insn);
1258
1259 if (inner_length != insn_lengths[inner_uid])
1260 {
1261 insn_lengths[inner_uid] = inner_length;
1262 something_changed = 1;
1263 }
1264 insn_current_address += insn_lengths[inner_uid];
1265 new_length += inner_length;
1266 }
1267 }
1268 else
1269 {
1270 new_length = insn_current_length (insn);
1271 insn_current_address += new_length;
1272 }
1273
1274 #ifdef ADJUST_INSN_LENGTH
1275 /* If needed, do any adjustment. */
1276 tmp_length = new_length;
1277 ADJUST_INSN_LENGTH (insn, new_length);
1278 insn_current_address += (new_length - tmp_length);
1279 #endif
1280
1281 if (new_length != insn_lengths[uid])
1282 {
1283 insn_lengths[uid] = new_length;
1284 something_changed = 1;
1285 }
1286 }
1287 /* For a non-optimizing compile, do only a single pass. */
1288 if (!optimize)
1289 break;
1290 }
1291
1292 free (varying_length);
1293
1294 #endif /* HAVE_ATTR_length */
1295 }
1296
1297 #ifdef HAVE_ATTR_length
1298 /* Given the body of an INSN known to be generated by an ASM statement, return
1299 the number of machine instructions likely to be generated for this insn.
1300 This is used to compute its length. */
1301
1302 static int
1303 asm_insn_count (rtx body)
1304 {
1305 const char *template;
1306 int count = 1;
1307
1308 if (GET_CODE (body) == ASM_INPUT)
1309 template = XSTR (body, 0);
1310 else
1311 template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1312
1313 for (; *template; template++)
1314 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1315 count++;
1316
1317 return count;
1318 }
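/* For example (with the default ';' separator): the template
   "mov %1,%0; add %2,%0\n\tret" contains one ';' and one '\n', so
   asm_insn_count returns 3 and the asm is costed as three instructions
   of default length.  */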
1319 #endif
1320 \f
1321 /* Output assembler code for the start of a function,
1322 and initialize some of the variables in this file
1323 for the new function. The label for the function and associated
1324 assembler pseudo-ops have already been output in `assemble_start_function'.
1325
1326 FIRST is the first insn of the rtl for the function being compiled.
1327 FILE is the file to write assembler code to.
1328 OPTIMIZE is nonzero if we should eliminate redundant
1329 test and compare insns. */
1330
1331 void
1332 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1333 int optimize ATTRIBUTE_UNUSED)
1334 {
1335 block_depth = 0;
1336
1337 this_is_asm_operands = 0;
1338
1339 last_filename = locator_file (prologue_locator);
1340 last_linenum = locator_line (prologue_locator);
1341
1342 high_block_linenum = high_function_linenum = last_linenum;
1343
1344 (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1345
1346 #if defined (DWARF2_UNWIND_INFO) || defined (IA64_UNWIND_INFO)
1347 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1348 dwarf2out_begin_prologue (0, NULL);
1349 #endif
1350
1351 #ifdef LEAF_REG_REMAP
1352 if (current_function_uses_only_leaf_regs)
1353 leaf_renumber_regs (first);
1354 #endif
1355
1356 /* The Sun386i and perhaps other machines don't work right
1357 if the profiling code comes after the prologue. */
1358 #ifdef PROFILE_BEFORE_PROLOGUE
1359 if (current_function_profile)
1360 profile_function (file);
1361 #endif /* PROFILE_BEFORE_PROLOGUE */
1362
1363 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1364 if (dwarf2out_do_frame ())
1365 dwarf2out_frame_debug (NULL_RTX);
1366 #endif
1367
1368 /* If debugging, assign block numbers to all of the blocks in this
1369 function. */
1370 if (write_symbols)
1371 {
1372 remove_unnecessary_notes ();
1373 reemit_insn_block_notes ();
1374 number_blocks (current_function_decl);
1375 /* We never actually put out begin/end notes for the top-level
1376 block in the function. But, conceptually, that block is
1377 always needed. */
1378 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1379 }
1380
1381 /* First output the function prologue: code to set up the stack frame. */
1382 (*targetm.asm_out.function_prologue) (file, get_frame_size ());
1383
1384 /* If the machine represents the prologue as RTL, the profiling code must
1385 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1386 #ifdef HAVE_prologue
1387 if (! HAVE_prologue)
1388 #endif
1389 profile_after_prologue (file);
1390 }
1391
1392 static void
1393 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1394 {
1395 #ifndef PROFILE_BEFORE_PROLOGUE
1396 if (current_function_profile)
1397 profile_function (file);
1398 #endif /* not PROFILE_BEFORE_PROLOGUE */
1399 }
1400
1401 static void
1402 profile_function (FILE *file ATTRIBUTE_UNUSED)
1403 {
1404 #ifndef NO_PROFILE_COUNTERS
1405 # define NO_PROFILE_COUNTERS 0
1406 #endif
1407 #if defined(ASM_OUTPUT_REG_PUSH)
1408 int sval = current_function_returns_struct;
1409 rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1410 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1411 int cxt = current_function_needs_context;
1412 #endif
1413 #endif /* ASM_OUTPUT_REG_PUSH */
1414
1415 if (! NO_PROFILE_COUNTERS)
1416 {
1417 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1418 data_section ();
1419 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1420 (*targetm.asm_out.internal_label) (file, "LP", current_function_funcdef_no);
1421 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1422 }
1423
1424 function_section (current_function_decl);
1425
1426 #if defined(ASM_OUTPUT_REG_PUSH)
1427 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1428 ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1429 #endif
1430
1431 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1432 if (cxt)
1433 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1434 #else
1435 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1436 if (cxt)
1437 {
1438 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1439 }
1440 #endif
1441 #endif
1442
1443 FUNCTION_PROFILER (file, current_function_funcdef_no);
1444
1445 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1446 if (cxt)
1447 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1448 #else
1449 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1450 if (cxt)
1451 {
1452 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1453 }
1454 #endif
1455 #endif
1456
1457 #if defined(ASM_OUTPUT_REG_PUSH)
1458 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1459 ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1460 #endif
1461 }
1462
1463 /* Output assembler code for the end of a function.
1464 For clarity, args are same as those of `final_start_function'
1465 even though not all of them are needed. */
1466
1467 void
1468 final_end_function (void)
1469 {
1470 app_disable ();
1471
1472 (*debug_hooks->end_function) (high_function_linenum);
1473
1474 /* Finally, output the function epilogue:
1475 code to restore the stack frame and return to the caller. */
1476 (*targetm.asm_out.function_epilogue) (asm_out_file, get_frame_size ());
1477
1478 /* And debug output. */
1479 (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1480
1481 #if defined (DWARF2_UNWIND_INFO)
1482 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1483 && dwarf2out_do_frame ())
1484 dwarf2out_end_epilogue (last_linenum, last_filename);
1485 #endif
1486 }
1487 \f
1488 /* Output assembler code for some insns: all or part of a function.
1489 For description of args, see `final_start_function', above.
1490
1491 PRESCAN is 1 if we are not really outputting,
1492 just scanning as if we were outputting.
1493 Prescanning deletes and rearranges insns just like ordinary output.
1494 PRESCAN is -2 if we are outputting after having prescanned.
1495 In this case, don't try to delete or rearrange insns
1496 because that has already been done.
1497 Prescanning is done only on certain machines. */
1498
1499 void
1500 final (rtx first, FILE *file, int optimize, int prescan)
1501 {
1502 rtx insn;
1503 int max_line = 0;
1504 int max_uid = 0;
1505
1506 last_ignored_compare = 0;
1507
1508 /* Make a map indicating which line numbers appear in this function.
1509 When producing SDB debugging info, delete troublesome line number
1510 notes from inlined functions in other files as well as duplicate
1511 line number notes. */
1512 #ifdef SDB_DEBUGGING_INFO
1513 if (write_symbols == SDB_DEBUG)
1514 {
1515 rtx last = 0;
1516 for (insn = first; insn; insn = NEXT_INSN (insn))
1517 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1518 {
1519 if ((RTX_INTEGRATED_P (insn)
1520 && strcmp (NOTE_SOURCE_FILE (insn), main_input_filename) != 0)
1521 || (last != 0
1522 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1523 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)))
1524 {
1525 delete_insn (insn); /* Use delete_note. */
1526 continue;
1527 }
1528 last = insn;
1529 if (NOTE_LINE_NUMBER (insn) > max_line)
1530 max_line = NOTE_LINE_NUMBER (insn);
1531 }
1532 }
1533 else
1534 #endif
1535 {
1536 for (insn = first; insn; insn = NEXT_INSN (insn))
1537 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > max_line)
1538 max_line = NOTE_LINE_NUMBER (insn);
1539 }
1540
1541 line_note_exists = xcalloc (max_line + 1, sizeof (char));
1542
1543 for (insn = first; insn; insn = NEXT_INSN (insn))
1544 {
1545 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1546 max_uid = INSN_UID (insn);
1547 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1548 line_note_exists[NOTE_LINE_NUMBER (insn)] = 1;
1549 #ifdef HAVE_cc0
1550 /* If CC tracking across branches is enabled, record the insn which
1551 jumps to each label that is reached from only one place. */
1552 if (optimize && GET_CODE (insn) == JUMP_INSN)
1553 {
1554 rtx lab = JUMP_LABEL (insn);
1555 if (lab && LABEL_NUSES (lab) == 1)
1556 {
1557 LABEL_REFS (lab) = insn;
1558 }
1559 }
1560 #endif
1561 }
1562
1563 init_recog ();
1564
1565 CC_STATUS_INIT;
1566
1567 /* Output the insns. */
1568 for (insn = NEXT_INSN (first); insn;)
1569 {
1570 #ifdef HAVE_ATTR_length
1571 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1572 {
1573 /* This can be triggered by bugs elsewhere in the compiler if
1574 new insns are created after init_insn_lengths is called. */
1575 if (GET_CODE (insn) == NOTE)
1576 insn_current_address = -1;
1577 else
1578 abort ();
1579 }
1580 else
1581 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1582 #endif /* HAVE_ATTR_length */
1583
1584 insn = final_scan_insn (insn, file, optimize, prescan, 0);
1585 }
1586
1587 free (line_note_exists);
1588 line_note_exists = NULL;
1589 }
1590 \f
1591 const char *
1592 get_insn_template (int code, rtx insn)
1593 {
1594 switch (insn_data[code].output_format)
1595 {
1596 case INSN_OUTPUT_FORMAT_SINGLE:
1597 return insn_data[code].output.single;
1598 case INSN_OUTPUT_FORMAT_MULTI:
1599 return insn_data[code].output.multi[which_alternative];
1600 case INSN_OUTPUT_FORMAT_FUNCTION:
1601 if (insn == NULL)
1602 abort ();
1603 return (*insn_data[code].output.function) (recog_data.operand, insn);
1604
1605 default:
1606 abort ();
1607 }
1608 }
1609
1610 /* Emit the appropriate declaration for an alternate-entry-point
1611 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1612 LABEL_KIND != LABEL_NORMAL.
1613
1614 The case fall-through in this function is intentional. */
1615 static void
1616 output_alternate_entry_point (FILE *file, rtx insn)
1617 {
1618 const char *name = LABEL_NAME (insn);
1619
1620 switch (LABEL_KIND (insn))
1621 {
1622 case LABEL_WEAK_ENTRY:
1623 #ifdef ASM_WEAKEN_LABEL
1624 ASM_WEAKEN_LABEL (file, name);
1625 #endif
1626 case LABEL_GLOBAL_ENTRY:
1627 (*targetm.asm_out.globalize_label) (file, name);
1628 case LABEL_STATIC_ENTRY:
1629 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1630 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1631 #endif
1632 ASM_OUTPUT_LABEL (file, name);
1633 break;
1634
1635 case LABEL_NORMAL:
1636 default:
1637 abort ();
1638 }
1639 }
1640
1641 /* The final scan for one insn, INSN.
1642 Args are same as in `final', except that INSN
1643 is the insn being scanned.
1644 Value returned is the next insn to be scanned.
1645
1646 NOPEEPHOLES is the flag to disallow peephole processing (currently
1647 used for within delayed branch sequence output). */
1648
1649 rtx
1650 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1651 int prescan, int nopeepholes ATTRIBUTE_UNUSED)
1652 {
1653 #ifdef HAVE_cc0
1654 rtx set;
1655 #endif
1656
1657 insn_counter++;
1658
1659 /* Ignore deleted insns. These can occur when we split insns (due to a
1660 template of "#") while not optimizing. */
1661 if (INSN_DELETED_P (insn))
1662 return NEXT_INSN (insn);
1663
1664 switch (GET_CODE (insn))
1665 {
1666 case NOTE:
1667 if (prescan > 0)
1668 break;
1669
1670 switch (NOTE_LINE_NUMBER (insn))
1671 {
1672 case NOTE_INSN_DELETED:
1673 case NOTE_INSN_LOOP_BEG:
1674 case NOTE_INSN_LOOP_END:
1675 case NOTE_INSN_LOOP_END_TOP_COND:
1676 case NOTE_INSN_LOOP_CONT:
1677 case NOTE_INSN_LOOP_VTOP:
1678 case NOTE_INSN_FUNCTION_END:
1679 case NOTE_INSN_REPEATED_LINE_NUMBER:
1680 case NOTE_INSN_EXPECTED_VALUE:
1681 break;
1682
1683 case NOTE_INSN_BASIC_BLOCK:
1684 #ifdef IA64_UNWIND_INFO
1685 IA64_UNWIND_EMIT (asm_out_file, insn);
1686 #endif
1687 if (flag_debug_asm)
1688 fprintf (asm_out_file, "\t%s basic block %d\n",
1689 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1690 break;
1691
1692 case NOTE_INSN_EH_REGION_BEG:
1693 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1694 NOTE_EH_HANDLER (insn));
1695 break;
1696
1697 case NOTE_INSN_EH_REGION_END:
1698 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1699 NOTE_EH_HANDLER (insn));
1700 break;
1701
1702 case NOTE_INSN_PROLOGUE_END:
1703 (*targetm.asm_out.function_end_prologue) (file);
1704 profile_after_prologue (file);
1705 break;
1706
1707 case NOTE_INSN_EPILOGUE_BEG:
1708 (*targetm.asm_out.function_begin_epilogue) (file);
1709 break;
1710
1711 case NOTE_INSN_FUNCTION_BEG:
1712 app_disable ();
1713 (*debug_hooks->end_prologue) (last_linenum, last_filename);
1714 break;
1715
1716 case NOTE_INSN_BLOCK_BEG:
1717 if (debug_info_level == DINFO_LEVEL_NORMAL
1718 || debug_info_level == DINFO_LEVEL_VERBOSE
1719 || write_symbols == DWARF_DEBUG
1720 || write_symbols == DWARF2_DEBUG
1721 || write_symbols == VMS_AND_DWARF2_DEBUG
1722 || write_symbols == VMS_DEBUG)
1723 {
1724 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1725
1726 app_disable ();
1727 ++block_depth;
1728 high_block_linenum = last_linenum;
1729
1730 /* Output debugging info about the symbol-block beginning. */
1731 (*debug_hooks->begin_block) (last_linenum, n);
1732
1733 /* Mark this block as output. */
1734 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1735 }
1736 break;
1737
1738 case NOTE_INSN_BLOCK_END:
1739 if (debug_info_level == DINFO_LEVEL_NORMAL
1740 || debug_info_level == DINFO_LEVEL_VERBOSE
1741 || write_symbols == DWARF_DEBUG
1742 || write_symbols == DWARF2_DEBUG
1743 || write_symbols == VMS_AND_DWARF2_DEBUG
1744 || write_symbols == VMS_DEBUG)
1745 {
1746 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1747
1748 app_disable ();
1749
1750 /* End of a symbol-block. */
1751 --block_depth;
1752 if (block_depth < 0)
1753 abort ();
1754
1755 (*debug_hooks->end_block) (high_block_linenum, n);
1756 }
1757 break;
1758
1759 case NOTE_INSN_DELETED_LABEL:
1760 /* Emit the label. We may have deleted the CODE_LABEL because
1761 the label could be proved to be unreachable, though still
1762 referenced (in the form of having its address taken). */
1763 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1764 break;
1765
1766 case 0:
1767 break;
1768
1769 default:
1770 if (NOTE_LINE_NUMBER (insn) <= 0)
1771 abort ();
1772 break;
1773 }
1774 break;
1775
1776 case BARRIER:
1777 #if defined (DWARF2_UNWIND_INFO)
1778 if (dwarf2out_do_frame ())
1779 dwarf2out_frame_debug (insn);
1780 #endif
1781 break;
1782
1783 case CODE_LABEL:
1784 /* The target port might emit labels in the output function for
1785 some insn, e.g. sh.c output_branchy_insn. */
1786 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1787 {
1788 int align = LABEL_TO_ALIGNMENT (insn);
1789 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1790 int max_skip = LABEL_TO_MAX_SKIP (insn);
1791 #endif
1792
1793 if (align && NEXT_INSN (insn))
1794 {
1795 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1796 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1797 #else
1798 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1799 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1800 #else
1801 ASM_OUTPUT_ALIGN (file, align);
1802 #endif
1803 #endif
1804 }
1805 }
1806 #ifdef HAVE_cc0
1807 CC_STATUS_INIT;
1808 /* If this label is reached from only one place, set the condition
1809 codes from the instruction just before the branch. */
1810
1811 /* Disabled because some insns set cc_status in the C output code
1812 and NOTICE_UPDATE_CC alone can set incorrect status. */
1813 if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1814 {
1815 rtx jump = LABEL_REFS (insn);
1816 rtx barrier = prev_nonnote_insn (insn);
1817 rtx prev;
1818 /* If the LABEL_REFS field of this label has been set to point
1819 at a branch, the predecessor of the branch is a regular
1820 insn, and that branch is the only way to reach this label,
1821 set the condition codes based on the branch and its
1822 predecessor. */
1823 if (barrier && GET_CODE (barrier) == BARRIER
1824 && jump && GET_CODE (jump) == JUMP_INSN
1825 && (prev = prev_nonnote_insn (jump))
1826 && GET_CODE (prev) == INSN)
1827 {
1828 NOTICE_UPDATE_CC (PATTERN (prev), prev);
1829 NOTICE_UPDATE_CC (PATTERN (jump), jump);
1830 }
1831 }
1832 #endif
1833 if (prescan > 0)
1834 break;
1835
1836 if (LABEL_NAME (insn))
1837 (*debug_hooks->label) (insn);
1838
1839 if (app_on)
1840 {
1841 fputs (ASM_APP_OFF, file);
1842 app_on = 0;
1843 }
1844 if (NEXT_INSN (insn) != 0
1845 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
1846 {
1847 rtx nextbody = PATTERN (NEXT_INSN (insn));
1848
1849 /* If this label is followed by a jump-table,
1850 make sure we put the label in the read-only section. Also
1851 possibly write the label and jump table together. */
1852
1853 if (GET_CODE (nextbody) == ADDR_VEC
1854 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1855 {
1856 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1857 /* In this case, the case vector is being moved by the
1858 target, so don't output the label at all. Leave that
1859 to the back end macros. */
1860 #else
1861 if (! JUMP_TABLES_IN_TEXT_SECTION)
1862 {
1863 int log_align;
1864
1865 readonly_data_section ();
1866
1867 #ifdef ADDR_VEC_ALIGN
1868 log_align = ADDR_VEC_ALIGN (NEXT_INSN (insn));
1869 #else
1870 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1871 #endif
1872 ASM_OUTPUT_ALIGN (file, log_align);
1873 }
1874 else
1875 function_section (current_function_decl);
1876
1877 #ifdef ASM_OUTPUT_CASE_LABEL
1878 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1879 NEXT_INSN (insn));
1880 #else
1881 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1882 #endif
1883 #endif
1884 break;
1885 }
1886 }
1887 if (LABEL_ALT_ENTRY_P (insn))
1888 output_alternate_entry_point (file, insn);
1889 else
1890 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1891 break;
1892
1893 default:
1894 {
1895 rtx body = PATTERN (insn);
1896 int insn_code_number;
1897 const char *template;
1898 rtx note;
1899
1900 /* An INSN, JUMP_INSN or CALL_INSN.
1901 First check for special kinds that recog doesn't recognize. */
1902
1903 if (GET_CODE (body) == USE /* These are just declarations. */
1904 || GET_CODE (body) == CLOBBER)
1905 break;
1906
1907 #ifdef HAVE_cc0
1908 /* If there is a REG_CC_SETTER note on this insn, it means that
1909 the setting of the condition code was done in the delay slot
1910 of the insn that branched here. So recover the cc status
1911 from the insn that set it. */
1912
1913 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1914 if (note)
1915 {
1916 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1917 cc_prev_status = cc_status;
1918 }
1919 #endif
1920
1921 /* Detect insns that are really jump-tables
1922 and output them as such. */
1923
1924 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1925 {
1926 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1927 int vlen, idx;
1928 #endif
1929
1930 if (prescan > 0)
1931 break;
1932
1933 if (app_on)
1934 {
1935 fputs (ASM_APP_OFF, file);
1936 app_on = 0;
1937 }
1938
1939 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1940 if (GET_CODE (body) == ADDR_VEC)
1941 {
1942 #ifdef ASM_OUTPUT_ADDR_VEC
1943 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
1944 #else
1945 abort ();
1946 #endif
1947 }
1948 else
1949 {
1950 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
1951 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
1952 #else
1953 abort ();
1954 #endif
1955 }
1956 #else
1957 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
1958 for (idx = 0; idx < vlen; idx++)
1959 {
1960 if (GET_CODE (body) == ADDR_VEC)
1961 {
1962 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
1963 ASM_OUTPUT_ADDR_VEC_ELT
1964 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
1965 #else
1966 abort ();
1967 #endif
1968 }
1969 else
1970 {
1971 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
1972 ASM_OUTPUT_ADDR_DIFF_ELT
1973 (file,
1974 body,
1975 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
1976 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
1977 #else
1978 abort ();
1979 #endif
1980 }
1981 }
1982 #ifdef ASM_OUTPUT_CASE_END
1983 ASM_OUTPUT_CASE_END (file,
1984 CODE_LABEL_NUMBER (PREV_INSN (insn)),
1985 insn);
1986 #endif
1987 #endif
1988
1989 function_section (current_function_decl);
1990
1991 break;
1992 }
1993 /* Output source line debug info if this insn is at a new
1994 source position. */
1995 if (notice_source_line (insn))
1996 {
1997 (*debug_hooks->source_line) (last_linenum, last_filename);
1998 }
1999
2000 if (GET_CODE (body) == ASM_INPUT)
2001 {
2002 const char *string = XSTR (body, 0);
2003
2004 /* There's no telling what that did to the condition codes. */
2005 CC_STATUS_INIT;
2006 if (prescan > 0)
2007 break;
2008
2009 if (string[0])
2010 {
2011 if (! app_on)
2012 {
2013 fputs (ASM_APP_ON, file);
2014 app_on = 1;
2015 }
2016 fprintf (asm_out_file, "\t%s\n", string);
2017 }
2018 break;
2019 }
2020
2021 /* Detect `asm' construct with operands. */
2022 if (asm_noperands (body) >= 0)
2023 {
2024 unsigned int noperands = asm_noperands (body);
2025 rtx *ops = alloca (noperands * sizeof (rtx));
2026 const char *string;
2027
2028 /* There's no telling what that did to the condition codes. */
2029 CC_STATUS_INIT;
2030 if (prescan > 0)
2031 break;
2032
2033 /* Get out the operand values. */
2034 string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2035 /* Inhibit aborts on what would otherwise be compiler bugs. */
2036 insn_noperands = noperands;
2037 this_is_asm_operands = insn;
2038
2039 #ifdef FINAL_PRESCAN_INSN
2040 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2041 #endif
2042
2043 /* Output the insn using them. */
2044 if (string[0])
2045 {
2046 if (! app_on)
2047 {
2048 fputs (ASM_APP_ON, file);
2049 app_on = 1;
2050 }
2051 output_asm_insn (string, ops);
2052 }
2053
2054 this_is_asm_operands = 0;
2055 break;
2056 }
2057
2058 if (prescan <= 0 && app_on)
2059 {
2060 fputs (ASM_APP_OFF, file);
2061 app_on = 0;
2062 }
2063
2064 if (GET_CODE (body) == SEQUENCE)
2065 {
2066 /* A delayed-branch sequence */
2067 int i;
2068 rtx next;
2069
2070 if (prescan > 0)
2071 break;
2072 final_sequence = body;
2073
2074 /* Record the delay slots' frame information before the branch.
2075 This is needed for delayed calls: see execute_cfa_program(). */
2076 #if defined (DWARF2_UNWIND_INFO)
2077 if (dwarf2out_do_frame ())
2078 for (i = 1; i < XVECLEN (body, 0); i++)
2079 dwarf2out_frame_debug (XVECEXP (body, 0, i));
2080 #endif
2081
2082 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2083 force the restoration of a comparison that was previously
2084 thought unnecessary. If that happens, cancel this sequence
2085 and cause that insn to be restored. */
2086
2087 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, prescan, 1);
2088 if (next != XVECEXP (body, 0, 1))
2089 {
2090 final_sequence = 0;
2091 return next;
2092 }
2093
2094 for (i = 1; i < XVECLEN (body, 0); i++)
2095 {
2096 rtx insn = XVECEXP (body, 0, i);
2097 rtx next = NEXT_INSN (insn);
2098 /* We loop in case any instruction in a delay slot gets
2099 split. */
2100 do
2101 insn = final_scan_insn (insn, file, 0, prescan, 1);
2102 while (insn != next);
2103 }
2104 #ifdef DBR_OUTPUT_SEQEND
2105 DBR_OUTPUT_SEQEND (file);
2106 #endif
2107 final_sequence = 0;
2108
2109 /* If the insn requiring the delay slot was a CALL_INSN, the
2110 insns in the delay slot are actually executed before the
2111 called function. Hence we don't preserve any CC-setting
2112 actions in these insns and the CC must be marked as being
2113 clobbered by the function. */
2114 if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
2115 {
2116 CC_STATUS_INIT;
2117 }
2118 break;
2119 }
2120
2121 /* We have a real machine instruction as rtl. */
2122
2123 body = PATTERN (insn);
2124
2125 #ifdef HAVE_cc0
2126 set = single_set (insn);
2127
2128 /* Check for redundant test and compare instructions
2129 (when the condition codes are already set up as desired).
2130 This is done only when optimizing; if not optimizing,
2131 it should be possible for the user to alter a variable
2132 with the debugger in between statements
2133 and the next statement should reexamine the variable
2134 to compute the condition codes. */
2135
2136 if (optimize)
2137 {
2138 if (set
2139 && GET_CODE (SET_DEST (set)) == CC0
2140 && insn != last_ignored_compare)
2141 {
2142 if (GET_CODE (SET_SRC (set)) == SUBREG)
2143 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2144 else if (GET_CODE (SET_SRC (set)) == COMPARE)
2145 {
2146 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2147 XEXP (SET_SRC (set), 0)
2148 = alter_subreg (&XEXP (SET_SRC (set), 0));
2149 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2150 XEXP (SET_SRC (set), 1)
2151 = alter_subreg (&XEXP (SET_SRC (set), 1));
2152 }
2153 if ((cc_status.value1 != 0
2154 && rtx_equal_p (SET_SRC (set), cc_status.value1))
2155 || (cc_status.value2 != 0
2156 && rtx_equal_p (SET_SRC (set), cc_status.value2)))
2157 {
2158 /* Don't delete insn if it has an addressing side-effect. */
2159 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2160 /* or if anything in it is volatile. */
2161 && ! volatile_refs_p (PATTERN (insn)))
2162 {
2163 /* We don't really delete the insn; just ignore it. */
2164 last_ignored_compare = insn;
2165 break;
2166 }
2167 }
2168 }
2169 }
2170 #endif
2171
2172 #ifndef STACK_REGS
2173 /* Don't bother outputting obvious no-ops, even without -O.
2174 This optimization is fast and doesn't interfere with debugging.
2175 Don't do this if the insn is in a delay slot, since this
2176 will cause an improper number of delay insns to be written. */
2177 if (final_sequence == 0
2178 && prescan >= 0
2179 && GET_CODE (insn) == INSN && GET_CODE (body) == SET
2180 && GET_CODE (SET_SRC (body)) == REG
2181 && GET_CODE (SET_DEST (body)) == REG
2182 && REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
2183 break;
2184 #endif
2185
2186 #ifdef HAVE_cc0
2187 /* If this is a conditional branch, maybe modify it
2188 if the cc's are in a nonstandard state
2189 so that it accomplishes the same thing that it would
2190 do straightforwardly if the cc's were set up normally. */
2191
2192 if (cc_status.flags != 0
2193 && GET_CODE (insn) == JUMP_INSN
2194 && GET_CODE (body) == SET
2195 && SET_DEST (body) == pc_rtx
2196 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2197 && GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (body), 0))) == '<'
2198 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx
2199 /* This is done during prescan; it is not done again
2200 in final scan when prescan has been done. */
2201 && prescan >= 0)
2202 {
2203 /* This function may alter the contents of its argument
2204 and clear some of the cc_status.flags bits.
2205 It may also return 1 meaning condition now always true
2206 or -1 meaning condition now always false
2207 or 2 meaning condition nontrivial but altered. */
2208 int result = alter_cond (XEXP (SET_SRC (body), 0));
2209 /* If condition now has fixed value, replace the IF_THEN_ELSE
2210 with its then-operand or its else-operand. */
2211 if (result == 1)
2212 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2213 if (result == -1)
2214 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2215
2216 /* The jump is now either unconditional or a no-op.
2217 If it has become a no-op, don't try to output it.
2218 (It would not be recognized.) */
2219 if (SET_SRC (body) == pc_rtx)
2220 {
2221 delete_insn (insn);
2222 break;
2223 }
2224 else if (GET_CODE (SET_SRC (body)) == RETURN)
2225 /* Replace (set (pc) (return)) with (return). */
2226 PATTERN (insn) = body = SET_SRC (body);
2227
2228 /* Rerecognize the instruction if it has changed. */
2229 if (result != 0)
2230 INSN_CODE (insn) = -1;
2231 }
2232
2233 /* Make the same adjustments to instructions that examine the
2234 condition codes without jumping and instructions that
2235 handle conditional moves (if this machine has either one). */
2236
2237 if (cc_status.flags != 0
2238 && set != 0)
2239 {
2240 rtx cond_rtx, then_rtx, else_rtx;
2241
2242 if (GET_CODE (insn) != JUMP_INSN
2243 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2244 {
2245 cond_rtx = XEXP (SET_SRC (set), 0);
2246 then_rtx = XEXP (SET_SRC (set), 1);
2247 else_rtx = XEXP (SET_SRC (set), 2);
2248 }
2249 else
2250 {
2251 cond_rtx = SET_SRC (set);
2252 then_rtx = const_true_rtx;
2253 else_rtx = const0_rtx;
2254 }
2255
2256 switch (GET_CODE (cond_rtx))
2257 {
2258 case GTU:
2259 case GT:
2260 case LTU:
2261 case LT:
2262 case GEU:
2263 case GE:
2264 case LEU:
2265 case LE:
2266 case EQ:
2267 case NE:
2268 {
2269 int result;
2270 if (XEXP (cond_rtx, 0) != cc0_rtx)
2271 break;
2272 result = alter_cond (cond_rtx);
2273 if (result == 1)
2274 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2275 else if (result == -1)
2276 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2277 else if (result == 2)
2278 INSN_CODE (insn) = -1;
2279 if (SET_DEST (set) == SET_SRC (set))
2280 delete_insn (insn);
2281 }
2282 break;
2283
2284 default:
2285 break;
2286 }
2287 }
2288
2289 #endif
2290
2291 #ifdef HAVE_peephole
2292 /* Do machine-specific peephole optimizations if desired. */
2293
2294 if (optimize && !flag_no_peephole && !nopeepholes)
2295 {
2296 rtx next = peephole (insn);
2297 /* When peepholing, if there were notes within the peephole,
2298 emit them before the peephole. */
2299 if (next != 0 && next != NEXT_INSN (insn))
2300 {
2301 rtx prev = PREV_INSN (insn);
2302
2303 for (note = NEXT_INSN (insn); note != next;
2304 note = NEXT_INSN (note))
2305 final_scan_insn (note, file, optimize, prescan, nopeepholes);
2306
2307 /* In case this is prescan, put the notes
2308 in proper position for later rescan. */
2309 note = NEXT_INSN (insn);
2310 PREV_INSN (note) = prev;
2311 NEXT_INSN (prev) = note;
2312 NEXT_INSN (PREV_INSN (next)) = insn;
2313 PREV_INSN (insn) = PREV_INSN (next);
2314 NEXT_INSN (insn) = next;
2315 PREV_INSN (next) = insn;
2316 }
2317
2318 /* PEEPHOLE might have changed this. */
2319 body = PATTERN (insn);
2320 }
2321 #endif
2322
2323 /* Try to recognize the instruction.
2324 If successful, verify that the operands satisfy the
2325 constraints for the instruction. Crash if they don't,
2326 since `reload' should have changed them so that they do. */
2327
2328 insn_code_number = recog_memoized (insn);
2329 cleanup_subreg_operands (insn);
2330
2331 /* Dump the insn in the assembly for debugging. */
2332 if (flag_dump_rtl_in_asm)
2333 {
2334 print_rtx_head = ASM_COMMENT_START;
2335 print_rtl_single (asm_out_file, insn);
2336 print_rtx_head = "";
2337 }
2338
2339 if (! constrain_operands_cached (1))
2340 fatal_insn_not_found (insn);
2341
2342 /* Some target machines need to prescan each insn before
2343 it is output. */
2344
2345 #ifdef FINAL_PRESCAN_INSN
2346 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2347 #endif
2348
2349 #ifdef HAVE_conditional_execution
2350 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2351 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2352 else
2353 current_insn_predicate = NULL_RTX;
2354 #endif
2355
2356 #ifdef HAVE_cc0
2357 cc_prev_status = cc_status;
2358
2359 /* Update `cc_status' for this instruction.
2360 The instruction's output routine may change it further.
2361 If the output routine for a jump insn needs to depend
2362 on the cc status, it should look at cc_prev_status. */
2363
2364 NOTICE_UPDATE_CC (body, insn);
2365 #endif
2366
2367 current_output_insn = debug_insn = insn;
2368
2369 #if defined (DWARF2_UNWIND_INFO)
2370 if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
2371 dwarf2out_frame_debug (insn);
2372 #endif
2373
2374 /* Find the proper template for this insn. */
2375 template = get_insn_template (insn_code_number, insn);
2376
2377 /* If the C code returns 0, it means that it is a jump insn
2378 which follows a deleted test insn, and that test insn
2379 needs to be reinserted. */
2380 if (template == 0)
2381 {
2382 rtx prev;
2383
2384 if (prev_nonnote_insn (insn) != last_ignored_compare)
2385 abort ();
2386
2387 /* We have already processed the notes between the setter and
2388 the user. Make sure we don't process them again; this is
2389 particularly important if one of the notes is a block
2390 scope note or an EH note. */
2391 for (prev = insn;
2392 prev != last_ignored_compare;
2393 prev = PREV_INSN (prev))
2394 {
2395 if (GET_CODE (prev) == NOTE)
2396 delete_insn (prev); /* Use delete_note. */
2397 }
2398
2399 return prev;
2400 }
2401
2402 /* If the template is the string "#", it means that this insn must
2403 be split. */
2404 if (template[0] == '#' && template[1] == '\0')
2405 {
2406 rtx new = try_split (body, insn, 0);
2407
2408 /* If we didn't split the insn, go away. */
2409 if (new == insn && PATTERN (new) == body)
2410 fatal_insn ("could not split insn", insn);
2411
2412 #ifdef HAVE_ATTR_length
2413 /* This instruction should have been split in shorten_branches,
2414 to ensure that we would have valid length info for the
2415 splitees. */
2416 abort ();
2417 #endif
2418
2419 return new;
2420 }
2421
2422 if (prescan > 0)
2423 break;
2424
2425 #ifdef IA64_UNWIND_INFO
2426 IA64_UNWIND_EMIT (asm_out_file, insn);
2427 #endif
2428 /* Output assembler code from the template. */
2429
2430 output_asm_insn (template, recog_data.operand);
2431
2432 /* If necessary, report the effect that the instruction has on
2433 the unwind info. We've already done this for delay slots
2434 and call instructions. */
2435 #if defined (DWARF2_UNWIND_INFO)
2436 if (GET_CODE (insn) == INSN
2437 #if !defined (HAVE_prologue)
2438 && !ACCUMULATE_OUTGOING_ARGS
2439 #endif
2440 && final_sequence == 0
2441 && dwarf2out_do_frame ())
2442 dwarf2out_frame_debug (insn);
2443 #endif
2444
2445 #if 0
2446 /* It's not at all clear why we did this and doing so used to
2447 interfere with tests that used REG_WAS_0 notes, which are
2448 now gone, so let's try leaving it out. */
2449
2450 /* Mark this insn as having been output. */
2451 INSN_DELETED_P (insn) = 1;
2452 #endif
2453
2454 /* Emit information for vtable gc. */
2455 note = find_reg_note (insn, REG_VTABLE_REF, NULL_RTX);
2456
2457 current_output_insn = debug_insn = 0;
2458 }
2459 }
2460 return NEXT_INSN (insn);
2461 }
2462 \f
2463 /* Record the source file and line number of INSN, if it has any. Return
2464 true if the position is new and line number debug info should be output. */
2465
2466 static bool
2467 notice_source_line (rtx insn)
2468 {
2469 const char *filename = insn_file (insn);
2470 int linenum = insn_line (insn);
2471
2472 if (filename && (filename != last_filename || last_linenum != linenum))
2473 {
2474 last_filename = filename;
2475 last_linenum = linenum;
2476 high_block_linenum = MAX (last_linenum, high_block_linenum);
2477 high_function_linenum = MAX (last_linenum, high_function_linenum);
2478 return true;
2479 }
2480 return false;
2481 }
2482 \f
2483 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2484 directly to the desired hard register. */
2485
2486 void
2487 cleanup_subreg_operands (rtx insn)
2488 {
2489 int i;
2490 extract_insn_cached (insn);
2491 for (i = 0; i < recog_data.n_operands; i++)
2492 {
2493 /* The following test cannot use recog_data.operand when testing
2494 for a SUBREG: the underlying object might have been changed
2495 already if we are inside a match_operator expression that
2496 matches the else clause. Instead we test the underlying
2497 expression directly. */
2498 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2499 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2500 else if (GET_CODE (recog_data.operand[i]) == PLUS
2501 || GET_CODE (recog_data.operand[i]) == MULT
2502 || GET_CODE (recog_data.operand[i]) == MEM)
2503 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2504 }
2505
2506 for (i = 0; i < recog_data.n_dups; i++)
2507 {
2508 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2509 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2510 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2511 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2512 || GET_CODE (*recog_data.dup_loc[i]) == MEM)
2513 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2514 }
2515 }
2516
2517 /* If X is a SUBREG, replace it with a REG or a MEM,
2518 based on the thing it is a subreg of. */
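
/* For example, (subreg:QI (mem:SI ADDR) 3) is rewritten via adjust_address
   into a QImode MEM addressing ADDR plus 3 bytes, while (subreg:SI (reg:DI R) 4)
   collapses to a single-word hard register via simplify_subreg or, failing
   that, gen_rtx_REG_offset.  (Illustrative only; which hard register results
   depends on the target's endianness and register layout.)  */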
2519
2520 rtx
2521 alter_subreg (rtx *xp)
2522 {
2523 rtx x = *xp;
2524 rtx y = SUBREG_REG (x);
2525
2526 /* simplify_subreg does not remove subreg from volatile references.
2527 We are required to. */
2528 if (GET_CODE (y) == MEM)
2529 *xp = adjust_address (y, GET_MODE (x), SUBREG_BYTE (x));
2530 else
2531 {
2532 rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2533 SUBREG_BYTE (x));
2534
2535 if (new != 0)
2536 *xp = new;
2537 /* Simplify_subreg can't handle some REG cases, but we have to. */
2538 else if (GET_CODE (y) == REG)
2539 {
2540 unsigned int regno = subreg_hard_regno (x, 1);
2541 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2542 }
2543 else
2544 abort ();
2545 }
2546
2547 return *xp;
2548 }
2549
2550 /* Do alter_subreg on all the SUBREGs contained in X. */
2551
2552 static rtx
2553 walk_alter_subreg (rtx *xp)
2554 {
2555 rtx x = *xp;
2556 switch (GET_CODE (x))
2557 {
2558 case PLUS:
2559 case MULT:
2560 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2561 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2562 break;
2563
2564 case MEM:
2565 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2566 break;
2567
2568 case SUBREG:
2569 return alter_subreg (xp);
2570
2571 default:
2572 break;
2573 }
2574
2575 return *xp;
2576 }
2577 \f
2578 #ifdef HAVE_cc0
2579
2580 /* Given BODY, the body of a jump instruction, alter the jump condition
2581 as required by the bits that are set in cc_status.flags.
2582 Not all of the bits there can be handled at this level in all cases.
2583
2584 The value is normally 0.
2585 1 means that the condition has become always true.
2586 -1 means that the condition has become always false.
2587 2 means that COND has been altered. */
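
/* For example, if cc_status.flags has CC_REVERSED set and COND is
   (gt (cc0) (const_int 0)), the code is swapped to LT and 2 is returned;
   if CC_NOT_NEGATIVE is set and COND is a GE test, the jump is known to
   be always taken and 1 is returned.  (Illustrative readings of the
   cases handled below, not additional rules.)  */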
2588
2589 static int
2590 alter_cond (rtx cond)
2591 {
2592 int value = 0;
2593
2594 if (cc_status.flags & CC_REVERSED)
2595 {
2596 value = 2;
2597 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2598 }
2599
2600 if (cc_status.flags & CC_INVERTED)
2601 {
2602 value = 2;
2603 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2604 }
2605
2606 if (cc_status.flags & CC_NOT_POSITIVE)
2607 switch (GET_CODE (cond))
2608 {
2609 case LE:
2610 case LEU:
2611 case GEU:
2612 /* Jump becomes unconditional. */
2613 return 1;
2614
2615 case GT:
2616 case GTU:
2617 case LTU:
2618 /* Jump becomes no-op. */
2619 return -1;
2620
2621 case GE:
2622 PUT_CODE (cond, EQ);
2623 value = 2;
2624 break;
2625
2626 case LT:
2627 PUT_CODE (cond, NE);
2628 value = 2;
2629 break;
2630
2631 default:
2632 break;
2633 }
2634
2635 if (cc_status.flags & CC_NOT_NEGATIVE)
2636 switch (GET_CODE (cond))
2637 {
2638 case GE:
2639 case GEU:
2640 /* Jump becomes unconditional. */
2641 return 1;
2642
2643 case LT:
2644 case LTU:
2645 /* Jump becomes no-op. */
2646 return -1;
2647
2648 case LE:
2649 case LEU:
2650 PUT_CODE (cond, EQ);
2651 value = 2;
2652 break;
2653
2654 case GT:
2655 case GTU:
2656 PUT_CODE (cond, NE);
2657 value = 2;
2658 break;
2659
2660 default:
2661 break;
2662 }
2663
2664 if (cc_status.flags & CC_NO_OVERFLOW)
2665 switch (GET_CODE (cond))
2666 {
2667 case GEU:
2668 /* Jump becomes unconditional. */
2669 return 1;
2670
2671 case LEU:
2672 PUT_CODE (cond, EQ);
2673 value = 2;
2674 break;
2675
2676 case GTU:
2677 PUT_CODE (cond, NE);
2678 value = 2;
2679 break;
2680
2681 case LTU:
2682 /* Jump becomes no-op. */
2683 return -1;
2684
2685 default:
2686 break;
2687 }
2688
2689 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2690 switch (GET_CODE (cond))
2691 {
2692 default:
2693 abort ();
2694
2695 case NE:
2696 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2697 value = 2;
2698 break;
2699
2700 case EQ:
2701 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2702 value = 2;
2703 break;
2704 }
2705
2706 if (cc_status.flags & CC_NOT_SIGNED)
2707 /* The flags are valid if signed condition operators are converted
2708 to unsigned. */
2709 switch (GET_CODE (cond))
2710 {
2711 case LE:
2712 PUT_CODE (cond, LEU);
2713 value = 2;
2714 break;
2715
2716 case LT:
2717 PUT_CODE (cond, LTU);
2718 value = 2;
2719 break;
2720
2721 case GT:
2722 PUT_CODE (cond, GTU);
2723 value = 2;
2724 break;
2725
2726 case GE:
2727 PUT_CODE (cond, GEU);
2728 value = 2;
2729 break;
2730
2731 default:
2732 break;
2733 }
2734
2735 return value;
2736 }
2737 #endif
2738 \f
2739 /* Report inconsistency between the assembler template and the operands.
2740 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
2741
2742 void
2743 output_operand_lossage (const char *msgid, ...)
2744 {
2745 char *fmt_string;
2746 char *new_message;
2747 const char *pfx_str;
2748 va_list ap;
2749
2750 va_start (ap, msgid);
2751
2752 pfx_str = this_is_asm_operands ? _("invalid `asm': ") : "output_operand: ";
2753 asprintf (&fmt_string, "%s%s", pfx_str, _(msgid));
2754 vasprintf (&new_message, fmt_string, ap);
2755
2756 if (this_is_asm_operands)
2757 error_for_asm (this_is_asm_operands, "%s", new_message);
2758 else
2759 internal_error ("%s", new_message);
2760
2761 free (fmt_string);
2762 free (new_message);
2763 va_end (ap);
2764 }
2765 \f
2766 /* Output of assembler code from a template, and its subroutines. */
2767
2768 /* Annotate the assembly with a comment describing the pattern and
2769 alternative used. */
2770
2771 static void
2772 output_asm_name (void)
2773 {
2774 if (debug_insn)
2775 {
2776 int num = INSN_CODE (debug_insn);
2777 fprintf (asm_out_file, "\t%s %d\t%s",
2778 ASM_COMMENT_START, INSN_UID (debug_insn),
2779 insn_data[num].name);
2780 if (insn_data[num].n_alternatives > 1)
2781 fprintf (asm_out_file, "/%d", which_alternative + 1);
2782 #ifdef HAVE_ATTR_length
2783 fprintf (asm_out_file, "\t[length = %d]",
2784 get_attr_length (debug_insn));
2785 #endif
2786 /* Clear this so only the first assembler insn
2787 of any rtl insn will get the special comment for -dp. */
2788 debug_insn = 0;
2789 }
2790 }
2791
2792 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2793 or its address, return that expr. Set *PADDRESSP to 1 if the expr
2794 corresponds to the address of the object and 0 if to the object. */
2795
2796 static tree
2797 get_mem_expr_from_op (rtx op, int *paddressp)
2798 {
2799 tree expr;
2800 int inner_addressp;
2801
2802 *paddressp = 0;
2803
2804 if (GET_CODE (op) == REG)
2805 return REG_EXPR (op);
2806 else if (GET_CODE (op) != MEM)
2807 return 0;
2808
2809 if (MEM_EXPR (op) != 0)
2810 return MEM_EXPR (op);
2811
2812 /* Otherwise we have an address, so indicate it and look at the address. */
2813 *paddressp = 1;
2814 op = XEXP (op, 0);
2815
2816 /* First check if we have a decl for the address, then look at the right side
2817 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
2818 But don't allow the address itself to be indirect. */
2819 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2820 return expr;
2821 else if (GET_CODE (op) == PLUS
2822 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2823 return expr;
2824
2825 while (GET_RTX_CLASS (GET_CODE (op)) == '1'
2826 || GET_RTX_CLASS (GET_CODE (op)) == '2')
2827 op = XEXP (op, 0);
2828
2829 expr = get_mem_expr_from_op (op, &inner_addressp);
2830 return inner_addressp ? 0 : expr;
2831 }
2832
2833 /* Output operand names for assembler instructions. OPERANDS is the
2834 operand vector, OPORDER is the order to write the operands, and NOPS
2835 is the number of operands to write. */
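
/* With -fverbose-asm this appends a comment to the output line: for the
   first operand ASM_COMMENT_START followed by its source-level name, and
   for later operands the name after a comma; a `*' marks an operand that
   is the address of the named object, and "tmpN" stands in for a renamed
   pseudo register.  (Descriptive example of the loop below.)  */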
2836
2837 static void
2838 output_asm_operand_names (rtx *operands, int *oporder, int nops)
2839 {
2840 int wrote = 0;
2841 int i;
2842
2843 for (i = 0; i < nops; i++)
2844 {
2845 int addressp;
2846 rtx op = operands[oporder[i]];
2847 tree expr = get_mem_expr_from_op (op, &addressp);
2848
2849 fprintf (asm_out_file, "%c%s",
2850 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2851 wrote = 1;
2852 if (expr)
2853 {
2854 fprintf (asm_out_file, "%s",
2855 addressp ? "*" : "");
2856 print_mem_expr (asm_out_file, expr);
2857 wrote = 1;
2858 }
2859 else if (REG_P (op) && ORIGINAL_REGNO (op)
2860 && ORIGINAL_REGNO (op) != REGNO (op))
2861 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2862 }
2863 }
2864
2865 /* Output text from TEMPLATE to the assembler output file,
2866 obeying %-directions to substitute operands taken from
2867 the vector OPERANDS.
2868
2869 %N (for N a digit) means print operand N in usual manner.
2870 %lN means require operand N to be a CODE_LABEL or LABEL_REF
2871 and print the label name with no punctuation.
2872 %cN means require operand N to be a constant
2873 and print the constant expression with no punctuation.
2874 %aN means expect operand N to be a memory address
2875 (not a memory reference!) and print a reference
2876 to that address.
2877 %nN means expect operand N to be a constant
2878 and print a constant expression for minus the value
2879 of the operand, with no other punctuation. */
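
/* A purely illustrative example (hypothetical template and operands, not
   taken from any real target): with operand 0 a register and operand 1
   the constant 8, the template "add %0,%n1" emits "add", operand 0
   printed the default way, a comma, and then "-8" for the negated
   constant, while "%l2" would print the bare name of a CODE_LABEL
   operand 2.  "%%" emits a literal `%' and "%=" a number unique to the
   current insn; any other %-letter is passed to the target's
   PRINT_OPERAND macro.  */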
2880
2881 void
2882 output_asm_insn (const char *template, rtx *operands)
2883 {
2884 const char *p;
2885 int c;
2886 #ifdef ASSEMBLER_DIALECT
2887 int dialect = 0;
2888 #endif
2889 int oporder[MAX_RECOG_OPERANDS];
2890 char opoutput[MAX_RECOG_OPERANDS];
2891 int ops = 0;
2892
2893 /* An insn may return a null string template
2894 in a case where no assembler code is needed. */
2895 if (*template == 0)
2896 return;
2897
2898 memset (opoutput, 0, sizeof opoutput);
2899 p = template;
2900 putc ('\t', asm_out_file);
2901
2902 #ifdef ASM_OUTPUT_OPCODE
2903 ASM_OUTPUT_OPCODE (asm_out_file, p);
2904 #endif
2905
2906 while ((c = *p++))
2907 switch (c)
2908 {
2909 case '\n':
2910 if (flag_verbose_asm)
2911 output_asm_operand_names (operands, oporder, ops);
2912 if (flag_print_asm_name)
2913 output_asm_name ();
2914
2915 ops = 0;
2916 memset (opoutput, 0, sizeof opoutput);
2917
2918 putc (c, asm_out_file);
2919 #ifdef ASM_OUTPUT_OPCODE
2920 while ((c = *p) == '\t')
2921 {
2922 putc (c, asm_out_file);
2923 p++;
2924 }
2925 ASM_OUTPUT_OPCODE (asm_out_file, p);
2926 #endif
2927 break;
2928
2929 #ifdef ASSEMBLER_DIALECT
2930 case '{':
2931 {
2932 int i;
2933
2934 if (dialect)
2935 output_operand_lossage ("nested assembly dialect alternatives");
2936 else
2937 dialect = 1;
2938
2939 /* If we want the first dialect, do nothing. Otherwise, skip
2940 DIALECT_NUMBER of strings ending with '|'. */
2941 for (i = 0; i < dialect_number; i++)
2942 {
2943 while (*p && *p != '}' && *p++ != '|')
2944 ;
2945 if (*p == '}')
2946 break;
2947 if (*p == '|')
2948 p++;
2949 }
2950
2951 if (*p == '\0')
2952 output_operand_lossage ("unterminated assembly dialect alternative");
2953 }
2954 break;
2955
2956 case '|':
2957 if (dialect)
2958 {
2959 /* Skip to close brace. */
2960 do
2961 {
2962 if (*p == '\0')
2963 {
2964 output_operand_lossage ("unterminated assembly dialect alternative");
2965 break;
2966 }
2967 }
2968 while (*p++ != '}');
2969 dialect = 0;
2970 }
2971 else
2972 putc (c, asm_out_file);
2973 break;
2974
2975 case '}':
2976 if (! dialect)
2977 putc (c, asm_out_file);
2978 dialect = 0;
2979 break;
2980 #endif
2981
2982 case '%':
2983 /* %% outputs a single %. */
2984 if (*p == '%')
2985 {
2986 p++;
2987 putc (c, asm_out_file);
2988 }
2989 /* %= outputs a number which is unique to each insn in the entire
2990 compilation. This is useful for making local labels that are
2991 referred to more than once in a given insn. */
2992 else if (*p == '=')
2993 {
2994 p++;
2995 fprintf (asm_out_file, "%d", insn_counter);
2996 }
2997 /* % followed by a letter and some digits
2998 outputs an operand in a special way depending on the letter.
2999 Letters `acln' are implemented directly.
3000 Other letters are passed to `output_operand' so that
3001 the PRINT_OPERAND macro can define them. */
3002 else if (ISALPHA (*p))
3003 {
3004 int letter = *p++;
3005 c = atoi (p);
3006
3007 if (! ISDIGIT (*p))
3008 output_operand_lossage ("operand number missing after %%-letter");
3009 else if (this_is_asm_operands
3010 && (c < 0 || (unsigned int) c >= insn_noperands))
3011 output_operand_lossage ("operand number out of range");
3012 else if (letter == 'l')
3013 output_asm_label (operands[c]);
3014 else if (letter == 'a')
3015 output_address (operands[c]);
3016 else if (letter == 'c')
3017 {
3018 if (CONSTANT_ADDRESS_P (operands[c]))
3019 output_addr_const (asm_out_file, operands[c]);
3020 else
3021 output_operand (operands[c], 'c');
3022 }
3023 else if (letter == 'n')
3024 {
3025 if (GET_CODE (operands[c]) == CONST_INT)
3026 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3027 - INTVAL (operands[c]));
3028 else
3029 {
3030 putc ('-', asm_out_file);
3031 output_addr_const (asm_out_file, operands[c]);
3032 }
3033 }
3034 else
3035 output_operand (operands[c], letter);
3036
3037 if (!opoutput[c])
3038 oporder[ops++] = c;
3039 opoutput[c] = 1;
3040
3041 while (ISDIGIT (c = *p))
3042 p++;
3043 }
3044 /* % followed by a digit outputs an operand the default way. */
3045 else if (ISDIGIT (*p))
3046 {
3047 c = atoi (p);
3048 if (this_is_asm_operands
3049 && (c < 0 || (unsigned int) c >= insn_noperands))
3050 output_operand_lossage ("operand number out of range");
3051 else
3052 output_operand (operands[c], 0);
3053
3054 if (!opoutput[c])
3055 oporder[ops++] = c;
3056 opoutput[c] = 1;
3057
3058 while (ISDIGIT (c = *p))
3059 p++;
3060 }
3061 /* % followed by punctuation: output something for that
3062 punctuation character alone, with no operand.
3063 The PRINT_OPERAND macro decides what is actually done. */
3064 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3065 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3066 output_operand (NULL_RTX, *p++);
3067 #endif
3068 else
3069 output_operand_lossage ("invalid %%-code");
3070 break;
3071
3072 default:
3073 putc (c, asm_out_file);
3074 }
3075
3076 /* Write out the variable names for operands, if we know them. */
3077 if (flag_verbose_asm)
3078 output_asm_operand_names (operands, oporder, ops);
3079 if (flag_print_asm_name)
3080 output_asm_name ();
3081
3082 putc ('\n', asm_out_file);
3083 }
3084 \f
3085 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3086
3087 void
3088 output_asm_label (rtx x)
3089 {
3090 char buf[256];
3091
3092 if (GET_CODE (x) == LABEL_REF)
3093 x = XEXP (x, 0);
3094 if (GET_CODE (x) == CODE_LABEL
3095 || (GET_CODE (x) == NOTE
3096 && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3097 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3098 else
3099 output_operand_lossage ("`%%l' operand isn't a label");
3100
3101 assemble_name (asm_out_file, buf);
3102 }
3103
3104 /* Print operand X using machine-dependent assembler syntax.
3105 The macro PRINT_OPERAND is defined just to control this function.
3106 CODE is a non-digit that preceded the operand-number in the % spec,
3107 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3108 between the % and the digits.
3109 When CODE is a non-letter, X is 0.
3110
3111 The meanings of the letters are machine-dependent and controlled
3112 by PRINT_OPERAND. */
3113
3114 static void
3115 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3116 {
3117 if (x && GET_CODE (x) == SUBREG)
3118 x = alter_subreg (&x);
3119
3120 /* If X is a pseudo-register, abort now rather than writing trash to the
3121 assembler file. */
3122
3123 if (x && GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
3124 abort ();
3125
3126 PRINT_OPERAND (asm_out_file, x, code);
3127 }
3128
3129 /* Print a memory reference operand for address X
3130 using machine-dependent assembler syntax.
3131 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3132
3133 void
3134 output_address (rtx x)
3135 {
3136 walk_alter_subreg (&x);
3137 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3138 }
3139 \f
3140 /* Print an integer constant expression in assembler syntax.
3141 Addition and subtraction are the only arithmetic
3142 that may appear in these expressions. */
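
/* For instance, (const (plus (symbol_ref ("foo")) (const_int 4))) is
   printed as "foo+4", and a MINUS of two label references prints the
   first label, a `-', and the second label wrapped in the target's
   open/close paren strings.  (Illustrative only; label spellings come
   from ASM_GENERATE_INTERNAL_LABEL.)  */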
3143
3144 void
3145 output_addr_const (FILE *file, rtx x)
3146 {
3147 char buf[256];
3148
3149 restart:
3150 switch (GET_CODE (x))
3151 {
3152 case PC:
3153 putc ('.', file);
3154 break;
3155
3156 case SYMBOL_REF:
3157 #ifdef ASM_OUTPUT_SYMBOL_REF
3158 ASM_OUTPUT_SYMBOL_REF (file, x);
3159 #else
3160 assemble_name (file, XSTR (x, 0));
3161 #endif
3162 break;
3163
3164 case LABEL_REF:
3165 x = XEXP (x, 0);
3166 /* Fall through. */
3167 case CODE_LABEL:
3168 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3169 #ifdef ASM_OUTPUT_LABEL_REF
3170 ASM_OUTPUT_LABEL_REF (file, buf);
3171 #else
3172 assemble_name (file, buf);
3173 #endif
3174 break;
3175
3176 case CONST_INT:
3177 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3178 break;
3179
3180 case CONST:
3181 /* This used to output parentheses around the expression,
3182 but that does not work on the 386 (either ATT or BSD assembler). */
3183 output_addr_const (file, XEXP (x, 0));
3184 break;
3185
3186 case CONST_DOUBLE:
3187 if (GET_MODE (x) == VOIDmode)
3188 {
3189 /* We can use %d if the number is one word and positive. */
3190 if (CONST_DOUBLE_HIGH (x))
3191 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3192 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3193 else if (CONST_DOUBLE_LOW (x) < 0)
3194 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3195 else
3196 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3197 }
3198 else
3199 /* We can't handle floating point constants;
3200 PRINT_OPERAND must handle them. */
3201 output_operand_lossage ("floating constant misused");
3202 break;
3203
3204 case PLUS:
3205 /* Some assemblers need integer constants to appear last (eg masm). */
3206 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3207 {
3208 output_addr_const (file, XEXP (x, 1));
3209 if (INTVAL (XEXP (x, 0)) >= 0)
3210 fprintf (file, "+");
3211 output_addr_const (file, XEXP (x, 0));
3212 }
3213 else
3214 {
3215 output_addr_const (file, XEXP (x, 0));
3216 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3217 || INTVAL (XEXP (x, 1)) >= 0)
3218 fprintf (file, "+");
3219 output_addr_const (file, XEXP (x, 1));
3220 }
3221 break;
3222
3223 case MINUS:
3224 /* Avoid outputting things like x-x or x+5-x,
3225 since some assemblers can't handle that. */
3226 x = simplify_subtraction (x);
3227 if (GET_CODE (x) != MINUS)
3228 goto restart;
3229
3230 output_addr_const (file, XEXP (x, 0));
3231 fprintf (file, "-");
3232 if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3233 || GET_CODE (XEXP (x, 1)) == PC
3234 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3235 output_addr_const (file, XEXP (x, 1));
3236 else
3237 {
3238 fputs (targetm.asm_out.open_paren, file);
3239 output_addr_const (file, XEXP (x, 1));
3240 fputs (targetm.asm_out.close_paren, file);
3241 }
3242 break;
3243
3244 case ZERO_EXTEND:
3245 case SIGN_EXTEND:
3246 case SUBREG:
3247 output_addr_const (file, XEXP (x, 0));
3248 break;
3249
3250 default:
3251 #ifdef OUTPUT_ADDR_CONST_EXTRA
3252 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3253 break;
3254
3255 fail:
3256 #endif
3257 output_operand_lossage ("invalid expression as operand");
3258 }
3259 }
3260 \f
3261 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3262 %R prints the value of REGISTER_PREFIX.
3263 %L prints the value of LOCAL_LABEL_PREFIX.
3264 %U prints the value of USER_LABEL_PREFIX.
3265 %I prints the value of IMMEDIATE_PREFIX.
3266 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3267 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3268
3269 We handle alternate assembler dialects here, just like output_asm_insn. */
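
/* A hypothetical call, with made-up arguments, as an illustration:
   asm_fprintf (file, "\tld\t%R%s,%I%wd\n", name, offset);
   prints a tab, "ld", another tab, the register prefix followed by the
   string NAME, a comma, the immediate prefix, and OFFSET formatted as a
   HOST_WIDE_INT in decimal.  */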
3270
3271 void
3272 asm_fprintf (FILE *file, const char *p, ...)
3273 {
3274 char buf[10];
3275 char *q, c;
3276 va_list argptr;
3277
3278 va_start (argptr, p);
3279
3280 buf[0] = '%';
3281
3282 while ((c = *p++))
3283 switch (c)
3284 {
3285 #ifdef ASSEMBLER_DIALECT
3286 case '{':
3287 {
3288 int i;
3289
3290 /* If we want the first dialect, do nothing. Otherwise, skip
3291 DIALECT_NUMBER of strings ending with '|'. */
3292 for (i = 0; i < dialect_number; i++)
3293 {
3294 while (*p && *p++ != '|')
3295 ;
3296
3297 if (*p == '|')
3298 p++;
3299 }
3300 }
3301 break;
3302
3303 case '|':
3304 /* Skip to close brace. */
3305 while (*p && *p++ != '}')
3306 ;
3307 break;
3308
3309 case '}':
3310 break;
3311 #endif
3312
3313 case '%':
3314 c = *p++;
3315 q = &buf[1];
3316 while (strchr ("-+ #0", c))
3317 {
3318 *q++ = c;
3319 c = *p++;
3320 }
3321 while (ISDIGIT (c) || c == '.')
3322 {
3323 *q++ = c;
3324 c = *p++;
3325 }
3326 switch (c)
3327 {
3328 case '%':
3329 putc ('%', file);
3330 break;
3331
3332 case 'd': case 'i': case 'u':
3333 case 'x': case 'X': case 'o':
3334 case 'c':
3335 *q++ = c;
3336 *q = 0;
3337 fprintf (file, buf, va_arg (argptr, int));
3338 break;
3339
3340 case 'w':
3341 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3342 'o' cases, but we do not check for those cases. It
3343 means that the value is a HOST_WIDE_INT, which may be
3344 either `long' or `long long'. */
3345 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3346 q += strlen (HOST_WIDE_INT_PRINT);
3347 *q++ = *p++;
3348 *q = 0;
3349 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3350 break;
3351
3352 case 'l':
3353 *q++ = c;
3354 #ifdef HAVE_LONG_LONG
3355 if (*p == 'l')
3356 {
3357 *q++ = *p++;
3358 *q++ = *p++;
3359 *q = 0;
3360 fprintf (file, buf, va_arg (argptr, long long));
3361 }
3362 else
3363 #endif
3364 {
3365 *q++ = *p++;
3366 *q = 0;
3367 fprintf (file, buf, va_arg (argptr, long));
3368 }
3369
3370 break;
3371
3372 case 's':
3373 *q++ = c;
3374 *q = 0;
3375 fprintf (file, buf, va_arg (argptr, char *));
3376 break;
3377
3378 case 'O':
3379 #ifdef ASM_OUTPUT_OPCODE
3380 ASM_OUTPUT_OPCODE (asm_out_file, p);
3381 #endif
3382 break;
3383
3384 case 'R':
3385 #ifdef REGISTER_PREFIX
3386 fprintf (file, "%s", REGISTER_PREFIX);
3387 #endif
3388 break;
3389
3390 case 'I':
3391 #ifdef IMMEDIATE_PREFIX
3392 fprintf (file, "%s", IMMEDIATE_PREFIX);
3393 #endif
3394 break;
3395
3396 case 'L':
3397 #ifdef LOCAL_LABEL_PREFIX
3398 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3399 #endif
3400 break;
3401
3402 case 'U':
3403 fputs (user_label_prefix, file);
3404 break;
3405
3406 #ifdef ASM_FPRINTF_EXTENSIONS
3407 /* Uppercase letters are reserved for general use by asm_fprintf
3408 and so are not available to target specific code. In order to
3409 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
3410 they are defined here. As they get turned into real extensions
3411 to asm_fprintf they should be removed from this list. */
3412 case 'A': case 'B': case 'C': case 'D': case 'E':
3413 case 'F': case 'G': case 'H': case 'J': case 'K':
3414 case 'M': case 'N': case 'P': case 'Q': case 'S':
3415 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3416 break;
3417
3418 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3419 #endif
3420 default:
3421 abort ();
3422 }
3423 break;
3424
3425 default:
3426 putc (c, file);
3427 }
3428 va_end (argptr);
3429 }
3430 \f
3431 /* Split up a CONST_DOUBLE or integer constant rtx
3432 into two rtx's for single words,
3433 storing in *FIRST the word that comes first in memory in the target
3434 and in *SECOND the other. */
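
/* For example, with 32-bit words and a 64-bit HOST_WIDE_INT, splitting
   (const_int 0x100000002) produces the word values 2 and 1: *FIRST gets
   (const_int 2) and *SECOND gets (const_int 1) when !WORDS_BIG_ENDIAN,
   and the reverse when WORDS_BIG_ENDIAN.  (Illustrative values only.)  */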
3435
3436 void
3437 split_double (rtx value, rtx *first, rtx *second)
3438 {
3439 if (GET_CODE (value) == CONST_INT)
3440 {
3441 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3442 {
3443 /* In this case the CONST_INT holds both target words.
3444 Extract the bits from it into two word-sized pieces.
3445 Sign extend each half to HOST_WIDE_INT. */
3446 unsigned HOST_WIDE_INT low, high;
3447 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3448
3449 /* Set sign_bit to the most significant bit of a word. */
3450 sign_bit = 1;
3451 sign_bit <<= BITS_PER_WORD - 1;
3452
3453 /* Set mask so that all bits of the word are set. We could
3454 have used 1 << BITS_PER_WORD instead of basing the
3455 calculation on sign_bit. However, on machines where
3456 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3457 compiler warning, even though the code would never be
3458 executed. */
3459 mask = sign_bit << 1;
3460 mask--;
3461
3462 /* Set sign_extend as any remaining bits. */
3463 sign_extend = ~mask;
3464
3465 /* Pick the lower word and sign-extend it. */
3466 low = INTVAL (value);
3467 low &= mask;
3468 if (low & sign_bit)
3469 low |= sign_extend;
3470
3471 /* Pick the higher word, shifted to the least significant
3472 bits, and sign-extend it. */
3473 high = INTVAL (value);
3474 high >>= BITS_PER_WORD - 1;
3475 high >>= 1;
3476 high &= mask;
3477 if (high & sign_bit)
3478 high |= sign_extend;
3479
3480 /* Store the words in the target machine order. */
3481 if (WORDS_BIG_ENDIAN)
3482 {
3483 *first = GEN_INT (high);
3484 *second = GEN_INT (low);
3485 }
3486 else
3487 {
3488 *first = GEN_INT (low);
3489 *second = GEN_INT (high);
3490 }
3491 }
3492 else
3493 {
3494 /* The rule for using CONST_INT for a wider mode
3495 is that we regard the value as signed.
3496 So sign-extend it. */
3497 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3498 if (WORDS_BIG_ENDIAN)
3499 {
3500 *first = high;
3501 *second = value;
3502 }
3503 else
3504 {
3505 *first = value;
3506 *second = high;
3507 }
3508 }
3509 }
3510 else if (GET_CODE (value) != CONST_DOUBLE)
3511 {
3512 if (WORDS_BIG_ENDIAN)
3513 {
3514 *first = const0_rtx;
3515 *second = value;
3516 }
3517 else
3518 {
3519 *first = value;
3520 *second = const0_rtx;
3521 }
3522 }
3523 else if (GET_MODE (value) == VOIDmode
3524 /* This is the old way we did CONST_DOUBLE integers. */
3525 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3526 {
3527 /* In an integer, the words are defined as most and least significant.
3528 So order them by the target's convention. */
3529 if (WORDS_BIG_ENDIAN)
3530 {
3531 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3532 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3533 }
3534 else
3535 {
3536 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3537 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3538 }
3539 }
3540 else
3541 {
3542 REAL_VALUE_TYPE r;
3543 long l[2];
3544 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3545
3546 /* Note, this converts the REAL_VALUE_TYPE to the target's
3547 format, splits up the floating point double and outputs
3548 exactly 32 bits of it into each of l[0] and l[1] --
3549 not necessarily BITS_PER_WORD bits. */
3550 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3551
3552 /* If 32 bits is an entire word for the target, but not for the host,
3553 then sign-extend on the host so that the number will look the same
3554 way on the host that it would on the target. See for instance
3555 simplify_unary_operation. The #if is needed to avoid compiler
3556 warnings. */
3557
3558 #if HOST_BITS_PER_LONG > 32
3559 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3560 {
3561 if (l[0] & ((long) 1 << 31))
3562 l[0] |= ((long) (-1) << 32);
3563 if (l[1] & ((long) 1 << 31))
3564 l[1] |= ((long) (-1) << 32);
3565 }
3566 #endif
3567
3568 *first = GEN_INT ((HOST_WIDE_INT) l[0]);
3569 *second = GEN_INT ((HOST_WIDE_INT) l[1]);
3570 }
3571 }
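
#if 0
/* Hypothetical sketch (directive name and function made up for
   illustration): a target could emit a double-word integer constant by
   splitting it with split_double and printing each half.  */
static void
example_output_double_word (FILE *file, rtx value)
{
  rtx first, second;

  split_double (value, &first, &second);
  fputs ("\t.word ", file);
  output_addr_const (file, first);
  fputs ("\n\t.word ", file);
  output_addr_const (file, second);
  putc ('\n', file);
}
#endif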
3572 \f
3573 /* Return nonzero if this function has no function calls. */
3574
3575 int
3576 leaf_function_p (void)
3577 {
3578 rtx insn;
3579 rtx link;
3580
3581 if (current_function_profile || profile_arc_flag)
3582 return 0;
3583
3584 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3585 {
3586 if (GET_CODE (insn) == CALL_INSN
3587 && ! SIBLING_CALL_P (insn))
3588 return 0;
3589 if (GET_CODE (insn) == INSN
3590 && GET_CODE (PATTERN (insn)) == SEQUENCE
3591 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3592 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3593 return 0;
3594 }
3595 for (link = current_function_epilogue_delay_list;
3596 link;
3597 link = XEXP (link, 1))
3598 {
3599 insn = XEXP (link, 0);
3600
3601 if (GET_CODE (insn) == CALL_INSN
3602 && ! SIBLING_CALL_P (insn))
3603 return 0;
3604 if (GET_CODE (insn) == INSN
3605 && GET_CODE (PATTERN (insn)) == SEQUENCE
3606 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3607 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3608 return 0;
3609 }
3610
3611 return 1;
3612 }
3613
3614 /* Return 1 if the branch is a forward branch.
3615 Uses the insn_shuid array, so it works only in the final pass. May be used
3616 by output templates to add branch prediction hints.
3617 */
3618 int
3619 final_forward_branch_p (rtx insn)
3620 {
3621 int insn_id, label_id;
3622 if (!uid_shuid)
3623 abort ();
3624 insn_id = INSN_SHUID (insn);
3625 label_id = INSN_SHUID (JUMP_LABEL (insn));
3626 /* We've hit some insns that do not have id information available. */
3627 if (!insn_id || !label_id)
3628 abort ();
3629 return insn_id < label_id;
3630 }
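
#if 0
/* Purely illustrative sketch (hypothetical helper, not used by any port
   in this file): an insn output template's C fragment could consult
   final_forward_branch_p to choose a static branch-prediction hint.  */
static const char *
example_branch_hint_suffix (rtx jump)
{
  /* Treat forward branches as not taken, backward branches as taken.  */
  return final_forward_branch_p (jump) ? ",pn" : ",pt";
}
#endif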
3631
3632 /* On some machines, a function with no call insns
3633 can run faster if it doesn't create its own register window.
3634 When output, the leaf function should use only the "output"
3635 registers. Ordinarily, the function would be compiled to use
3636 the "input" registers to find its arguments; it is a candidate
3637 for leaf treatment if it uses only the "input" registers.
3638 Leaf function treatment means renumbering so the function
3639 uses the "output" registers instead. */
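
/* For instance, on a register-window target such as SPARC the incoming
   "input" registers would be remapped by LEAF_REG_REMAP onto the
   caller-visible "output" registers, so the function can run without
   allocating a register window of its own.  (Target-specific example;
   the exact mapping is defined by the port.)  */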
3640
3641 #ifdef LEAF_REGISTERS
3642
3643 /* Return 1 if this function uses only the registers that can be
3644 safely renumbered. */
3645
3646 int
3647 only_leaf_regs_used (void)
3648 {
3649 int i;
3650 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3651
3652 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3653 if ((regs_ever_live[i] || global_regs[i])
3654 && ! permitted_reg_in_leaf_functions[i])
3655 return 0;
3656
3657 if (current_function_uses_pic_offset_table
3658 && pic_offset_table_rtx != 0
3659 && GET_CODE (pic_offset_table_rtx) == REG
3660 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3661 return 0;
3662
3663 return 1;
3664 }
3665
3666 /* Scan all instructions and renumber all registers into those
3667 available in leaf functions. */
3668
3669 static void
3670 leaf_renumber_regs (rtx first)
3671 {
3672 rtx insn;
3673
3674 /* Renumber only the actual patterns.
3675 The reg-notes can contain frame pointer refs,
3676 and renumbering them could crash, and should not be needed. */
3677 for (insn = first; insn; insn = NEXT_INSN (insn))
3678 if (INSN_P (insn))
3679 leaf_renumber_regs_insn (PATTERN (insn));
3680 for (insn = current_function_epilogue_delay_list;
3681 insn;
3682 insn = XEXP (insn, 1))
3683 if (INSN_P (XEXP (insn, 0)))
3684 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3685 }
3686
3687 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
3688 available in leaf functions. */
3689
3690 void
3691 leaf_renumber_regs_insn (rtx in_rtx)
3692 {
3693 int i, j;
3694 const char *format_ptr;
3695
3696 if (in_rtx == 0)
3697 return;
3698
3699 /* Renumber all input-registers into output-registers.
3700 The `used' bit on a REG rtx marks it as already renumbered,
3701 so each register is remapped at most once. */
3702
3703 if (GET_CODE (in_rtx) == REG)
3704 {
3705 int newreg;
3706
3707 /* Don't renumber the same reg twice. */
3708 if (in_rtx->used)
3709 return;
3710
3711 newreg = REGNO (in_rtx);
3712 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
3713 to reach here as part of a REG_NOTE. */
3714 if (newreg >= FIRST_PSEUDO_REGISTER)
3715 {
3716 in_rtx->used = 1;
3717 return;
3718 }
3719 newreg = LEAF_REG_REMAP (newreg);
3720 if (newreg < 0)
3721 abort ();
3722 regs_ever_live[REGNO (in_rtx)] = 0;
3723 regs_ever_live[newreg] = 1;
3724 REGNO (in_rtx) = newreg;
3725 in_rtx->used = 1;
3726 }
3727
3728 if (INSN_P (in_rtx))
3729 {
3730 /* Inside a SEQUENCE, we find insns.
3731 Renumber just the patterns of these insns,
3732 just as we do for the top-level insns. */
3733 leaf_renumber_regs_insn (PATTERN (in_rtx));
3734 return;
3735 }
3736
3737 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3738
3739 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3740 switch (*format_ptr++)
3741 {
3742 case 'e':
3743 leaf_renumber_regs_insn (XEXP (in_rtx, i));
3744 break;
3745
3746 case 'E':
3747 if (NULL != XVEC (in_rtx, i))
3748 {
3749 for (j = 0; j < XVECLEN (in_rtx, i); j++)
3750 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3751 }
3752 break;
3753
3754 case 'S':
3755 case 's':
3756 case '0':
3757 case 'i':
3758 case 'w':
3759 case 'n':
3760 case 'u':
3761 break;
3762
3763 default:
3764 abort ();
3765 }
3766 }
3767 #endif
3768
3769
3770 /* When -gused is used, emit debug info for only used symbols. But in
3771 addition to the standard intercepted debug_hooks there are some direct
3772 calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
3773 Those routines may also be called from a higher level intercepted routine. So
3774 to prevent recording data for an inner call to one of these for an intercept,
3775 we maintain an intercept nesting counter (debug_nesting). We only save the
3776 intercepted arguments if the nesting is 1. */
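
/* For example, when one of those routines is entered directly for a used
   symbol the nesting becomes 1 and its arguments are recorded; any type
   symbols it queues via debug_queue_symbol are written out by
   debug_flush_symbol_queue only once the nesting has dropped back to 0,
   whereas a nested call made from a higher-level intercept records
   nothing.  (Illustrative walk-through of the scheme described above.)  */
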
3777 int debug_nesting = 0;
3778
3779 static tree *symbol_queue;
3780 int symbol_queue_index = 0;
3781 static int symbol_queue_size = 0;
3782
3783 /* Generate the symbols for any queued up type symbols we encountered
3784 while generating the type info for some originally used symbol.
3785 This might generate additional entries in the queue. Only when
3786 the nesting depth goes to 0 is this routine called. */
3787
3788 void
3789 debug_flush_symbol_queue (void)
3790 {
3791 int i;
3792
3793 /* Make sure that additionally queued items are not flushed
3794 prematurely. */
3795
3796 ++debug_nesting;
3797
3798 for (i = 0; i < symbol_queue_index; ++i)
3799 {
3800 /* If we pushed queued symbols then such symbols must be
3801 output no matter what anyone else says. Specifically,
3802 we need to make sure dbxout_symbol() thinks the symbol was
3803 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3804 which may be set for outside reasons. */
3805 int saved_tree_used = TREE_USED (symbol_queue[i]);
3806 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3807 TREE_USED (symbol_queue[i]) = 1;
3808 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3809
3810 #ifdef DBX_DEBUGGING_INFO
3811 dbxout_symbol (symbol_queue[i], 0);
3812 #endif
3813
3814 TREE_USED (symbol_queue[i]) = saved_tree_used;
3815 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3816 }
3817
3818 symbol_queue_index = 0;
3819 --debug_nesting;
3820 }
3821
3822 /* Queue a type symbol needed as part of the definition of a decl
3823 symbol. These symbols are generated when debug_flush_symbol_queue()
3824 is called. */
3825
3826 void
3827 debug_queue_symbol (tree decl)
3828 {
3829 if (symbol_queue_index >= symbol_queue_size)
3830 {
3831 symbol_queue_size += 10;
3832 symbol_queue = xrealloc (symbol_queue,
3833 symbol_queue_size * sizeof (tree));
3834 }
3835
3836 symbol_queue[symbol_queue_index++] = decl;
3837 }
3838
3839 /* Free symbol queue. */
3840 void
3841 debug_free_queue (void)
3842 {
3843 if (symbol_queue)
3844 {
3845 free (symbol_queue);
3846 symbol_queue = NULL;
3847 symbol_queue_size = 0;
3848 }
3849 }