[gcc.git] / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
24
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
30
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
35
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
38
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
42
43 The code for the function prologue and epilogue are generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
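/* Illustrative usage sketch (not part of the original source; INSNS, FILE
   and OPTIMIZE are placeholder names): a caller of this pass uses the
   entry points described above roughly in this order:

     final_start_function (insns, file, optimize);
     final (insns, file, optimize, 0);
     final_end_function ();

   A PRESCAN argument of 0 to `final' produces ordinary output; see the
   comment above `final' for the prescanning values.  */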
46
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74
75 #ifdef XCOFF_DEBUGGING_INFO
76 #include "xcoffout.h" /* Needed for external data
77 declarations for e.g. AIX 4.x. */
78 #endif
79
80 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
81 #include "dwarf2out.h"
82 #endif
83
84 #ifdef DBX_DEBUGGING_INFO
85 #include "dbxout.h"
86 #endif
87
88 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
89 null default for it to save conditionalization later. */
90 #ifndef CC_STATUS_INIT
91 #define CC_STATUS_INIT
92 #endif
93
94 /* How to start an assembler comment. */
95 #ifndef ASM_COMMENT_START
96 #define ASM_COMMENT_START ";#"
97 #endif
98
99 /* Is the given character a logical line separator for the assembler? */
100 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
101 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
102 #endif
103
104 #ifndef JUMP_TABLES_IN_TEXT_SECTION
105 #define JUMP_TABLES_IN_TEXT_SECTION 0
106 #endif
107
108 #if defined(READONLY_DATA_SECTION) || defined(READONLY_DATA_SECTION_ASM_OP)
109 #define HAVE_READONLY_DATA_SECTION 1
110 #else
111 #define HAVE_READONLY_DATA_SECTION 0
112 #endif
113
114 /* Last insn processed by final_scan_insn. */
115 static rtx debug_insn;
116 rtx current_output_insn;
117
118 /* Line number of last NOTE. */
119 static int last_linenum;
120
121 /* Highest line number in current block. */
122 static int high_block_linenum;
123
124 /* Likewise for function. */
125 static int high_function_linenum;
126
127 /* Filename of last NOTE. */
128 static const char *last_filename;
129
130 extern int length_unit_log; /* This is defined in insn-attrtab.c. */
131
132 /* Nonzero while outputting an `asm' with operands.
133 This means that inconsistencies are the user's fault, so don't abort.
134 The precise value is the insn being output, to pass to error_for_asm. */
135 rtx this_is_asm_operands;
136
137 /* Number of operands of this insn, for an `asm' with operands. */
138 static unsigned int insn_noperands;
139
140 /* Compare optimization flag. */
141
142 static rtx last_ignored_compare = 0;
143
144 /* Assign a unique number to each insn that is output.
145 This can be used to generate unique local labels. */
146
147 static int insn_counter = 0;
148
149 #ifdef HAVE_cc0
150 /* This variable contains machine-dependent flags (defined in tm.h)
151 set and examined by output routines
152 that describe how to interpret the condition codes properly. */
153
154 CC_STATUS cc_status;
155
156 /* During output of an insn, this contains a copy of cc_status
157 from before the insn. */
158
159 CC_STATUS cc_prev_status;
160 #endif
161
162 /* Indexed by hardware reg number, is 1 if that register is ever
163 used in the current function.
164
165 In life_analysis, or in stupid_life_analysis, this is set
166 up to record the hard regs used explicitly. Reload adds
167 in the hard regs used for holding pseudo regs. Final uses
168 it to generate the code in the function prologue and epilogue
169 to save and restore registers as needed. */
170
171 char regs_ever_live[FIRST_PSEUDO_REGISTER];
172
173 /* Nonzero means current function must be given a frame pointer.
174 Initialized in function.c to 0. Set only in reload1.c as per
175 the needs of the function. */
176
177 int frame_pointer_needed;
178
179 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
180
181 static int block_depth;
182
183 /* Nonzero if have enabled APP processing of our assembler output. */
184
185 static int app_on;
186
187 /* If we are outputting an insn sequence, this contains the sequence rtx.
188 Zero otherwise. */
189
190 rtx final_sequence;
191
192 #ifdef ASSEMBLER_DIALECT
193
194 /* Number of the assembler dialect to use, starting at 0. */
195 static int dialect_number;
196 #endif
197
198 /* Indexed by line number, nonzero if there is a note for that line. */
199
200 static char *line_note_exists;
201
202 #ifdef HAVE_conditional_execution
203 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
204 rtx current_insn_predicate;
205 #endif
206
207 #ifdef HAVE_ATTR_length
208 static int asm_insn_count (rtx);
209 #endif
210 static void profile_function (FILE *);
211 static void profile_after_prologue (FILE *);
212 static bool notice_source_line (rtx);
213 static rtx walk_alter_subreg (rtx *);
214 static void output_asm_name (void);
215 static void output_alternate_entry_point (FILE *, rtx);
216 static tree get_mem_expr_from_op (rtx, int *);
217 static void output_asm_operand_names (rtx *, int *, int);
218 static void output_operand (rtx, int);
219 #ifdef LEAF_REGISTERS
220 static void leaf_renumber_regs (rtx);
221 #endif
222 #ifdef HAVE_cc0
223 static int alter_cond (rtx);
224 #endif
225 #ifndef ADDR_VEC_ALIGN
226 static int final_addr_vec_align (rtx);
227 #endif
228 #ifdef HAVE_ATTR_length
229 static int align_fuzz (rtx, rtx, int, unsigned);
230 #endif
231 \f
232 /* Initialize data in final at the beginning of a compilation. */
233
234 void
235 init_final (const char *filename ATTRIBUTE_UNUSED)
236 {
237 app_on = 0;
238 final_sequence = 0;
239
240 #ifdef ASSEMBLER_DIALECT
241 dialect_number = ASSEMBLER_DIALECT;
242 #endif
243 }
244
245 /* Default target function prologue and epilogue assembler output.
246
247 If not overridden for epilogue code, then the function body itself
248 contains return instructions wherever needed. */
249 void
250 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
251 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
252 {
253 }
254
255 /* Default target hook that outputs nothing to a stream. */
256 void
257 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
258 {
259 }
260
261 /* Enable APP processing of subsequent output.
262 Used before the output from an `asm' statement. */
263
264 void
265 app_enable (void)
266 {
267 if (! app_on)
268 {
269 fputs (ASM_APP_ON, asm_out_file);
270 app_on = 1;
271 }
272 }
273
274 /* Disable APP processing of subsequent output.
275 Called from varasm.c before most kinds of output. */
276
277 void
278 app_disable (void)
279 {
280 if (app_on)
281 {
282 fputs (ASM_APP_OFF, asm_out_file);
283 app_on = 0;
284 }
285 }
286 \f
287 /* Return the number of slots filled in the current
288 delayed branch sequence (we don't count the insn needing the
289 delay slot). Zero if not in a delayed branch sequence. */
290
291 #ifdef DELAY_SLOTS
292 int
293 dbr_sequence_length (void)
294 {
295 if (final_sequence != 0)
296 return XVECLEN (final_sequence, 0) - 1;
297 else
298 return 0;
299 }
300 #endif
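/* For instance (illustrative, not part of the original source): while
   outputting a delayed-branch SEQUENCE consisting of the branch itself
   plus two filled delay-slot insns (three elements in total),
   dbr_sequence_length returns 2.  */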
301 \f
302 /* The next two pages contain routines used to compute the length of an insn
303 and to shorten branches. */
304
305 /* Arrays for insn lengths, and addresses. The latter is referenced by
306 `insn_current_length'. */
307
308 static int *insn_lengths;
309
310 varray_type insn_addresses_;
311
312 /* Max uid for which the above arrays are valid. */
313 static int insn_lengths_max_uid;
314
315 /* Address of insn being processed. Used by `insn_current_length'. */
316 int insn_current_address;
317
318 /* Address of insn being processed in previous iteration. */
319 int insn_last_address;
320
321 /* known invariant alignment of insn being processed. */
322 int insn_current_align;
323
324 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
325 gives the next following alignment insn that increases the known
326 alignment, or NULL_RTX if there is no such insn.
327 For any alignment obtained this way, we can again index uid_align with
328 its uid to obtain the next following align that in turn increases the
329 alignment, till we reach NULL_RTX; in the comments that follow, we'll
330 call the sequence obtained this way for an insn the alignment chain
331 of that insn. */
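/* An illustrative example (not from the original source): if insn I is
   followed first by a label L1 that guarantees 4-byte alignment and later
   by a label L2 that guarantees 16-byte alignment, then
   uid_align[INSN_UID (I)] == L1, uid_align[INSN_UID (L1)] == L2 and
   uid_align[INSN_UID (L2)] == NULL_RTX; the alignment chain of I is
   therefore L1, L2.  */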
332
333 struct label_alignment
334 {
335 short alignment;
336 short max_skip;
337 };
338
339 static rtx *uid_align;
340 static int *uid_shuid;
341 static struct label_alignment *label_align;
342
343 /* Indicate that branch shortening hasn't yet been done. */
344
345 void
346 init_insn_lengths (void)
347 {
348 if (uid_shuid)
349 {
350 free (uid_shuid);
351 uid_shuid = 0;
352 }
353 if (insn_lengths)
354 {
355 free (insn_lengths);
356 insn_lengths = 0;
357 insn_lengths_max_uid = 0;
358 }
359 #ifdef HAVE_ATTR_length
360 INSN_ADDRESSES_FREE ();
361 #endif
362 if (uid_align)
363 {
364 free (uid_align);
365 uid_align = 0;
366 }
367 }
368
369 /* Obtain the current length of an insn. If branch shortening has been done,
370 get its actual length. Otherwise, get its maximum length. */
371
372 int
373 get_attr_length (rtx insn ATTRIBUTE_UNUSED)
374 {
375 #ifdef HAVE_ATTR_length
376 rtx body;
377 int i;
378 int length = 0;
379
380 if (insn_lengths_max_uid > INSN_UID (insn))
381 return insn_lengths[INSN_UID (insn)];
382 else
383 switch (GET_CODE (insn))
384 {
385 case NOTE:
386 case BARRIER:
387 case CODE_LABEL:
388 return 0;
389
390 case CALL_INSN:
391 length = insn_default_length (insn);
392 break;
393
394 case JUMP_INSN:
395 body = PATTERN (insn);
396 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
397 {
398 /* Alignment is machine-dependent and should be handled by
399 ADDR_VEC_ALIGN. */
400 }
401 else
402 length = insn_default_length (insn);
403 break;
404
405 case INSN:
406 body = PATTERN (insn);
407 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
408 return 0;
409
410 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
411 length = asm_insn_count (body) * insn_default_length (insn);
412 else if (GET_CODE (body) == SEQUENCE)
413 for (i = 0; i < XVECLEN (body, 0); i++)
414 length += get_attr_length (XVECEXP (body, 0, i));
415 else
416 length = insn_default_length (insn);
417 break;
418
419 default:
420 break;
421 }
422
423 #ifdef ADJUST_INSN_LENGTH
424 ADJUST_INSN_LENGTH (insn, length);
425 #endif
426 return length;
427 #else /* not HAVE_ATTR_length */
428 return 0;
429 #endif /* not HAVE_ATTR_length */
430 }
431 \f
432 /* Code to handle alignment inside shorten_branches. */
433
434 /* Here is an explanation of how the algorithm in align_fuzz can give
435 proper results:
436
437 Call a sequence of instructions beginning with alignment point X
438 and continuing until the next alignment point `block X'. When `X'
439 is used in an expression, it means the alignment value of the
440 alignment point.
441
442 Call the distance between the start of the first insn of block X, and
443 the end of the last insn of block X `IX', for the `inner size of X'.
444 This is clearly the sum of the instruction lengths.
445
446 Likewise with the next alignment-delimited block following X, which we
447 shall call block Y.
448
449 Call the distance between the start of the first insn of block X, and
450 the start of the first insn of block Y `OX', for the `outer size of X'.
451
452 The estimated padding is then OX - IX.
453
454 OX can be safely estimated as
455
456 if (X >= Y)
457 OX = round_up(IX, Y)
458 else
459 OX = round_up(IX, X) + Y - X
460
461 Clearly est(IX) >= real(IX), because that only depends on the
462 instruction lengths, and those being overestimated is a given.
463
464 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
465 we needn't worry about that when thinking about OX.
466
467 When X >= Y, the alignment provided by Y adds no uncertainty factor
468 for branch ranges starting before X, so we can just round what we have.
469 But when X < Y, we don't know anything about the, so to speak,
470 `middle bits', so we have to assume the worst when aligning up from an
471 address mod X to one mod Y, which is Y - X. */
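/* A worked example of the estimate above (illustrative, not part of the
   original source): with X = 4, Y = 8 and IX = 10 we have X < Y, so
   OX = round_up (10, 4) + 8 - 4 = 16, and up to OX - IX = 6 bytes of
   padding must be assumed.  With X = 8 and Y = 4 instead (X >= Y),
   OX = round_up (10, 4) = 12.  */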
472
473 #ifndef LABEL_ALIGN
474 #define LABEL_ALIGN(LABEL) align_labels_log
475 #endif
476
477 #ifndef LABEL_ALIGN_MAX_SKIP
478 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
479 #endif
480
481 #ifndef LOOP_ALIGN
482 #define LOOP_ALIGN(LABEL) align_loops_log
483 #endif
484
485 #ifndef LOOP_ALIGN_MAX_SKIP
486 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
487 #endif
488
489 #ifndef LABEL_ALIGN_AFTER_BARRIER
490 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
491 #endif
492
493 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
494 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
495 #endif
496
497 #ifndef JUMP_ALIGN
498 #define JUMP_ALIGN(LABEL) align_jumps_log
499 #endif
500
501 #ifndef JUMP_ALIGN_MAX_SKIP
502 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
503 #endif
504
505 #ifndef ADDR_VEC_ALIGN
506 static int
507 final_addr_vec_align (rtx addr_vec)
508 {
509 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
510
511 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
512 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
513 return exact_log2 (align);
514
515 }
516
517 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
518 #endif
519
520 #ifndef INSN_LENGTH_ALIGNMENT
521 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
522 #endif
523
524 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
525
526 static int min_labelno, max_labelno;
527
528 #define LABEL_TO_ALIGNMENT(LABEL) \
529 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
530
531 #define LABEL_TO_MAX_SKIP(LABEL) \
532 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
533
534 /* For the benefit of port specific code do this also as a function. */
535
536 int
537 label_to_alignment (rtx label)
538 {
539 return LABEL_TO_ALIGNMENT (label);
540 }
541
542 #ifdef HAVE_ATTR_length
543 /* The differences in addresses
544 between a branch and its target might grow or shrink depending on
545 the alignment the start insn of the range (the branch for a forward
546 branch or the label for a backward branch) starts out on; if these
547 differences are used naively, they can even oscillate infinitely.
548 We therefore want to compute a 'worst case' address difference that
549 is independent of the alignment the start insn of the range ends
550 up on, and that is at least as large as the actual difference.
551 The function align_fuzz calculates the amount we have to add to the
552 naively computed difference, by traversing the part of the alignment
553 chain of the start insn of the range that is in front of the end insn
554 of the range, and considering for each alignment the maximum amount
555 that it might contribute to a size increase.
556
557 For casesi tables, we also want to know worst case minimum amounts of
558 address difference, in case a machine description wants to introduce
559 some common offset that is added to all offsets in a table.
560 For this purpose, align_fuzz with a growth argument of 0 computes the
561 appropriate adjustment. */
562
563 /* Compute the maximum delta by which the difference of the addresses of
564 START and END might grow / shrink due to a different address for start
565 which changes the size of alignment insns between START and END.
566 KNOWN_ALIGN_LOG is the alignment known for START.
567 GROWTH should be ~0 if the objective is to compute potential code size
568 increase, and 0 if the objective is to compute potential shrink.
569 The return value is undefined for any other value of GROWTH. */
570
571 static int
572 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
573 {
574 int uid = INSN_UID (start);
575 rtx align_label;
576 int known_align = 1 << known_align_log;
577 int end_shuid = INSN_SHUID (end);
578 int fuzz = 0;
579
580 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
581 {
582 int align_addr, new_align;
583
584 uid = INSN_UID (align_label);
585 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
586 if (uid_shuid[uid] > end_shuid)
587 break;
588 known_align_log = LABEL_TO_ALIGNMENT (align_label);
589 new_align = 1 << known_align_log;
590 if (new_align < known_align)
591 continue;
592 fuzz += (-align_addr ^ growth) & (new_align - known_align);
593 known_align = new_align;
594 }
595 return fuzz;
596 }
597
598 /* Compute a worst-case reference address of a branch so that it
599 can be safely used in the presence of aligned labels. Since the
600 size of the branch itself is unknown, the size of the branch is
601 not included in the range. I.e. for a forward branch, the reference
602 address is the end address of the branch as known from the previous
603 branch shortening pass, minus a value to account for possible size
604 increase due to alignment. For a backward branch, it is the start
605 address of the branch as known from the current pass, plus a value
606 to account for possible size increase due to alignment.
607 NB.: Therefore, the maximum offset allowed for backward branches needs
608 to exclude the branch size. */
609
610 int
611 insn_current_reference_address (rtx branch)
612 {
613 rtx dest, seq;
614 int seq_uid;
615
616 if (! INSN_ADDRESSES_SET_P ())
617 return 0;
618
619 seq = NEXT_INSN (PREV_INSN (branch));
620 seq_uid = INSN_UID (seq);
621 if (GET_CODE (branch) != JUMP_INSN)
622 /* This can happen for example on the PA; the objective is to know the
623 offset to address something in front of the start of the function.
624 Thus, we can treat it like a backward branch.
625 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
626 any alignment we'd encounter, so we skip the call to align_fuzz. */
627 return insn_current_address;
628 dest = JUMP_LABEL (branch);
629
630 /* BRANCH has no proper alignment chain set, so use SEQ.
631 BRANCH also has no INSN_SHUID. */
632 if (INSN_SHUID (seq) < INSN_SHUID (dest))
633 {
634 /* Forward branch. */
635 return (insn_last_address + insn_lengths[seq_uid]
636 - align_fuzz (seq, dest, length_unit_log, ~0));
637 }
638 else
639 {
640 /* Backward branch. */
641 return (insn_current_address
642 + align_fuzz (dest, seq, length_unit_log, ~0));
643 }
644 }
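/* Worked example (illustrative, not part of the original source): for a
   forward branch whose SEQ ended at address 100 in the previous pass, and
   for which align_fuzz over the intervening alignment points is 6, the
   reference address used is 100 - 6 = 94; the branch is assumed to start
   up to 6 bytes earlier, which widens the apparent distance to the target.
   For a backward branch the fuzz is instead added to the current start
   address, again widening the assumed distance to the (earlier) target.  */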
645 #endif /* HAVE_ATTR_length */
646 \f
647 void
648 compute_alignments (void)
649 {
650 int log, max_skip, max_log;
651 basic_block bb;
652
653 if (label_align)
654 {
655 free (label_align);
656 label_align = 0;
657 }
658
659 max_labelno = max_label_num ();
660 min_labelno = get_first_label_num ();
661 label_align = xcalloc (max_labelno - min_labelno + 1,
662 sizeof (struct label_alignment));
663
664 /* If not optimizing or optimizing for size, don't assign any alignments. */
665 if (! optimize || optimize_size)
666 return;
667
668 FOR_EACH_BB (bb)
669 {
670 rtx label = bb->head;
671 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
672 edge e;
673
674 if (GET_CODE (label) != CODE_LABEL
675 || probably_never_executed_bb_p (bb))
676 continue;
677 max_log = LABEL_ALIGN (label);
678 max_skip = LABEL_ALIGN_MAX_SKIP;
679
680 for (e = bb->pred; e; e = e->pred_next)
681 {
682 if (e->flags & EDGE_FALLTHRU)
683 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
684 else
685 branch_frequency += EDGE_FREQUENCY (e);
686 }
687
688 /* There are two reasons to align a block that has no incoming fallthru edge:
689 1) to avoid fetch stalls when the branch destination is near a cache boundary
690 2) to improve cache efficiency in case the previous block is not executed
691 (so it does not need to be in the cache).
692
693 To catch the first case, we align frequently executed blocks.
694 To catch the second, we align blocks that are executed more frequently
695 than their predecessor when the predecessor is unlikely to be executed
696 when the function is called. */
697
698 if (!has_fallthru
699 && (branch_frequency > BB_FREQ_MAX / 10
700 || (bb->frequency > bb->prev_bb->frequency * 10
701 && (bb->prev_bb->frequency
702 <= ENTRY_BLOCK_PTR->frequency / 2))))
703 {
704 log = JUMP_ALIGN (label);
705 if (max_log < log)
706 {
707 max_log = log;
708 max_skip = JUMP_ALIGN_MAX_SKIP;
709 }
710 }
711 /* If the block is frequent and reached mostly by non-fallthru edges,
712 align it. It is most likely the first block of a loop. */
713 if (has_fallthru
714 && maybe_hot_bb_p (bb)
715 && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
716 && branch_frequency > fallthru_frequency * 2)
717 {
718 log = LOOP_ALIGN (label);
719 if (max_log < log)
720 {
721 max_log = log;
722 max_skip = LOOP_ALIGN_MAX_SKIP;
723 }
724 }
725 LABEL_TO_ALIGNMENT (label) = max_log;
726 LABEL_TO_MAX_SKIP (label) = max_skip;
727 }
728 }
729 \f
730 /* Make a pass over all insns and compute their actual lengths by shortening
731 any branches of variable length if possible. */
732
733 /* Give a default value for the lowest address in a function. */
734
735 #ifndef FIRST_INSN_ADDRESS
736 #define FIRST_INSN_ADDRESS 0
737 #endif
738
739 /* shorten_branches might be called multiple times: for example, the SH
740 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
741 In order to do this, it needs proper length information, which it obtains
742 by calling shorten_branches. This cannot be collapsed with
743 shorten_branches itself into a single pass unless we also want to integrate
744 reorg.c, since the branch splitting exposes new instructions with delay
745 slots. */
746
747 void
748 shorten_branches (rtx first ATTRIBUTE_UNUSED)
749 {
750 rtx insn;
751 int max_uid;
752 int i;
753 int max_log;
754 int max_skip;
755 #ifdef HAVE_ATTR_length
756 #define MAX_CODE_ALIGN 16
757 rtx seq;
758 int something_changed = 1;
759 char *varying_length;
760 rtx body;
761 int uid;
762 rtx align_tab[MAX_CODE_ALIGN];
763
764 #endif
765
766 /* Compute maximum UID and allocate label_align / uid_shuid. */
767 max_uid = get_max_uid ();
768
769 uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);
770
771 if (max_labelno != max_label_num ())
772 {
773 int old = max_labelno;
774 int n_labels;
775 int n_old_labels;
776
777 max_labelno = max_label_num ();
778
779 n_labels = max_labelno - min_labelno + 1;
780 n_old_labels = old - min_labelno + 1;
781
782 label_align = xrealloc (label_align,
783 n_labels * sizeof (struct label_alignment));
784
785 /* The range of labels grows monotonically in the function. An abort here
786 means that the initialization of the array got lost. */
787 if (n_old_labels > n_labels)
788 abort ();
789
790 memset (label_align + n_old_labels, 0,
791 (n_labels - n_old_labels) * sizeof (struct label_alignment));
792 }
793
794 /* Initialize label_align and set up uid_shuid to be strictly
795 monotonically rising with insn order. */
796 /* We use max_log here to keep track of the maximum alignment we want to
797 impose on the next CODE_LABEL (or the current one if we are processing
798 the CODE_LABEL itself). */
799
800 max_log = 0;
801 max_skip = 0;
802
803 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
804 {
805 int log;
806
807 INSN_SHUID (insn) = i++;
808 if (INSN_P (insn))
809 {
810 /* reorg might make the first insn of a loop be run only once,
811 and delete the label in front of it. Then we want to apply
812 the loop alignment to the new label created by reorg, which
813 is separated from the NOTE_INSN_LOOP_BEG by the former
814 loop start insn. */
815 }
816 else if (GET_CODE (insn) == CODE_LABEL)
817 {
818 rtx next;
819
820 /* Merge in alignments computed by compute_alignments. */
821 log = LABEL_TO_ALIGNMENT (insn);
822 if (max_log < log)
823 {
824 max_log = log;
825 max_skip = LABEL_TO_MAX_SKIP (insn);
826 }
827
828 log = LABEL_ALIGN (insn);
829 if (max_log < log)
830 {
831 max_log = log;
832 max_skip = LABEL_ALIGN_MAX_SKIP;
833 }
834 next = NEXT_INSN (insn);
835 /* ADDR_VECs only take room if read-only data goes into the text
836 section. */
837 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
838 if (next && GET_CODE (next) == JUMP_INSN)
839 {
840 rtx nextbody = PATTERN (next);
841 if (GET_CODE (nextbody) == ADDR_VEC
842 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
843 {
844 log = ADDR_VEC_ALIGN (next);
845 if (max_log < log)
846 {
847 max_log = log;
848 max_skip = LABEL_ALIGN_MAX_SKIP;
849 }
850 }
851 }
852 LABEL_TO_ALIGNMENT (insn) = max_log;
853 LABEL_TO_MAX_SKIP (insn) = max_skip;
854 max_log = 0;
855 max_skip = 0;
856 }
857 else if (GET_CODE (insn) == BARRIER)
858 {
859 rtx label;
860
861 for (label = insn; label && ! INSN_P (label);
862 label = NEXT_INSN (label))
863 if (GET_CODE (label) == CODE_LABEL)
864 {
865 log = LABEL_ALIGN_AFTER_BARRIER (insn);
866 if (max_log < log)
867 {
868 max_log = log;
869 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
870 }
871 break;
872 }
873 }
874 }
875 #ifdef HAVE_ATTR_length
876
877 /* Allocate the rest of the arrays. */
878 insn_lengths = xmalloc (max_uid * sizeof (*insn_lengths));
879 insn_lengths_max_uid = max_uid;
880 /* Syntax errors can lead to labels being outside of the main insn stream.
881 Initialize insn_addresses, so that we get reproducible results. */
882 INSN_ADDRESSES_ALLOC (max_uid);
883
884 varying_length = xcalloc (max_uid, sizeof (char));
885
886 /* Initialize uid_align. We scan instructions
887 from end to start, and keep in align_tab[n] the last seen insn
888 that does an alignment of at least n+1, i.e. the successor
889 in the alignment chain for an insn that does / has a known
890 alignment of n. */
891 uid_align = xcalloc (max_uid, sizeof *uid_align);
892
893 for (i = MAX_CODE_ALIGN; --i >= 0;)
894 align_tab[i] = NULL_RTX;
895 seq = get_last_insn ();
896 for (; seq; seq = PREV_INSN (seq))
897 {
898 int uid = INSN_UID (seq);
899 int log;
900 log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
901 uid_align[uid] = align_tab[0];
902 if (log)
903 {
904 /* Found an alignment label. */
905 uid_align[uid] = align_tab[log];
906 for (i = log - 1; i >= 0; i--)
907 align_tab[i] = seq;
908 }
909 }
910 #ifdef CASE_VECTOR_SHORTEN_MODE
911 if (optimize)
912 {
913 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
914 label fields. */
915
916 int min_shuid = INSN_SHUID (get_insns ()) - 1;
917 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
918 int rel;
919
920 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
921 {
922 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
923 int len, i, min, max, insn_shuid;
924 int min_align;
925 addr_diff_vec_flags flags;
926
927 if (GET_CODE (insn) != JUMP_INSN
928 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
929 continue;
930 pat = PATTERN (insn);
931 len = XVECLEN (pat, 1);
932 if (len <= 0)
933 abort ();
934 min_align = MAX_CODE_ALIGN;
935 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
936 {
937 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
938 int shuid = INSN_SHUID (lab);
939 if (shuid < min)
940 {
941 min = shuid;
942 min_lab = lab;
943 }
944 if (shuid > max)
945 {
946 max = shuid;
947 max_lab = lab;
948 }
949 if (min_align > LABEL_TO_ALIGNMENT (lab))
950 min_align = LABEL_TO_ALIGNMENT (lab);
951 }
952 XEXP (pat, 2) = gen_rtx_LABEL_REF (VOIDmode, min_lab);
953 XEXP (pat, 3) = gen_rtx_LABEL_REF (VOIDmode, max_lab);
954 insn_shuid = INSN_SHUID (insn);
955 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
956 flags.min_align = min_align;
957 flags.base_after_vec = rel > insn_shuid;
958 flags.min_after_vec = min > insn_shuid;
959 flags.max_after_vec = max > insn_shuid;
960 flags.min_after_base = min > rel;
961 flags.max_after_base = max > rel;
962 ADDR_DIFF_VEC_FLAGS (pat) = flags;
963 }
964 }
965 #endif /* CASE_VECTOR_SHORTEN_MODE */
966
967 /* Compute initial lengths, addresses, and varying flags for each insn. */
968 for (insn_current_address = FIRST_INSN_ADDRESS, insn = first;
969 insn != 0;
970 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
971 {
972 uid = INSN_UID (insn);
973
974 insn_lengths[uid] = 0;
975
976 if (GET_CODE (insn) == CODE_LABEL)
977 {
978 int log = LABEL_TO_ALIGNMENT (insn);
979 if (log)
980 {
981 int align = 1 << log;
982 int new_address = (insn_current_address + align - 1) & -align;
983 insn_lengths[uid] = new_address - insn_current_address;
984 }
985 }
986
987 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
988
989 if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
990 || GET_CODE (insn) == CODE_LABEL)
991 continue;
992 if (INSN_DELETED_P (insn))
993 continue;
994
995 body = PATTERN (insn);
996 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
997 {
998 /* This only takes room if read-only data goes into the text
999 section. */
1000 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1001 insn_lengths[uid] = (XVECLEN (body,
1002 GET_CODE (body) == ADDR_DIFF_VEC)
1003 * GET_MODE_SIZE (GET_MODE (body)));
1004 /* Alignment is handled by ADDR_VEC_ALIGN. */
1005 }
1006 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1007 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1008 else if (GET_CODE (body) == SEQUENCE)
1009 {
1010 int i;
1011 int const_delay_slots;
1012 #ifdef DELAY_SLOTS
1013 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1014 #else
1015 const_delay_slots = 0;
1016 #endif
1017 /* Inside a delay slot sequence, we do not do any branch shortening
1018 if the shortening could change the number of delay slots
1019 of the branch. */
1020 for (i = 0; i < XVECLEN (body, 0); i++)
1021 {
1022 rtx inner_insn = XVECEXP (body, 0, i);
1023 int inner_uid = INSN_UID (inner_insn);
1024 int inner_length;
1025
1026 if (GET_CODE (body) == ASM_INPUT
1027 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1028 inner_length = (asm_insn_count (PATTERN (inner_insn))
1029 * insn_default_length (inner_insn));
1030 else
1031 inner_length = insn_default_length (inner_insn);
1032
1033 insn_lengths[inner_uid] = inner_length;
1034 if (const_delay_slots)
1035 {
1036 if ((varying_length[inner_uid]
1037 = insn_variable_length_p (inner_insn)) != 0)
1038 varying_length[uid] = 1;
1039 INSN_ADDRESSES (inner_uid) = (insn_current_address
1040 + insn_lengths[uid]);
1041 }
1042 else
1043 varying_length[inner_uid] = 0;
1044 insn_lengths[uid] += inner_length;
1045 }
1046 }
1047 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1048 {
1049 insn_lengths[uid] = insn_default_length (insn);
1050 varying_length[uid] = insn_variable_length_p (insn);
1051 }
1052
1053 /* If needed, do any adjustment. */
1054 #ifdef ADJUST_INSN_LENGTH
1055 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1056 if (insn_lengths[uid] < 0)
1057 fatal_insn ("negative insn length", insn);
1058 #endif
1059 }
1060
1061 /* Now loop over all the insns finding varying length insns. For each,
1062 get the current insn length. If it has changed, reflect the change.
1063 When nothing changes for a full pass, we are done. */
1064
1065 while (something_changed)
1066 {
1067 something_changed = 0;
1068 insn_current_align = MAX_CODE_ALIGN - 1;
1069 for (insn_current_address = FIRST_INSN_ADDRESS, insn = first;
1070 insn != 0;
1071 insn = NEXT_INSN (insn))
1072 {
1073 int new_length;
1074 #ifdef ADJUST_INSN_LENGTH
1075 int tmp_length;
1076 #endif
1077 int length_align;
1078
1079 uid = INSN_UID (insn);
1080
1081 if (GET_CODE (insn) == CODE_LABEL)
1082 {
1083 int log = LABEL_TO_ALIGNMENT (insn);
1084 if (log > insn_current_align)
1085 {
1086 int align = 1 << log;
1087 int new_address = (insn_current_address + align - 1) & -align;
1088 insn_lengths[uid] = new_address - insn_current_address;
1089 insn_current_align = log;
1090 insn_current_address = new_address;
1091 }
1092 else
1093 insn_lengths[uid] = 0;
1094 INSN_ADDRESSES (uid) = insn_current_address;
1095 continue;
1096 }
1097
1098 length_align = INSN_LENGTH_ALIGNMENT (insn);
1099 if (length_align < insn_current_align)
1100 insn_current_align = length_align;
1101
1102 insn_last_address = INSN_ADDRESSES (uid);
1103 INSN_ADDRESSES (uid) = insn_current_address;
1104
1105 #ifdef CASE_VECTOR_SHORTEN_MODE
1106 if (optimize && GET_CODE (insn) == JUMP_INSN
1107 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1108 {
1109 rtx body = PATTERN (insn);
1110 int old_length = insn_lengths[uid];
1111 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1112 rtx min_lab = XEXP (XEXP (body, 2), 0);
1113 rtx max_lab = XEXP (XEXP (body, 3), 0);
1114 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1115 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1116 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1117 rtx prev;
1118 int rel_align = 0;
1119 addr_diff_vec_flags flags;
1120
1121 /* Avoid automatic aggregate initialization. */
1122 flags = ADDR_DIFF_VEC_FLAGS (body);
1123
1124 /* Try to find a known alignment for rel_lab. */
1125 for (prev = rel_lab;
1126 prev
1127 && ! insn_lengths[INSN_UID (prev)]
1128 && ! (varying_length[INSN_UID (prev)] & 1);
1129 prev = PREV_INSN (prev))
1130 if (varying_length[INSN_UID (prev)] & 2)
1131 {
1132 rel_align = LABEL_TO_ALIGNMENT (prev);
1133 break;
1134 }
1135
1136 /* See the comment on addr_diff_vec_flags in rtl.h for the
1137 meaning of the flags values. base: REL_LAB vec: INSN */
1138 /* Anything after INSN still has addresses from the last
1139 pass; adjust these so that they reflect our current
1140 estimate for this pass. */
1141 if (flags.base_after_vec)
1142 rel_addr += insn_current_address - insn_last_address;
1143 if (flags.min_after_vec)
1144 min_addr += insn_current_address - insn_last_address;
1145 if (flags.max_after_vec)
1146 max_addr += insn_current_address - insn_last_address;
1147 /* We want to know the worst case, i.e. lowest possible value
1148 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1149 its offset is positive, and we have to be wary of code shrink;
1150 otherwise, it is negative, and we have to be wary of code
1151 size increase. */
1152 if (flags.min_after_base)
1153 {
1154 /* If INSN is between REL_LAB and MIN_LAB, the size
1155 changes we are about to make can change the alignment
1156 within the observed offset, therefore we have to break
1157 it up into two parts that are independent. */
1158 if (! flags.base_after_vec && flags.min_after_vec)
1159 {
1160 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1161 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1162 }
1163 else
1164 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1165 }
1166 else
1167 {
1168 if (flags.base_after_vec && ! flags.min_after_vec)
1169 {
1170 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1171 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1172 }
1173 else
1174 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1175 }
1176 /* Likewise, determine the worst case, i.e. highest possible value
1177 for the offset of MAX_LAB. */
1178 if (flags.max_after_base)
1179 {
1180 if (! flags.base_after_vec && flags.max_after_vec)
1181 {
1182 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1183 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1184 }
1185 else
1186 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1187 }
1188 else
1189 {
1190 if (flags.base_after_vec && ! flags.max_after_vec)
1191 {
1192 max_addr += align_fuzz (max_lab, insn, 0, 0);
1193 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1194 }
1195 else
1196 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1197 }
1198 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1199 max_addr - rel_addr,
1200 body));
1201 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1202 {
1203 insn_lengths[uid]
1204 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1205 insn_current_address += insn_lengths[uid];
1206 if (insn_lengths[uid] != old_length)
1207 something_changed = 1;
1208 }
1209
1210 continue;
1211 }
1212 #endif /* CASE_VECTOR_SHORTEN_MODE */
1213
1214 if (! (varying_length[uid]))
1215 {
1216 if (GET_CODE (insn) == INSN
1217 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1218 {
1219 int i;
1220
1221 body = PATTERN (insn);
1222 for (i = 0; i < XVECLEN (body, 0); i++)
1223 {
1224 rtx inner_insn = XVECEXP (body, 0, i);
1225 int inner_uid = INSN_UID (inner_insn);
1226
1227 INSN_ADDRESSES (inner_uid) = insn_current_address;
1228
1229 insn_current_address += insn_lengths[inner_uid];
1230 }
1231 }
1232 else
1233 insn_current_address += insn_lengths[uid];
1234
1235 continue;
1236 }
1237
1238 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1239 {
1240 int i;
1241
1242 body = PATTERN (insn);
1243 new_length = 0;
1244 for (i = 0; i < XVECLEN (body, 0); i++)
1245 {
1246 rtx inner_insn = XVECEXP (body, 0, i);
1247 int inner_uid = INSN_UID (inner_insn);
1248 int inner_length;
1249
1250 INSN_ADDRESSES (inner_uid) = insn_current_address;
1251
1252 /* insn_current_length returns 0 for insns with a
1253 non-varying length. */
1254 if (! varying_length[inner_uid])
1255 inner_length = insn_lengths[inner_uid];
1256 else
1257 inner_length = insn_current_length (inner_insn);
1258
1259 if (inner_length != insn_lengths[inner_uid])
1260 {
1261 insn_lengths[inner_uid] = inner_length;
1262 something_changed = 1;
1263 }
1264 insn_current_address += insn_lengths[inner_uid];
1265 new_length += inner_length;
1266 }
1267 }
1268 else
1269 {
1270 new_length = insn_current_length (insn);
1271 insn_current_address += new_length;
1272 }
1273
1274 #ifdef ADJUST_INSN_LENGTH
1275 /* If needed, do any adjustment. */
1276 tmp_length = new_length;
1277 ADJUST_INSN_LENGTH (insn, new_length);
1278 insn_current_address += (new_length - tmp_length);
1279 #endif
1280
1281 if (new_length != insn_lengths[uid])
1282 {
1283 insn_lengths[uid] = new_length;
1284 something_changed = 1;
1285 }
1286 }
1287 /* For a non-optimizing compile, do only a single pass. */
1288 if (!optimize)
1289 break;
1290 }
1291
1292 free (varying_length);
1293
1294 #endif /* HAVE_ATTR_length */
1295 }
1296
1297 #ifdef HAVE_ATTR_length
1298 /* Given the body of an INSN known to be generated by an ASM statement, return
1299 the number of machine instructions likely to be generated for this insn.
1300 This is used to compute its length. */
1301
1302 static int
1303 asm_insn_count (rtx body)
1304 {
1305 const char *template;
1306 int count = 1;
1307
1308 if (GET_CODE (body) == ASM_INPUT)
1309 template = XSTR (body, 0);
1310 else
1311 template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1312
1313 for (; *template; template++)
1314 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1315 count++;
1316
1317 return count;
1318 }
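/* Example (illustrative, not part of the original source): for an asm
   whose template is "nop; nop\n\tnop", the loop above counts one ';' and
   one '\n' separator and returns 3, so get_attr_length charges this asm
   three times insn_default_length.  */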
1319 #endif
1320 \f
1321 /* Output assembler code for the start of a function,
1322 and initialize some of the variables in this file
1323 for the new function. The label for the function and associated
1324 assembler pseudo-ops have already been output in `assemble_start_function'.
1325
1326 FIRST is the first insn of the rtl for the function being compiled.
1327 FILE is the file to write assembler code to.
1328 OPTIMIZE is nonzero if we should eliminate redundant
1329 test and compare insns. */
1330
1331 void
1332 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1333 int optimize ATTRIBUTE_UNUSED)
1334 {
1335 block_depth = 0;
1336
1337 this_is_asm_operands = 0;
1338
1339 last_filename = locator_file (prologue_locator);
1340 last_linenum = locator_line (prologue_locator);
1341
1342 high_block_linenum = high_function_linenum = last_linenum;
1343
1344 (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1345
1346 #if defined (DWARF2_UNWIND_INFO) || defined (IA64_UNWIND_INFO)
1347 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1348 dwarf2out_begin_prologue (0, NULL);
1349 #endif
1350
1351 #ifdef LEAF_REG_REMAP
1352 if (current_function_uses_only_leaf_regs)
1353 leaf_renumber_regs (first);
1354 #endif
1355
1356 /* The Sun386i and perhaps other machines don't work right
1357 if the profiling code comes after the prologue. */
1358 #ifdef PROFILE_BEFORE_PROLOGUE
1359 if (current_function_profile)
1360 profile_function (file);
1361 #endif /* PROFILE_BEFORE_PROLOGUE */
1362
1363 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1364 if (dwarf2out_do_frame ())
1365 dwarf2out_frame_debug (NULL_RTX);
1366 #endif
1367
1368 /* If debugging, assign block numbers to all of the blocks in this
1369 function. */
1370 if (write_symbols)
1371 {
1372 remove_unnecessary_notes ();
1373 reemit_insn_block_notes ();
1374 number_blocks (current_function_decl);
1375 /* We never actually put out begin/end notes for the top-level
1376 block in the function. But, conceptually, that block is
1377 always needed. */
1378 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1379 }
1380
1381 /* First output the function prologue: code to set up the stack frame. */
1382 (*targetm.asm_out.function_prologue) (file, get_frame_size ());
1383
1384 /* If the machine represents the prologue as RTL, the profiling code must
1385 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1386 #ifdef HAVE_prologue
1387 if (! HAVE_prologue)
1388 #endif
1389 profile_after_prologue (file);
1390 }
1391
1392 static void
1393 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1394 {
1395 #ifndef PROFILE_BEFORE_PROLOGUE
1396 if (current_function_profile)
1397 profile_function (file);
1398 #endif /* not PROFILE_BEFORE_PROLOGUE */
1399 }
1400
1401 static void
1402 profile_function (FILE *file ATTRIBUTE_UNUSED)
1403 {
1404 #ifndef NO_PROFILE_COUNTERS
1405 # define NO_PROFILE_COUNTERS 0
1406 #endif
1407 #if defined(ASM_OUTPUT_REG_PUSH)
1408 int sval = current_function_returns_struct;
1409 rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1410 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1411 int cxt = current_function_needs_context;
1412 #endif
1413 #endif /* ASM_OUTPUT_REG_PUSH */
1414
1415 if (! NO_PROFILE_COUNTERS)
1416 {
1417 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1418 data_section ();
1419 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1420 (*targetm.asm_out.internal_label) (file, "LP", current_function_funcdef_no);
1421 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1422 }
1423
1424 function_section (current_function_decl);
1425
1426 #if defined(ASM_OUTPUT_REG_PUSH)
1427 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1428 ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1429 #endif
1430
1431 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1432 if (cxt)
1433 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1434 #else
1435 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1436 if (cxt)
1437 {
1438 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1439 }
1440 #endif
1441 #endif
1442
1443 FUNCTION_PROFILER (file, current_function_funcdef_no);
1444
1445 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1446 if (cxt)
1447 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1448 #else
1449 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1450 if (cxt)
1451 {
1452 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1453 }
1454 #endif
1455 #endif
1456
1457 #if defined(ASM_OUTPUT_REG_PUSH)
1458 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1459 ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1460 #endif
1461 }
1462
1463 /* Output assembler code for the end of a function.
1464 For clarity, args are same as those of `final_start_function'
1465 even though not all of them are needed. */
1466
1467 void
1468 final_end_function (void)
1469 {
1470 app_disable ();
1471
1472 (*debug_hooks->end_function) (high_function_linenum);
1473
1474 /* Finally, output the function epilogue:
1475 code to restore the stack frame and return to the caller. */
1476 (*targetm.asm_out.function_epilogue) (asm_out_file, get_frame_size ());
1477
1478 /* And debug output. */
1479 (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1480
1481 #if defined (DWARF2_UNWIND_INFO)
1482 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1483 && dwarf2out_do_frame ())
1484 dwarf2out_end_epilogue (last_linenum, last_filename);
1485 #endif
1486 }
1487 \f
1488 /* Output assembler code for some insns: all or part of a function.
1489 For description of args, see `final_start_function', above.
1490
1491 PRESCAN is 1 if we are not really outputting,
1492 just scanning as if we were outputting.
1493 Prescanning deletes and rearranges insns just like ordinary output.
1494 PRESCAN is -2 if we are outputting after having prescanned.
1495 In this case, don't try to delete or rearrange insns
1496 because that has already been done.
1497 Prescanning is done only on certain machines. */
1498
1499 void
1500 final (rtx first, FILE *file, int optimize, int prescan)
1501 {
1502 rtx insn;
1503 int max_line = 0;
1504 int max_uid = 0;
1505
1506 last_ignored_compare = 0;
1507
1508 /* Make a map indicating which line numbers appear in this function.
1509 When producing SDB debugging info, delete troublesome line number
1510 notes from inlined functions in other files as well as duplicate
1511 line number notes. */
1512 #ifdef SDB_DEBUGGING_INFO
1513 if (write_symbols == SDB_DEBUG)
1514 {
1515 rtx last = 0;
1516 for (insn = first; insn; insn = NEXT_INSN (insn))
1517 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1518 {
1519 if ((RTX_INTEGRATED_P (insn)
1520 && strcmp (NOTE_SOURCE_FILE (insn), main_input_filename) != 0)
1521 || (last != 0
1522 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1523 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)))
1524 {
1525 delete_insn (insn); /* Use delete_note. */
1526 continue;
1527 }
1528 last = insn;
1529 if (NOTE_LINE_NUMBER (insn) > max_line)
1530 max_line = NOTE_LINE_NUMBER (insn);
1531 }
1532 }
1533 else
1534 #endif
1535 {
1536 for (insn = first; insn; insn = NEXT_INSN (insn))
1537 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > max_line)
1538 max_line = NOTE_LINE_NUMBER (insn);
1539 }
1540
1541 line_note_exists = xcalloc (max_line + 1, sizeof (char));
1542
1543 for (insn = first; insn; insn = NEXT_INSN (insn))
1544 {
1545 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1546 max_uid = INSN_UID (insn);
1547 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1548 line_note_exists[NOTE_LINE_NUMBER (insn)] = 1;
1549 #ifdef HAVE_cc0
1550 /* If CC tracking across branches is enabled, record the insn which
1551 jumps to each label that is reached from only one place. */
1552 if (optimize && GET_CODE (insn) == JUMP_INSN)
1553 {
1554 rtx lab = JUMP_LABEL (insn);
1555 if (lab && LABEL_NUSES (lab) == 1)
1556 {
1557 LABEL_REFS (lab) = insn;
1558 }
1559 }
1560 #endif
1561 }
1562
1563 init_recog ();
1564
1565 CC_STATUS_INIT;
1566
1567 /* Output the insns. */
1568 for (insn = NEXT_INSN (first); insn;)
1569 {
1570 #ifdef HAVE_ATTR_length
1571 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1572 {
1573 /* This can be triggered by bugs elsewhere in the compiler if
1574 new insns are created after init_insn_lengths is called. */
1575 if (GET_CODE (insn) == NOTE)
1576 insn_current_address = -1;
1577 else
1578 abort ();
1579 }
1580 else
1581 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1582 #endif /* HAVE_ATTR_length */
1583
1584 insn = final_scan_insn (insn, file, optimize, prescan, 0);
1585 }
1586
1587 free (line_note_exists);
1588 line_note_exists = NULL;
1589 }
1590 \f
1591 const char *
1592 get_insn_template (int code, rtx insn)
1593 {
1594 const void *output = insn_data[code].output;
1595 switch (insn_data[code].output_format)
1596 {
1597 case INSN_OUTPUT_FORMAT_SINGLE:
1598 return (const char *) output;
1599 case INSN_OUTPUT_FORMAT_MULTI:
1600 return ((const char *const *) output)[which_alternative];
1601 case INSN_OUTPUT_FORMAT_FUNCTION:
1602 if (insn == NULL)
1603 abort ();
1604 return (*(insn_output_fn) output) (recog_data.operand, insn);
1605
1606 default:
1607 abort ();
1608 }
1609 }
1610
1611 /* Emit the appropriate declaration for an alternate-entry-point
1612 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1613 LABEL_KIND != LABEL_NORMAL.
1614
1615 The case fall-through in this function is intentional. */
1616 static void
1617 output_alternate_entry_point (FILE *file, rtx insn)
1618 {
1619 const char *name = LABEL_NAME (insn);
1620
1621 switch (LABEL_KIND (insn))
1622 {
1623 case LABEL_WEAK_ENTRY:
1624 #ifdef ASM_WEAKEN_LABEL
1625 ASM_WEAKEN_LABEL (file, name);
1626 #endif
1627 case LABEL_GLOBAL_ENTRY:
1628 (*targetm.asm_out.globalize_label) (file, name);
1629 case LABEL_STATIC_ENTRY:
1630 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1631 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1632 #endif
1633 ASM_OUTPUT_LABEL (file, name);
1634 break;
1635
1636 case LABEL_NORMAL:
1637 default:
1638 abort ();
1639 }
1640 }
1641
1642 /* The final scan for one insn, INSN.
1643 Args are same as in `final', except that INSN
1644 is the insn being scanned.
1645 Value returned is the next insn to be scanned.
1646
1647 NOPEEPHOLES is the flag to disallow peephole processing (currently
1648 used for within delayed branch sequence output). */
1649
1650 rtx
1651 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1652 int prescan, int nopeepholes ATTRIBUTE_UNUSED)
1653 {
1654 #ifdef HAVE_cc0
1655 rtx set;
1656 #endif
1657
1658 insn_counter++;
1659
1660 /* Ignore deleted insns. These can occur when we split insns (due to a
1661 template of "#") while not optimizing. */
1662 if (INSN_DELETED_P (insn))
1663 return NEXT_INSN (insn);
1664
1665 switch (GET_CODE (insn))
1666 {
1667 case NOTE:
1668 if (prescan > 0)
1669 break;
1670
1671 switch (NOTE_LINE_NUMBER (insn))
1672 {
1673 case NOTE_INSN_DELETED:
1674 case NOTE_INSN_LOOP_BEG:
1675 case NOTE_INSN_LOOP_END:
1676 case NOTE_INSN_LOOP_END_TOP_COND:
1677 case NOTE_INSN_LOOP_CONT:
1678 case NOTE_INSN_LOOP_VTOP:
1679 case NOTE_INSN_FUNCTION_END:
1680 case NOTE_INSN_REPEATED_LINE_NUMBER:
1681 case NOTE_INSN_EXPECTED_VALUE:
1682 break;
1683
1684 case NOTE_INSN_BASIC_BLOCK:
1685 #ifdef IA64_UNWIND_INFO
1686 IA64_UNWIND_EMIT (asm_out_file, insn);
1687 #endif
1688 if (flag_debug_asm)
1689 fprintf (asm_out_file, "\t%s basic block %d\n",
1690 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1691 break;
1692
1693 case NOTE_INSN_EH_REGION_BEG:
1694 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1695 NOTE_EH_HANDLER (insn));
1696 break;
1697
1698 case NOTE_INSN_EH_REGION_END:
1699 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1700 NOTE_EH_HANDLER (insn));
1701 break;
1702
1703 case NOTE_INSN_PROLOGUE_END:
1704 (*targetm.asm_out.function_end_prologue) (file);
1705 profile_after_prologue (file);
1706 break;
1707
1708 case NOTE_INSN_EPILOGUE_BEG:
1709 (*targetm.asm_out.function_begin_epilogue) (file);
1710 break;
1711
1712 case NOTE_INSN_FUNCTION_BEG:
1713 app_disable ();
1714 (*debug_hooks->end_prologue) (last_linenum, last_filename);
1715 break;
1716
1717 case NOTE_INSN_BLOCK_BEG:
1718 if (debug_info_level == DINFO_LEVEL_NORMAL
1719 || debug_info_level == DINFO_LEVEL_VERBOSE
1720 || write_symbols == DWARF_DEBUG
1721 || write_symbols == DWARF2_DEBUG
1722 || write_symbols == VMS_AND_DWARF2_DEBUG
1723 || write_symbols == VMS_DEBUG)
1724 {
1725 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1726
1727 app_disable ();
1728 ++block_depth;
1729 high_block_linenum = last_linenum;
1730
1731 /* Output debugging info about the symbol-block beginning. */
1732 (*debug_hooks->begin_block) (last_linenum, n);
1733
1734 /* Mark this block as output. */
1735 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1736 }
1737 break;
1738
1739 case NOTE_INSN_BLOCK_END:
1740 if (debug_info_level == DINFO_LEVEL_NORMAL
1741 || debug_info_level == DINFO_LEVEL_VERBOSE
1742 || write_symbols == DWARF_DEBUG
1743 || write_symbols == DWARF2_DEBUG
1744 || write_symbols == VMS_AND_DWARF2_DEBUG
1745 || write_symbols == VMS_DEBUG)
1746 {
1747 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1748
1749 app_disable ();
1750
1751 /* End of a symbol-block. */
1752 --block_depth;
1753 if (block_depth < 0)
1754 abort ();
1755
1756 (*debug_hooks->end_block) (high_block_linenum, n);
1757 }
1758 break;
1759
1760 case NOTE_INSN_DELETED_LABEL:
1761 /* Emit the label. We may have deleted the CODE_LABEL because
1762 the label could be proved to be unreachable, though still
1763 referenced (in the form of having its address taken). */
1764 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1765 break;
1766
1767 case 0:
1768 break;
1769
1770 default:
1771 if (NOTE_LINE_NUMBER (insn) <= 0)
1772 abort ();
1773 break;
1774 }
1775 break;
1776
1777 case BARRIER:
1778 #if defined (DWARF2_UNWIND_INFO)
1779 if (dwarf2out_do_frame ())
1780 dwarf2out_frame_debug (insn);
1781 #endif
1782 break;
1783
1784 case CODE_LABEL:
1785 /* The target port might emit labels in the output function for
1786 some insn, e.g. sh.c output_branchy_insn. */
1787 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1788 {
1789 int align = LABEL_TO_ALIGNMENT (insn);
1790 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1791 int max_skip = LABEL_TO_MAX_SKIP (insn);
1792 #endif
1793
1794 if (align && NEXT_INSN (insn))
1795 {
1796 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1797 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1798 #else
1799 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1800 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1801 #else
1802 ASM_OUTPUT_ALIGN (file, align);
1803 #endif
1804 #endif
1805 }
1806 }
1807 #ifdef HAVE_cc0
1808 CC_STATUS_INIT;
1809 /* If this label is reached from only one place, set the condition
1810 codes from the instruction just before the branch. */
1811
1812 /* Disabled because some insns set cc_status in the C output code
1813 and NOTICE_UPDATE_CC alone can set incorrect status. */
1814 if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1815 {
1816 rtx jump = LABEL_REFS (insn);
1817 rtx barrier = prev_nonnote_insn (insn);
1818 rtx prev;
1819 /* If the LABEL_REFS field of this label has been set to point
1820 at a branch, the predecessor of the branch is a regular
1821 insn, and that branch is the only way to reach this label,
1822 set the condition codes based on the branch and its
1823 predecessor. */
1824 if (barrier && GET_CODE (barrier) == BARRIER
1825 && jump && GET_CODE (jump) == JUMP_INSN
1826 && (prev = prev_nonnote_insn (jump))
1827 && GET_CODE (prev) == INSN)
1828 {
1829 NOTICE_UPDATE_CC (PATTERN (prev), prev);
1830 NOTICE_UPDATE_CC (PATTERN (jump), jump);
1831 }
1832 }
1833 #endif
1834 if (prescan > 0)
1835 break;
1836
1837 #ifdef FINAL_PRESCAN_LABEL
1838 FINAL_PRESCAN_INSN (insn, NULL, 0);
1839 #endif
1840
1841 if (LABEL_NAME (insn))
1842 (*debug_hooks->label) (insn);
1843
1844 if (app_on)
1845 {
1846 fputs (ASM_APP_OFF, file);
1847 app_on = 0;
1848 }
1849 if (NEXT_INSN (insn) != 0
1850 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
1851 {
1852 rtx nextbody = PATTERN (NEXT_INSN (insn));
1853
1854 /* If this label is followed by a jump-table,
1855 make sure we put the label in the read-only section. Also
1856 possibly write the label and jump table together. */
1857
1858 if (GET_CODE (nextbody) == ADDR_VEC
1859 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1860 {
1861 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1862 /* In this case, the case vector is being moved by the
1863 target, so don't output the label at all. Leave that
1864 to the back end macros. */
1865 #else
1866 if (! JUMP_TABLES_IN_TEXT_SECTION)
1867 {
1868 int log_align;
1869
1870 readonly_data_section ();
1871
1872 #ifdef ADDR_VEC_ALIGN
1873 log_align = ADDR_VEC_ALIGN (NEXT_INSN (insn));
1874 #else
1875 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1876 #endif
1877 ASM_OUTPUT_ALIGN (file, log_align);
1878 }
1879 else
1880 function_section (current_function_decl);
1881
1882 #ifdef ASM_OUTPUT_CASE_LABEL
1883 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1884 NEXT_INSN (insn));
1885 #else
1886 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1887 #endif
1888 #endif
1889 break;
1890 }
1891 }
1892 if (LABEL_ALT_ENTRY_P (insn))
1893 output_alternate_entry_point (file, insn);
1894 else
1895 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1896 break;
1897
1898 default:
1899 {
1900 rtx body = PATTERN (insn);
1901 int insn_code_number;
1902 const char *template;
1903 rtx note;
1904
1905 /* An INSN, JUMP_INSN or CALL_INSN.
1906 First check for special kinds that recog doesn't recognize. */
1907
1908 if (GET_CODE (body) == USE /* These are just declarations */
1909 || GET_CODE (body) == CLOBBER)
1910 break;
1911
1912 #ifdef HAVE_cc0
1913 /* If there is a REG_CC_SETTER note on this insn, it means that
1914 the setting of the condition code was done in the delay slot
1915 of the insn that branched here. So recover the cc status
1916 from the insn that set it. */
1917
1918 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1919 if (note)
1920 {
1921 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1922 cc_prev_status = cc_status;
1923 }
1924 #endif
1925
1926 /* Detect insns that are really jump-tables
1927 and output them as such. */
1928
1929 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1930 {
1931 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1932 int vlen, idx;
1933 #endif
1934
1935 if (prescan > 0)
1936 break;
1937
1938 if (app_on)
1939 {
1940 fputs (ASM_APP_OFF, file);
1941 app_on = 0;
1942 }
1943
1944 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1945 if (GET_CODE (body) == ADDR_VEC)
1946 {
1947 #ifdef ASM_OUTPUT_ADDR_VEC
1948 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
1949 #else
1950 abort ();
1951 #endif
1952 }
1953 else
1954 {
1955 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
1956 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
1957 #else
1958 abort ();
1959 #endif
1960 }
1961 #else
1962 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
1963 for (idx = 0; idx < vlen; idx++)
1964 {
1965 if (GET_CODE (body) == ADDR_VEC)
1966 {
1967 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
1968 ASM_OUTPUT_ADDR_VEC_ELT
1969 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
1970 #else
1971 abort ();
1972 #endif
1973 }
1974 else
1975 {
1976 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
1977 ASM_OUTPUT_ADDR_DIFF_ELT
1978 (file,
1979 body,
1980 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
1981 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
1982 #else
1983 abort ();
1984 #endif
1985 }
1986 }
1987 #ifdef ASM_OUTPUT_CASE_END
1988 ASM_OUTPUT_CASE_END (file,
1989 CODE_LABEL_NUMBER (PREV_INSN (insn)),
1990 insn);
1991 #endif
1992 #endif
1993
1994 function_section (current_function_decl);
1995
1996 break;
1997 }
1998 /* Output debug information for the source line if the location of
1999 this insn differs from that of the previous one. */
2000 if (notice_source_line (insn))
2001 {
2002 (*debug_hooks->source_line) (last_linenum, last_filename);
2003 }
2004
2005 if (GET_CODE (body) == ASM_INPUT)
2006 {
2007 const char *string = XSTR (body, 0);
2008
2009 /* There's no telling what that did to the condition codes. */
2010 CC_STATUS_INIT;
2011 if (prescan > 0)
2012 break;
2013
2014 if (string[0])
2015 {
2016 if (! app_on)
2017 {
2018 fputs (ASM_APP_ON, file);
2019 app_on = 1;
2020 }
2021 fprintf (asm_out_file, "\t%s\n", string);
2022 }
2023 break;
2024 }
2025
2026 /* Detect `asm' construct with operands. */
2027 if (asm_noperands (body) >= 0)
2028 {
2029 unsigned int noperands = asm_noperands (body);
2030 rtx *ops = alloca (noperands * sizeof (rtx));
2031 const char *string;
2032
2033 /* There's no telling what that did to the condition codes. */
2034 CC_STATUS_INIT;
2035 if (prescan > 0)
2036 break;
2037
2038 /* Get out the operand values. */
2039 string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2040 /* Inhibit aborts on what would otherwise be compiler bugs. */
2041 insn_noperands = noperands;
2042 this_is_asm_operands = insn;
2043
2044 #ifdef FINAL_PRESCAN_INSN
2045 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2046 #endif
2047
2048 /* Output the insn using them. */
2049 if (string[0])
2050 {
2051 if (! app_on)
2052 {
2053 fputs (ASM_APP_ON, file);
2054 app_on = 1;
2055 }
2056 output_asm_insn (string, ops);
2057 }
2058
2059 this_is_asm_operands = 0;
2060 break;
2061 }
2062
2063 if (prescan <= 0 && app_on)
2064 {
2065 fputs (ASM_APP_OFF, file);
2066 app_on = 0;
2067 }
2068
2069 if (GET_CODE (body) == SEQUENCE)
2070 {
2071 /* A delayed-branch sequence */
2072 int i;
2073 rtx next;
2074
2075 if (prescan > 0)
2076 break;
2077 final_sequence = body;
2078
2079 /* Record the delay slots' frame information before the branch.
2080 This is needed for delayed calls: see execute_cfa_program(). */
2081 #if defined (DWARF2_UNWIND_INFO)
2082 if (dwarf2out_do_frame ())
2083 for (i = 1; i < XVECLEN (body, 0); i++)
2084 dwarf2out_frame_debug (XVECEXP (body, 0, i));
2085 #endif
2086
2087 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2088 force the restoration of a comparison that was previously
2089 thought unnecessary. If that happens, cancel this sequence
2090 and cause that insn to be restored. */
2091
2092 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, prescan, 1);
2093 if (next != XVECEXP (body, 0, 1))
2094 {
2095 final_sequence = 0;
2096 return next;
2097 }
2098
2099 for (i = 1; i < XVECLEN (body, 0); i++)
2100 {
2101 rtx insn = XVECEXP (body, 0, i);
2102 rtx next = NEXT_INSN (insn);
2103 /* We loop in case any instruction in a delay slot gets
2104 split. */
2105 do
2106 insn = final_scan_insn (insn, file, 0, prescan, 1);
2107 while (insn != next);
2108 }
2109 #ifdef DBR_OUTPUT_SEQEND
2110 DBR_OUTPUT_SEQEND (file);
2111 #endif
2112 final_sequence = 0;
2113
2114 /* If the insn requiring the delay slot was a CALL_INSN, the
2115 insns in the delay slot are actually executed before the
2116 called function. Hence we don't preserve any CC-setting
2117 actions in these insns and the CC must be marked as being
2118 clobbered by the function. */
2119 if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
2120 {
2121 CC_STATUS_INIT;
2122 }
2123 break;
2124 }
2125
2126 /* We have a real machine instruction as rtl. */
2127
2128 body = PATTERN (insn);
2129
2130 #ifdef HAVE_cc0
2131 set = single_set (insn);
2132
2133 /* Check for redundant test and compare instructions
2134 (when the condition codes are already set up as desired).
2135 This is done only when optimizing; if not optimizing,
2136 it should be possible for the user to alter a variable
2137 with the debugger in between statements
2138 and the next statement should reexamine the variable
2139 to compute the condition codes. */
2140
2141 if (optimize)
2142 {
2143 if (set
2144 && GET_CODE (SET_DEST (set)) == CC0
2145 && insn != last_ignored_compare)
2146 {
2147 if (GET_CODE (SET_SRC (set)) == SUBREG)
2148 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2149 else if (GET_CODE (SET_SRC (set)) == COMPARE)
2150 {
2151 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2152 XEXP (SET_SRC (set), 0)
2153 = alter_subreg (&XEXP (SET_SRC (set), 0));
2154 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2155 XEXP (SET_SRC (set), 1)
2156 = alter_subreg (&XEXP (SET_SRC (set), 1));
2157 }
2158 if ((cc_status.value1 != 0
2159 && rtx_equal_p (SET_SRC (set), cc_status.value1))
2160 || (cc_status.value2 != 0
2161 && rtx_equal_p (SET_SRC (set), cc_status.value2)))
2162 {
2163 /* Don't delete insn if it has an addressing side-effect. */
2164 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2165 /* or if anything in it is volatile. */
2166 && ! volatile_refs_p (PATTERN (insn)))
2167 {
2168 /* We don't really delete the insn; just ignore it. */
2169 last_ignored_compare = insn;
2170 break;
2171 }
2172 }
2173 }
2174 }
2175 #endif
2176
2177 #ifndef STACK_REGS
2178 /* Don't bother outputting obvious no-ops, even without -O.
2179 This optimization is fast and doesn't interfere with debugging.
2180 Don't do this if the insn is in a delay slot, since this
2181 will cause an improper number of delay insns to be written. */
2182 if (final_sequence == 0
2183 && prescan >= 0
2184 && GET_CODE (insn) == INSN && GET_CODE (body) == SET
2185 && GET_CODE (SET_SRC (body)) == REG
2186 && GET_CODE (SET_DEST (body)) == REG
2187 && REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
2188 break;
2189 #endif
2190
2191 #ifdef HAVE_cc0
2192 /* If this is a conditional branch, maybe modify it
2193 if the cc's are in a nonstandard state
2194 so that it accomplishes the same thing that it would
2195 do straightforwardly if the cc's were set up normally. */
2196
2197 if (cc_status.flags != 0
2198 && GET_CODE (insn) == JUMP_INSN
2199 && GET_CODE (body) == SET
2200 && SET_DEST (body) == pc_rtx
2201 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2202 && GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (body), 0))) == '<'
2203 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx
2204 /* This is done during prescan; it is not done again
2205 in final scan when prescan has been done. */
2206 && prescan >= 0)
2207 {
2208 /* This function may alter the contents of its argument
2209 and clear some of the cc_status.flags bits.
2210 It may also return 1 meaning condition now always true
2211 or -1 meaning condition now always false
2212 or 2 meaning condition nontrivial but altered. */
2213 int result = alter_cond (XEXP (SET_SRC (body), 0));
2214 /* If condition now has fixed value, replace the IF_THEN_ELSE
2215 with its then-operand or its else-operand. */
2216 if (result == 1)
2217 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2218 if (result == -1)
2219 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2220
2221 /* The jump is now either unconditional or a no-op.
2222 If it has become a no-op, don't try to output it.
2223 (It would not be recognized.) */
2224 if (SET_SRC (body) == pc_rtx)
2225 {
2226 delete_insn (insn);
2227 break;
2228 }
2229 else if (GET_CODE (SET_SRC (body)) == RETURN)
2230 /* Replace (set (pc) (return)) with (return). */
2231 PATTERN (insn) = body = SET_SRC (body);
2232
2233 /* Rerecognize the instruction if it has changed. */
2234 if (result != 0)
2235 INSN_CODE (insn) = -1;
2236 }
2237
2238 /* Make same adjustments to instructions that examine the
2239 condition codes without jumping and instructions that
2240 handle conditional moves (if this machine has either one). */
2241
2242 if (cc_status.flags != 0
2243 && set != 0)
2244 {
2245 rtx cond_rtx, then_rtx, else_rtx;
2246
2247 if (GET_CODE (insn) != JUMP_INSN
2248 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2249 {
2250 cond_rtx = XEXP (SET_SRC (set), 0);
2251 then_rtx = XEXP (SET_SRC (set), 1);
2252 else_rtx = XEXP (SET_SRC (set), 2);
2253 }
2254 else
2255 {
2256 cond_rtx = SET_SRC (set);
2257 then_rtx = const_true_rtx;
2258 else_rtx = const0_rtx;
2259 }
2260
2261 switch (GET_CODE (cond_rtx))
2262 {
2263 case GTU:
2264 case GT:
2265 case LTU:
2266 case LT:
2267 case GEU:
2268 case GE:
2269 case LEU:
2270 case LE:
2271 case EQ:
2272 case NE:
2273 {
2274 int result;
2275 if (XEXP (cond_rtx, 0) != cc0_rtx)
2276 break;
2277 result = alter_cond (cond_rtx);
2278 if (result == 1)
2279 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2280 else if (result == -1)
2281 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2282 else if (result == 2)
2283 INSN_CODE (insn) = -1;
2284 if (SET_DEST (set) == SET_SRC (set))
2285 delete_insn (insn);
2286 }
2287 break;
2288
2289 default:
2290 break;
2291 }
2292 }
2293
2294 #endif
2295
2296 #ifdef HAVE_peephole
2297 /* Do machine-specific peephole optimizations if desired. */
2298
2299 if (optimize && !flag_no_peephole && !nopeepholes)
2300 {
2301 rtx next = peephole (insn);
2302 /* When peepholing, if there were notes within the peephole,
2303 emit them before the peephole. */
2304 if (next != 0 && next != NEXT_INSN (insn))
2305 {
2306 rtx prev = PREV_INSN (insn);
2307
2308 for (note = NEXT_INSN (insn); note != next;
2309 note = NEXT_INSN (note))
2310 final_scan_insn (note, file, optimize, prescan, nopeepholes);
2311
2312 /* In case this is prescan, put the notes
2313 in proper position for later rescan. */
2314 note = NEXT_INSN (insn);
2315 PREV_INSN (note) = prev;
2316 NEXT_INSN (prev) = note;
2317 NEXT_INSN (PREV_INSN (next)) = insn;
2318 PREV_INSN (insn) = PREV_INSN (next);
2319 NEXT_INSN (insn) = next;
2320 PREV_INSN (next) = insn;
2321 }
2322
2323 /* PEEPHOLE might have changed this. */
2324 body = PATTERN (insn);
2325 }
2326 #endif
2327
2328 /* Try to recognize the instruction.
2329 If successful, verify that the operands satisfy the
2330 constraints for the instruction. Crash if they don't,
2331 since `reload' should have changed them so that they do. */
2332
2333 insn_code_number = recog_memoized (insn);
2334 cleanup_subreg_operands (insn);
2335
2336 /* Dump the insn in the assembly for debugging. */
2337 if (flag_dump_rtl_in_asm)
2338 {
2339 print_rtx_head = ASM_COMMENT_START;
2340 print_rtl_single (asm_out_file, insn);
2341 print_rtx_head = "";
2342 }
2343
2344 if (! constrain_operands_cached (1))
2345 fatal_insn_not_found (insn);
2346
2347 /* Some target machines need to prescan each insn before
2348 it is output. */
2349
2350 #ifdef FINAL_PRESCAN_INSN
2351 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2352 #endif
2353
2354 #ifdef HAVE_conditional_execution
2355 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2356 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2357 else
2358 current_insn_predicate = NULL_RTX;
2359 #endif
2360
2361 #ifdef HAVE_cc0
2362 cc_prev_status = cc_status;
2363
2364 /* Update `cc_status' for this instruction.
2365 The instruction's output routine may change it further.
2366 If the output routine for a jump insn needs to depend
2367 on the cc status, it should look at cc_prev_status. */
2368
2369 NOTICE_UPDATE_CC (body, insn);
2370 #endif
2371
2372 current_output_insn = debug_insn = insn;
2373
2374 #if defined (DWARF2_UNWIND_INFO)
2375 if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
2376 dwarf2out_frame_debug (insn);
2377 #endif
2378
2379 /* Find the proper template for this insn. */
2380 template = get_insn_template (insn_code_number, insn);
2381
2382 /* If the C code returns 0, it means that it is a jump insn
2383 which follows a deleted test insn, and that test insn
2384 needs to be reinserted. */
2385 if (template == 0)
2386 {
2387 rtx prev;
2388
2389 if (prev_nonnote_insn (insn) != last_ignored_compare)
2390 abort ();
2391
2392 /* We have already processed the notes between the setter and
2393 the user. Make sure we don't process them again; this is
2394 particularly important if one of the notes is a block
2395 scope note or an EH note. */
2396 for (prev = insn;
2397 prev != last_ignored_compare;
2398 prev = PREV_INSN (prev))
2399 {
2400 if (GET_CODE (prev) == NOTE)
2401 delete_insn (prev); /* Use delete_note. */
2402 }
2403
2404 return prev;
2405 }
2406
2407 /* If the template is the string "#", it means that this insn must
2408 be split. */
2409 if (template[0] == '#' && template[1] == '\0')
2410 {
2411 rtx new = try_split (body, insn, 0);
2412
2413 /* If we didn't split the insn, go away. */
2414 if (new == insn && PATTERN (new) == body)
2415 fatal_insn ("could not split insn", insn);
2416
2417 #ifdef HAVE_ATTR_length
2418 /* This instruction should have been split in shorten_branches,
2419 to ensure that we would have valid length info for the
2420 splitees. */
2421 abort ();
2422 #endif
2423
2424 return new;
2425 }
2426
2427 if (prescan > 0)
2428 break;
2429
2430 #ifdef IA64_UNWIND_INFO
2431 IA64_UNWIND_EMIT (asm_out_file, insn);
2432 #endif
2433 /* Output assembler code from the template. */
2434
2435 output_asm_insn (template, recog_data.operand);
2436
2437 /* If necessary, report the effect that the instruction has on
2438 the unwind info. We've already done this for delay slots
2439 and call instructions. */
2440 #if defined (DWARF2_UNWIND_INFO)
2441 if (GET_CODE (insn) == INSN
2442 #if !defined (HAVE_prologue)
2443 && !ACCUMULATE_OUTGOING_ARGS
2444 #endif
2445 && final_sequence == 0
2446 && dwarf2out_do_frame ())
2447 dwarf2out_frame_debug (insn);
2448 #endif
2449
2450 #if 0
2451 /* It's not at all clear why we did this, and doing so used to
2452 interfere with tests that used REG_WAS_0 notes, which are
2453 now gone, so let's try leaving this out. */
2454
2455 /* Mark this insn as having been output. */
2456 INSN_DELETED_P (insn) = 1;
2457 #endif
2458
2459 /* Emit information for vtable gc. */
2460 note = find_reg_note (insn, REG_VTABLE_REF, NULL_RTX);
2461
2462 current_output_insn = debug_insn = 0;
2463 }
2464 }
2465 return NEXT_INSN (insn);
2466 }
2467 \f
2468 /* Output debugging info to the assembler file FILE
2469 based on the NOTE-insn INSN, assumed to be a line number. */
2470
2471 static bool
2472 notice_source_line (rtx insn)
2473 {
2474 const char *filename = insn_file (insn);
2475 int linenum = insn_line (insn);
2476
2477 if (filename && (filename != last_filename || last_linenum != linenum))
2478 {
2479 last_filename = filename;
2480 last_linenum = linenum;
2481 high_block_linenum = MAX (last_linenum, high_block_linenum);
2482 high_function_linenum = MAX (last_linenum, high_function_linenum);
2483 return true;
2484 }
2485 return false;
2486 }
2487 \f
2488 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2489 directly to the desired hard register. */
2490
2491 void
2492 cleanup_subreg_operands (rtx insn)
2493 {
2494 int i;
2495 extract_insn_cached (insn);
2496 for (i = 0; i < recog_data.n_operands; i++)
2497 {
2498 /* The following test cannot use recog_data.operand when testing
2499 for a SUBREG: the underlying object might have been changed
2500 already if we are inside a match_operator expression that
2501 matches the else clause. Instead we test the underlying
2502 expression directly. */
2503 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2504 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2505 else if (GET_CODE (recog_data.operand[i]) == PLUS
2506 || GET_CODE (recog_data.operand[i]) == MULT
2507 || GET_CODE (recog_data.operand[i]) == MEM)
2508 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2509 }
2510
2511 for (i = 0; i < recog_data.n_dups; i++)
2512 {
2513 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2514 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2515 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2516 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2517 || GET_CODE (*recog_data.dup_loc[i]) == MEM)
2518 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2519 }
2520 }
2521
2522 /* If X is a SUBREG, replace it with a REG or a MEM,
2523 based on the thing it is a subreg of. */
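   /* Illustrative sketch (not tied to any particular target): assuming
      32-bit words and a little-endian layout,

	 (subreg:SI (mem:DI addr) 4)  becomes  (mem:SI (plus addr (const_int 4)))

      via adjust_address, while

	 (subreg:SI (reg:DI 2) 4)     becomes  (reg:SI 3)

      provided simplify_subreg (or the REG fallback below) maps byte
      offset 4 of hard register 2 onto hard register 3 on that target.  */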
2524
2525 rtx
2526 alter_subreg (rtx *xp)
2527 {
2528 rtx x = *xp;
2529 rtx y = SUBREG_REG (x);
2530
2531 /* simplify_subreg does not remove subreg from volatile references.
2532 We are required to. */
2533 if (GET_CODE (y) == MEM)
2534 *xp = adjust_address (y, GET_MODE (x), SUBREG_BYTE (x));
2535 else
2536 {
2537 rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2538 SUBREG_BYTE (x));
2539
2540 if (new != 0)
2541 *xp = new;
2542 /* Simplify_subreg can't handle some REG cases, but we have to. */
2543 else if (GET_CODE (y) == REG)
2544 {
2545 unsigned int regno = subreg_hard_regno (x, 1);
2546 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2547 }
2548 else
2549 abort ();
2550 }
2551
2552 return *xp;
2553 }
2554
2555 /* Do alter_subreg on all the SUBREGs contained in X. */
2556
2557 static rtx
2558 walk_alter_subreg (rtx *xp)
2559 {
2560 rtx x = *xp;
2561 switch (GET_CODE (x))
2562 {
2563 case PLUS:
2564 case MULT:
2565 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2566 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2567 break;
2568
2569 case MEM:
2570 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2571 break;
2572
2573 case SUBREG:
2574 return alter_subreg (xp);
2575
2576 default:
2577 break;
2578 }
2579
2580 return *xp;
2581 }
2582 \f
2583 #ifdef HAVE_cc0
2584
2585 /* Given BODY, the body of a jump instruction, alter the jump condition
2586 as required by the bits that are set in cc_status.flags.
2587 Not all of the bits there can be handled at this level in all cases.
2588
2589 The value is normally 0.
2590 1 means that the condition has become always true.
2591 -1 means that the condition has become always false.
2592 2 means that COND has been altered. */
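
   Worked example (illustrative only): if CC_REVERSED is set, a COND of
   (gt (cc0) (const_int 0)) is rewritten via swap_condition to
   (lt (cc0) (const_int 0)) and 2 is returned.  If instead
   CC_NOT_NEGATIVE is set, (ge (cc0) (const_int 0)) yields 1 (the jump
   becomes unconditional) and (lt (cc0) (const_int 0)) yields -1 (the
   jump becomes a no-op).  */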
2593
2594 static int
2595 alter_cond (rtx cond)
2596 {
2597 int value = 0;
2598
2599 if (cc_status.flags & CC_REVERSED)
2600 {
2601 value = 2;
2602 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2603 }
2604
2605 if (cc_status.flags & CC_INVERTED)
2606 {
2607 value = 2;
2608 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2609 }
2610
2611 if (cc_status.flags & CC_NOT_POSITIVE)
2612 switch (GET_CODE (cond))
2613 {
2614 case LE:
2615 case LEU:
2616 case GEU:
2617 /* Jump becomes unconditional. */
2618 return 1;
2619
2620 case GT:
2621 case GTU:
2622 case LTU:
2623 /* Jump becomes no-op. */
2624 return -1;
2625
2626 case GE:
2627 PUT_CODE (cond, EQ);
2628 value = 2;
2629 break;
2630
2631 case LT:
2632 PUT_CODE (cond, NE);
2633 value = 2;
2634 break;
2635
2636 default:
2637 break;
2638 }
2639
2640 if (cc_status.flags & CC_NOT_NEGATIVE)
2641 switch (GET_CODE (cond))
2642 {
2643 case GE:
2644 case GEU:
2645 /* Jump becomes unconditional. */
2646 return 1;
2647
2648 case LT:
2649 case LTU:
2650 /* Jump becomes no-op. */
2651 return -1;
2652
2653 case LE:
2654 case LEU:
2655 PUT_CODE (cond, EQ);
2656 value = 2;
2657 break;
2658
2659 case GT:
2660 case GTU:
2661 PUT_CODE (cond, NE);
2662 value = 2;
2663 break;
2664
2665 default:
2666 break;
2667 }
2668
2669 if (cc_status.flags & CC_NO_OVERFLOW)
2670 switch (GET_CODE (cond))
2671 {
2672 case GEU:
2673 /* Jump becomes unconditional. */
2674 return 1;
2675
2676 case LEU:
2677 PUT_CODE (cond, EQ);
2678 value = 2;
2679 break;
2680
2681 case GTU:
2682 PUT_CODE (cond, NE);
2683 value = 2;
2684 break;
2685
2686 case LTU:
2687 /* Jump becomes no-op. */
2688 return -1;
2689
2690 default:
2691 break;
2692 }
2693
2694 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2695 switch (GET_CODE (cond))
2696 {
2697 default:
2698 abort ();
2699
2700 case NE:
2701 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2702 value = 2;
2703 break;
2704
2705 case EQ:
2706 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2707 value = 2;
2708 break;
2709 }
2710
2711 if (cc_status.flags & CC_NOT_SIGNED)
2712 /* The flags are valid if signed condition operators are converted
2713 to unsigned. */
2714 switch (GET_CODE (cond))
2715 {
2716 case LE:
2717 PUT_CODE (cond, LEU);
2718 value = 2;
2719 break;
2720
2721 case LT:
2722 PUT_CODE (cond, LTU);
2723 value = 2;
2724 break;
2725
2726 case GT:
2727 PUT_CODE (cond, GTU);
2728 value = 2;
2729 break;
2730
2731 case GE:
2732 PUT_CODE (cond, GEU);
2733 value = 2;
2734 break;
2735
2736 default:
2737 break;
2738 }
2739
2740 return value;
2741 }
2742 #endif
2743 \f
2744 /* Report inconsistency between the assembler template and the operands.
2745 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
2746
2747 void
2748 output_operand_lossage (const char *msgid, ...)
2749 {
2750 char *fmt_string;
2751 char *new_message;
2752 const char *pfx_str;
2753 va_list ap;
2754
2755 va_start (ap, msgid);
2756
2757 pfx_str = this_is_asm_operands ? _("invalid `asm': ") : "output_operand: ";
2758 asprintf (&fmt_string, "%s%s", pfx_str, _(msgid));
2759 vasprintf (&new_message, fmt_string, ap);
2760
2761 if (this_is_asm_operands)
2762 error_for_asm (this_is_asm_operands, "%s", new_message);
2763 else
2764 internal_error ("%s", new_message);
2765
2766 free (fmt_string);
2767 free (new_message);
2768 va_end (ap);
2769 }
2770 \f
2771 /* Output of assembler code from a template, and its subroutines. */
2772
2773 /* Annotate the assembly with a comment describing the pattern and
2774 alternative used. */
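/* For instance (hypothetical insn and target): an insn with UID 42 that
   matched the second alternative of a pattern named "addsi3" would be
   annotated roughly as

	# 42	addsi3/2	[length = 4]

   where "#" stands in for the target's ASM_COMMENT_START string and the
   length field appears only when the port defines HAVE_ATTR_length.  */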
2775
2776 static void
2777 output_asm_name (void)
2778 {
2779 if (debug_insn)
2780 {
2781 int num = INSN_CODE (debug_insn);
2782 fprintf (asm_out_file, "\t%s %d\t%s",
2783 ASM_COMMENT_START, INSN_UID (debug_insn),
2784 insn_data[num].name);
2785 if (insn_data[num].n_alternatives > 1)
2786 fprintf (asm_out_file, "/%d", which_alternative + 1);
2787 #ifdef HAVE_ATTR_length
2788 fprintf (asm_out_file, "\t[length = %d]",
2789 get_attr_length (debug_insn));
2790 #endif
2791 /* Clear this so only the first assembler insn
2792 of any rtl insn will get the special comment for -dp. */
2793 debug_insn = 0;
2794 }
2795 }
2796
2797 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2798 or its address, return that expr. Set *PADDRESSP to 1 if the expr
2799 corresponds to the address of the object and 0 if to the object. */
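/* Sketch of the intended behavior (the decl names are made up): for a
   MEM whose MEM_EXPR is the VAR_DECL of a variable `x', that decl is
   returned and *PADDRESSP is set to 0.  For a MEM with no MEM_EXPR
   whose address is a register whose REG_EXPR is a pointer variable
   `p', the decl for `p' is returned and *PADDRESSP is set to 1.  */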
2800
2801 static tree
2802 get_mem_expr_from_op (rtx op, int *paddressp)
2803 {
2804 tree expr;
2805 int inner_addressp;
2806
2807 *paddressp = 0;
2808
2809 if (GET_CODE (op) == REG)
2810 return REG_EXPR (op);
2811 else if (GET_CODE (op) != MEM)
2812 return 0;
2813
2814 if (MEM_EXPR (op) != 0)
2815 return MEM_EXPR (op);
2816
2817 /* Otherwise we have an address, so indicate it and look at the address. */
2818 *paddressp = 1;
2819 op = XEXP (op, 0);
2820
2821 /* First check if we have a decl for the address, then look at the right side
2822 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
2823 But don't allow the address to itself be indirect. */
2824 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2825 return expr;
2826 else if (GET_CODE (op) == PLUS
2827 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2828 return expr;
2829
2830 while (GET_RTX_CLASS (GET_CODE (op)) == '1'
2831 || GET_RTX_CLASS (GET_CODE (op)) == '2')
2832 op = XEXP (op, 0);
2833
2834 expr = get_mem_expr_from_op (op, &inner_addressp);
2835 return inner_addressp ? 0 : expr;
2836 }
2837
2838 /* Output operand names for assembler instructions. OPERANDS is the
2839 operand vector, OPORDER is the order to write the operands, and NOPS
2840 is the number of operands to write. */
2841
2842 static void
2843 output_asm_operand_names (rtx *operands, int *oporder, int nops)
2844 {
2845 int wrote = 0;
2846 int i;
2847
2848 for (i = 0; i < nops; i++)
2849 {
2850 int addressp;
2851 rtx op = operands[oporder[i]];
2852 tree expr = get_mem_expr_from_op (op, &addressp);
2853
2854 fprintf (asm_out_file, "%c%s",
2855 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2856 wrote = 1;
2857 if (expr)
2858 {
2859 fprintf (asm_out_file, "%s",
2860 addressp ? "*" : "");
2861 print_mem_expr (asm_out_file, expr);
2862 wrote = 1;
2863 }
2864 else if (REG_P (op) && ORIGINAL_REGNO (op)
2865 && ORIGINAL_REGNO (op) != REGNO (op))
2866 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2867 }
2868 }
2869
2870 /* Output text from TEMPLATE to the assembler output file,
2871 obeying %-directions to substitute operands taken from
2872 the vector OPERANDS.
2873
2874 %N (for N a digit) means print operand N in usual manner.
2875 %lN means require operand N to be a CODE_LABEL or LABEL_REF
2876 and print the label name with no punctuation.
2877 %cN means require operand N to be a constant
2878 and print the constant expression with no punctuation.
2879 %aN means expect operand N to be a memory address
2880 (not a memory reference!) and print a reference
2881 to that address.
2882 %nN means expect operand N to be a constant
2883 and print a constant expression for minus the value
2884 of the operand, with no other punctuation. */
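
/* Illustrative example (the template and operand values are invented,
   and the exact register syntax depends on PRINT_OPERAND): given the
   template

	"add %0,%1,%n2"

   with operands[0] and operands[1] being hard registers printed as
   "r3" and "r4", and operands[2] being (const_int 8), the line emitted
   would be roughly

	add r3,r4,-8

   since %n prints minus the value of a constant operand.  */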
2885
2886 void
2887 output_asm_insn (const char *template, rtx *operands)
2888 {
2889 const char *p;
2890 int c;
2891 #ifdef ASSEMBLER_DIALECT
2892 int dialect = 0;
2893 #endif
2894 int oporder[MAX_RECOG_OPERANDS];
2895 char opoutput[MAX_RECOG_OPERANDS];
2896 int ops = 0;
2897
2898 /* An insn may return a null string template
2899 in a case where no assembler code is needed. */
2900 if (*template == 0)
2901 return;
2902
2903 memset (opoutput, 0, sizeof opoutput);
2904 p = template;
2905 putc ('\t', asm_out_file);
2906
2907 #ifdef ASM_OUTPUT_OPCODE
2908 ASM_OUTPUT_OPCODE (asm_out_file, p);
2909 #endif
2910
2911 while ((c = *p++))
2912 switch (c)
2913 {
2914 case '\n':
2915 if (flag_verbose_asm)
2916 output_asm_operand_names (operands, oporder, ops);
2917 if (flag_print_asm_name)
2918 output_asm_name ();
2919
2920 ops = 0;
2921 memset (opoutput, 0, sizeof opoutput);
2922
2923 putc (c, asm_out_file);
2924 #ifdef ASM_OUTPUT_OPCODE
2925 while ((c = *p) == '\t')
2926 {
2927 putc (c, asm_out_file);
2928 p++;
2929 }
2930 ASM_OUTPUT_OPCODE (asm_out_file, p);
2931 #endif
2932 break;
2933
2934 #ifdef ASSEMBLER_DIALECT
2935 case '{':
2936 {
2937 int i;
2938
2939 if (dialect)
2940 output_operand_lossage ("nested assembly dialect alternatives");
2941 else
2942 dialect = 1;
2943
2944 /* If we want the first dialect, do nothing. Otherwise, skip
2945 DIALECT_NUMBER of strings ending with '|'. */
2946 for (i = 0; i < dialect_number; i++)
2947 {
2948 while (*p && *p != '}' && *p++ != '|')
2949 ;
2950 if (*p == '}')
2951 break;
2952 if (*p == '|')
2953 p++;
2954 }
2955
2956 if (*p == '\0')
2957 output_operand_lossage ("unterminated assembly dialect alternative");
2958 }
2959 break;
2960
2961 case '|':
2962 if (dialect)
2963 {
2964 /* Skip to close brace. */
2965 do
2966 {
2967 if (*p == '\0')
2968 {
2969 output_operand_lossage ("unterminated assembly dialect alternative");
2970 break;
2971 }
2972 }
2973 while (*p++ != '}');
2974 dialect = 0;
2975 }
2976 else
2977 putc (c, asm_out_file);
2978 break;
2979
2980 case '}':
2981 if (! dialect)
2982 putc (c, asm_out_file);
2983 dialect = 0;
2984 break;
2985 #endif
2986
2987 case '%':
2988 /* %% outputs a single %. */
2989 if (*p == '%')
2990 {
2991 p++;
2992 putc (c, asm_out_file);
2993 }
2994 /* %= outputs a number which is unique to each insn in the entire
2995 compilation. This is useful for making local labels that are
2996 referred to more than once in a given insn. */
2997 else if (*p == '=')
2998 {
2999 p++;
3000 fprintf (asm_out_file, "%d", insn_counter);
3001 }
3002 /* % followed by a letter and some digits
3003 outputs an operand in a special way depending on the letter.
3004 Letters `acln' are implemented directly.
3005 Other letters are passed to `output_operand' so that
3006 the PRINT_OPERAND macro can define them. */
3007 else if (ISALPHA (*p))
3008 {
3009 int letter = *p++;
3010 c = atoi (p);
3011
3012 if (! ISDIGIT (*p))
3013 output_operand_lossage ("operand number missing after %%-letter");
3014 else if (this_is_asm_operands
3015 && (c < 0 || (unsigned int) c >= insn_noperands))
3016 output_operand_lossage ("operand number out of range");
3017 else if (letter == 'l')
3018 output_asm_label (operands[c]);
3019 else if (letter == 'a')
3020 output_address (operands[c]);
3021 else if (letter == 'c')
3022 {
3023 if (CONSTANT_ADDRESS_P (operands[c]))
3024 output_addr_const (asm_out_file, operands[c]);
3025 else
3026 output_operand (operands[c], 'c');
3027 }
3028 else if (letter == 'n')
3029 {
3030 if (GET_CODE (operands[c]) == CONST_INT)
3031 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3032 - INTVAL (operands[c]));
3033 else
3034 {
3035 putc ('-', asm_out_file);
3036 output_addr_const (asm_out_file, operands[c]);
3037 }
3038 }
3039 else
3040 output_operand (operands[c], letter);
3041
3042 if (!opoutput[c])
3043 oporder[ops++] = c;
3044 opoutput[c] = 1;
3045
3046 while (ISDIGIT (c = *p))
3047 p++;
3048 }
3049 /* % followed by a digit outputs an operand the default way. */
3050 else if (ISDIGIT (*p))
3051 {
3052 c = atoi (p);
3053 if (this_is_asm_operands
3054 && (c < 0 || (unsigned int) c >= insn_noperands))
3055 output_operand_lossage ("operand number out of range");
3056 else
3057 output_operand (operands[c], 0);
3058
3059 if (!opoutput[c])
3060 oporder[ops++] = c;
3061 opoutput[c] = 1;
3062
3063 while (ISDIGIT (c = *p))
3064 p++;
3065 }
3066 /* % followed by punctuation: output something for that
3067 punctuation character alone, with no operand.
3068 The PRINT_OPERAND macro decides what is actually done. */
3069 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3070 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3071 output_operand (NULL_RTX, *p++);
3072 #endif
3073 else
3074 output_operand_lossage ("invalid %%-code");
3075 break;
3076
3077 default:
3078 putc (c, asm_out_file);
3079 }
3080
3081 /* Write out the variable names for operands, if we know them. */
3082 if (flag_verbose_asm)
3083 output_asm_operand_names (operands, oporder, ops);
3084 if (flag_print_asm_name)
3085 output_asm_name ();
3086
3087 putc ('\n', asm_out_file);
3088 }
3089 \f
3090 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3091
3092 void
3093 output_asm_label (rtx x)
3094 {
3095 char buf[256];
3096
3097 if (GET_CODE (x) == LABEL_REF)
3098 x = XEXP (x, 0);
3099 if (GET_CODE (x) == CODE_LABEL
3100 || (GET_CODE (x) == NOTE
3101 && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3102 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3103 else
3104 output_operand_lossage ("`%%l' operand isn't a label");
3105
3106 assemble_name (asm_out_file, buf);
3107 }
3108
3109 /* Print operand X using machine-dependent assembler syntax.
3110 The macro PRINT_OPERAND is defined just to control this function.
3111 CODE is a non-digit that preceded the operand-number in the % spec,
3112 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3113 between the % and the digits.
3114 When CODE is a non-letter, X is 0.
3115
3116 The meanings of the letters are machine-dependent and controlled
3117 by PRINT_OPERAND. */
3118
3119 static void
3120 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3121 {
3122 if (x && GET_CODE (x) == SUBREG)
3123 x = alter_subreg (&x);
3124
3125 /* If X is a pseudo-register, abort now rather than writing trash to the
3126 assembler file. */
3127
3128 if (x && GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
3129 abort ();
3130
3131 PRINT_OPERAND (asm_out_file, x, code);
3132 }
3133
3134 /* Print a memory reference operand for address X
3135 using machine-dependent assembler syntax.
3136 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3137
3138 void
3139 output_address (rtx x)
3140 {
3141 walk_alter_subreg (&x);
3142 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3143 }
3144 \f
3145 /* Print an integer constant expression in assembler syntax.
3146 Addition and subtraction are the only arithmetic
3147 that may appear in these expressions. */
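
/* For example, a constant such as

      (const (plus (symbol_ref "foo") (const_int 4)))

   is printed as "foo+4", and (minus (symbol_ref "a") (symbol_ref "b"))
   as "a-b" (modulo whatever prefixes assemble_name applies to the
   symbol names on a given target).  */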
3148
3149 void
3150 output_addr_const (FILE *file, rtx x)
3151 {
3152 char buf[256];
3153
3154 restart:
3155 switch (GET_CODE (x))
3156 {
3157 case PC:
3158 putc ('.', file);
3159 break;
3160
3161 case SYMBOL_REF:
3162 #ifdef ASM_OUTPUT_SYMBOL_REF
3163 ASM_OUTPUT_SYMBOL_REF (file, x);
3164 #else
3165 assemble_name (file, XSTR (x, 0));
3166 #endif
3167 break;
3168
3169 case LABEL_REF:
3170 x = XEXP (x, 0);
3171 /* Fall through. */
3172 case CODE_LABEL:
3173 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3174 #ifdef ASM_OUTPUT_LABEL_REF
3175 ASM_OUTPUT_LABEL_REF (file, buf);
3176 #else
3177 assemble_name (file, buf);
3178 #endif
3179 break;
3180
3181 case CONST_INT:
3182 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3183 break;
3184
3185 case CONST:
3186 /* This used to output parentheses around the expression,
3187 but that does not work on the 386 (either ATT or BSD assembler). */
3188 output_addr_const (file, XEXP (x, 0));
3189 break;
3190
3191 case CONST_DOUBLE:
3192 if (GET_MODE (x) == VOIDmode)
3193 {
3194 /* We can use %d if the number is one word and positive. */
3195 if (CONST_DOUBLE_HIGH (x))
3196 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3197 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3198 else if (CONST_DOUBLE_LOW (x) < 0)
3199 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3200 else
3201 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3202 }
3203 else
3204 /* We can't handle floating point constants;
3205 PRINT_OPERAND must handle them. */
3206 output_operand_lossage ("floating constant misused");
3207 break;
3208
3209 case PLUS:
3210 /* Some assemblers need integer constants to appear last (eg masm). */
3211 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3212 {
3213 output_addr_const (file, XEXP (x, 1));
3214 if (INTVAL (XEXP (x, 0)) >= 0)
3215 fprintf (file, "+");
3216 output_addr_const (file, XEXP (x, 0));
3217 }
3218 else
3219 {
3220 output_addr_const (file, XEXP (x, 0));
3221 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3222 || INTVAL (XEXP (x, 1)) >= 0)
3223 fprintf (file, "+");
3224 output_addr_const (file, XEXP (x, 1));
3225 }
3226 break;
3227
3228 case MINUS:
3229 /* Avoid outputting things like x-x or x+5-x,
3230 since some assemblers can't handle that. */
3231 x = simplify_subtraction (x);
3232 if (GET_CODE (x) != MINUS)
3233 goto restart;
3234
3235 output_addr_const (file, XEXP (x, 0));
3236 fprintf (file, "-");
3237 if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3238 || GET_CODE (XEXP (x, 1)) == PC
3239 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3240 output_addr_const (file, XEXP (x, 1));
3241 else
3242 {
3243 fputs (targetm.asm_out.open_paren, file);
3244 output_addr_const (file, XEXP (x, 1));
3245 fputs (targetm.asm_out.close_paren, file);
3246 }
3247 break;
3248
3249 case ZERO_EXTEND:
3250 case SIGN_EXTEND:
3251 case SUBREG:
3252 output_addr_const (file, XEXP (x, 0));
3253 break;
3254
3255 default:
3256 #ifdef OUTPUT_ADDR_CONST_EXTRA
3257 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3258 break;
3259
3260 fail:
3261 #endif
3262 output_operand_lossage ("invalid expression as operand");
3263 }
3264 }
3265 \f
3266 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3267 %R prints the value of REGISTER_PREFIX.
3268 %L prints the value of LOCAL_LABEL_PREFIX.
3269 %U prints the value of USER_LABEL_PREFIX.
3270 %I prints the value of IMMEDIATE_PREFIX.
3271 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3272 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3273
3274 We handle alternate assembler dialects here, just like output_asm_insn. */
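
/* A usage sketch (the register number and `offset' variable are
   invented; the %R and %I expansions depend on the target's prefix
   macros):

      asm_fprintf (file, "\tadd %R%s,%I%wd\n", reg_names[3], offset);

   might emit a line like "add %r3,#16" on a target whose
   REGISTER_PREFIX is "%" and IMMEDIATE_PREFIX is "#", with %wd
   formatting a HOST_WIDE_INT.  */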
3275
3276 void
3277 asm_fprintf (FILE *file, const char *p, ...)
3278 {
3279 char buf[10];
3280 char *q, c;
3281 va_list argptr;
3282
3283 va_start (argptr, p);
3284
3285 buf[0] = '%';
3286
3287 while ((c = *p++))
3288 switch (c)
3289 {
3290 #ifdef ASSEMBLER_DIALECT
3291 case '{':
3292 {
3293 int i;
3294
3295 /* If we want the first dialect, do nothing. Otherwise, skip
3296 DIALECT_NUMBER of strings ending with '|'. */
3297 for (i = 0; i < dialect_number; i++)
3298 {
3299 while (*p && *p++ != '|')
3300 ;
3301
3302 if (*p == '|')
3303 p++;
3304 }
3305 }
3306 break;
3307
3308 case '|':
3309 /* Skip to close brace. */
3310 while (*p && *p++ != '}')
3311 ;
3312 break;
3313
3314 case '}':
3315 break;
3316 #endif
3317
3318 case '%':
3319 c = *p++;
3320 q = &buf[1];
3321 while (strchr ("-+ #0", c))
3322 {
3323 *q++ = c;
3324 c = *p++;
3325 }
3326 while (ISDIGIT (c) || c == '.')
3327 {
3328 *q++ = c;
3329 c = *p++;
3330 }
3331 switch (c)
3332 {
3333 case '%':
3334 putc ('%', file);
3335 break;
3336
3337 case 'd': case 'i': case 'u':
3338 case 'x': case 'X': case 'o':
3339 case 'c':
3340 *q++ = c;
3341 *q = 0;
3342 fprintf (file, buf, va_arg (argptr, int));
3343 break;
3344
3345 case 'w':
3346 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3347 'o' cases, but we do not check for those cases. It
3348 means that the value is a HOST_WIDE_INT, which may be
3349 either `long' or `long long'. */
3350 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3351 q += strlen (HOST_WIDE_INT_PRINT);
3352 *q++ = *p++;
3353 *q = 0;
3354 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3355 break;
3356
3357 case 'l':
3358 *q++ = c;
3359 #ifdef HAVE_LONG_LONG
3360 if (*p == 'l')
3361 {
3362 *q++ = *p++;
3363 *q++ = *p++;
3364 *q = 0;
3365 fprintf (file, buf, va_arg (argptr, long long));
3366 }
3367 else
3368 #endif
3369 {
3370 *q++ = *p++;
3371 *q = 0;
3372 fprintf (file, buf, va_arg (argptr, long));
3373 }
3374
3375 break;
3376
3377 case 's':
3378 *q++ = c;
3379 *q = 0;
3380 fprintf (file, buf, va_arg (argptr, char *));
3381 break;
3382
3383 case 'O':
3384 #ifdef ASM_OUTPUT_OPCODE
3385 ASM_OUTPUT_OPCODE (asm_out_file, p);
3386 #endif
3387 break;
3388
3389 case 'R':
3390 #ifdef REGISTER_PREFIX
3391 fprintf (file, "%s", REGISTER_PREFIX);
3392 #endif
3393 break;
3394
3395 case 'I':
3396 #ifdef IMMEDIATE_PREFIX
3397 fprintf (file, "%s", IMMEDIATE_PREFIX);
3398 #endif
3399 break;
3400
3401 case 'L':
3402 #ifdef LOCAL_LABEL_PREFIX
3403 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3404 #endif
3405 break;
3406
3407 case 'U':
3408 fputs (user_label_prefix, file);
3409 break;
3410
3411 #ifdef ASM_FPRINTF_EXTENSIONS
3412 /* Uppercase letters are reserved for general use by asm_fprintf
3413 and so are not available to target-specific code. In order to
3414 prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
3415 they are defined here. As they get turned into real extensions
3416 to asm_fprintf they should be removed from this list. */
3417 case 'A': case 'B': case 'C': case 'D': case 'E':
3418 case 'F': case 'G': case 'H': case 'J': case 'K':
3419 case 'M': case 'N': case 'P': case 'Q': case 'S':
3420 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3421 break;
3422
3423 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3424 #endif
3425 default:
3426 abort ();
3427 }
3428 break;
3429
3430 default:
3431 putc (c, file);
3432 }
3433 va_end (argptr);
3434 }
3435 \f
3436 /* Split up a CONST_DOUBLE or integer constant rtx
3437 into two rtx's for single words,
3438 storing in *FIRST the word that comes first in memory in the target
3439 and in *SECOND the other. */
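
   Worked example (assuming 32-bit words, a 64-bit HOST_WIDE_INT and
   little-endian word order): for (const_int 0x300000004) the low word
   is 4 and the high word is 3, so *FIRST becomes (const_int 4) and
   *SECOND (const_int 3); with WORDS_BIG_ENDIAN the two are swapped.
   A low word of 0x80000000 is sign-extended to the host value
   -0x80000000 before being wrapped in GEN_INT.  */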
3440
3441 void
3442 split_double (rtx value, rtx *first, rtx *second)
3443 {
3444 if (GET_CODE (value) == CONST_INT)
3445 {
3446 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3447 {
3448 /* In this case the CONST_INT holds both target words.
3449 Extract the bits from it into two word-sized pieces.
3450 Sign extend each half to HOST_WIDE_INT. */
3451 unsigned HOST_WIDE_INT low, high;
3452 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3453
3454 /* Set sign_bit to the most significant bit of a word. */
3455 sign_bit = 1;
3456 sign_bit <<= BITS_PER_WORD - 1;
3457
3458 /* Set mask so that all bits of the word are set. We could
3459 have used 1 << BITS_PER_WORD instead of basing the
3460 calculation on sign_bit. However, on machines where
3461 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3462 compiler warning, even though the code would never be
3463 executed. */
3464 mask = sign_bit << 1;
3465 mask--;
3466
3467 /* Set sign_extend as any remaining bits. */
3468 sign_extend = ~mask;
3469
3470 /* Pick the lower word and sign-extend it. */
3471 low = INTVAL (value);
3472 low &= mask;
3473 if (low & sign_bit)
3474 low |= sign_extend;
3475
3476 /* Pick the higher word, shifted to the least significant
3477 bits, and sign-extend it. */
3478 high = INTVAL (value);
3479 high >>= BITS_PER_WORD - 1;
3480 high >>= 1;
3481 high &= mask;
3482 if (high & sign_bit)
3483 high |= sign_extend;
3484
3485 /* Store the words in the target machine order. */
3486 if (WORDS_BIG_ENDIAN)
3487 {
3488 *first = GEN_INT (high);
3489 *second = GEN_INT (low);
3490 }
3491 else
3492 {
3493 *first = GEN_INT (low);
3494 *second = GEN_INT (high);
3495 }
3496 }
3497 else
3498 {
3499 /* The rule for using CONST_INT for a wider mode
3500 is that we regard the value as signed.
3501 So sign-extend it. */
3502 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3503 if (WORDS_BIG_ENDIAN)
3504 {
3505 *first = high;
3506 *second = value;
3507 }
3508 else
3509 {
3510 *first = value;
3511 *second = high;
3512 }
3513 }
3514 }
3515 else if (GET_CODE (value) != CONST_DOUBLE)
3516 {
3517 if (WORDS_BIG_ENDIAN)
3518 {
3519 *first = const0_rtx;
3520 *second = value;
3521 }
3522 else
3523 {
3524 *first = value;
3525 *second = const0_rtx;
3526 }
3527 }
3528 else if (GET_MODE (value) == VOIDmode
3529 /* This is the old way we did CONST_DOUBLE integers. */
3530 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3531 {
3532 /* In an integer, the words are defined as most and least significant.
3533 So order them by the target's convention. */
3534 if (WORDS_BIG_ENDIAN)
3535 {
3536 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3537 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3538 }
3539 else
3540 {
3541 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3542 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3543 }
3544 }
3545 else
3546 {
3547 REAL_VALUE_TYPE r;
3548 long l[2];
3549 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3550
3551 /* Note, this converts the REAL_VALUE_TYPE to the target's
3552 format, splits up the floating point double and outputs
3553 exactly 32 bits of it into each of l[0] and l[1] --
3554 not necessarily BITS_PER_WORD bits. */
3555 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3556
3557 /* If 32 bits is an entire word for the target, but not for the host,
3558 then sign-extend on the host so that the number will look the same
3559 way on the host that it would on the target. See for instance
3560 simplify_unary_operation. The #if is needed to avoid compiler
3561 warnings. */
3562
3563 #if HOST_BITS_PER_LONG > 32
3564 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3565 {
3566 if (l[0] & ((long) 1 << 31))
3567 l[0] |= ((long) (-1) << 32);
3568 if (l[1] & ((long) 1 << 31))
3569 l[1] |= ((long) (-1) << 32);
3570 }
3571 #endif
3572
3573 *first = GEN_INT ((HOST_WIDE_INT) l[0]);
3574 *second = GEN_INT ((HOST_WIDE_INT) l[1]);
3575 }
3576 }
3577 \f
3578 /* Return nonzero if this function has no function calls. */
3579
3580 int
3581 leaf_function_p (void)
3582 {
3583 rtx insn;
3584 rtx link;
3585
3586 if (current_function_profile || profile_arc_flag)
3587 return 0;
3588
3589 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3590 {
3591 if (GET_CODE (insn) == CALL_INSN
3592 && ! SIBLING_CALL_P (insn))
3593 return 0;
3594 if (GET_CODE (insn) == INSN
3595 && GET_CODE (PATTERN (insn)) == SEQUENCE
3596 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3597 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3598 return 0;
3599 }
3600 for (link = current_function_epilogue_delay_list;
3601 link;
3602 link = XEXP (link, 1))
3603 {
3604 insn = XEXP (link, 0);
3605
3606 if (GET_CODE (insn) == CALL_INSN
3607 && ! SIBLING_CALL_P (insn))
3608 return 0;
3609 if (GET_CODE (insn) == INSN
3610 && GET_CODE (PATTERN (insn)) == SEQUENCE
3611 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3612 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3613 return 0;
3614 }
3615
3616 return 1;
3617 }
3618
3619 /* Return 1 if branch is a forward branch.
3620 Uses the insn_shuid array, so it works only in the final pass. May be used
3621 by output templates to emit branch prediction hints.
3622 */
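/* For instance (purely illustrative; the ",pt"/",pn" syntax is
   SPARC-like and not implied by this file), a port's branch template
   could do

      if (final_forward_branch_p (insn))
	strcat (buf, ",pn");
      else
	strcat (buf, ",pt");

   to hint that backward branches are usually taken.  */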
3623 int
3624 final_forward_branch_p (rtx insn)
3625 {
3626 int insn_id, label_id;
3627 if (!uid_shuid)
3628 abort ();
3629 insn_id = INSN_SHUID (insn);
3630 label_id = INSN_SHUID (JUMP_LABEL (insn));
3631 /* We've hit some insns that do not have id information available. */
3632 if (!insn_id || !label_id)
3633 abort ();
3634 return insn_id < label_id;
3635 }
3636
3637 /* On some machines, a function with no call insns
3638 can run faster if it doesn't create its own register window.
3639 When output, the leaf function should use only the "output"
3640 registers. Ordinarily, the function would be compiled to use
3641 the "input" registers to find its arguments; it is a candidate
3642 for leaf treatment if it uses only the "input" registers.
3643 Leaf function treatment means renumbering so the function
3644 uses the "output" registers instead. */
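
/* As a concrete (but target-specific, SPARC-like) illustration:
   LEAF_REG_REMAP might map the incoming-argument registers %i0-%i5
   onto %o0-%o5 so that the function can run without a register window
   of its own; leaf_renumber_regs below performs that remapping over
   the function's insns.  */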
3645
3646 #ifdef LEAF_REGISTERS
3647
3648 /* Return 1 if this function uses only the registers that can be
3649 safely renumbered. */
3650
3651 int
3652 only_leaf_regs_used (void)
3653 {
3654 int i;
3655 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3656
3657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3658 if ((regs_ever_live[i] || global_regs[i])
3659 && ! permitted_reg_in_leaf_functions[i])
3660 return 0;
3661
3662 if (current_function_uses_pic_offset_table
3663 && pic_offset_table_rtx != 0
3664 && GET_CODE (pic_offset_table_rtx) == REG
3665 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3666 return 0;
3667
3668 return 1;
3669 }
3670
3671 /* Scan all instructions and renumber all registers into those
3672 available in leaf functions. */
3673
3674 static void
3675 leaf_renumber_regs (rtx first)
3676 {
3677 rtx insn;
3678
3679 /* Renumber only the actual patterns.
3680 The reg-notes can contain frame pointer refs,
3681 and renumbering them could crash, and should not be needed. */
3682 for (insn = first; insn; insn = NEXT_INSN (insn))
3683 if (INSN_P (insn))
3684 leaf_renumber_regs_insn (PATTERN (insn));
3685 for (insn = current_function_epilogue_delay_list;
3686 insn;
3687 insn = XEXP (insn, 1))
3688 if (INSN_P (XEXP (insn, 0)))
3689 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3690 }
3691
3692 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
3693 available in leaf functions. */
3694
3695 void
3696 leaf_renumber_regs_insn (rtx in_rtx)
3697 {
3698 int i, j;
3699 const char *format_ptr;
3700
3701 if (in_rtx == 0)
3702 return;
3703
3704 /* Renumber all input-registers into output-registers.
3705 A register that has already been renumbered is marked with the
3706 `used' flag so that it is not renumbered twice. */
3707
3708 if (GET_CODE (in_rtx) == REG)
3709 {
3710 int newreg;
3711
3712 /* Don't renumber the same reg twice. */
3713 if (in_rtx->used)
3714 return;
3715
3716 newreg = REGNO (in_rtx);
3717 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
3718 to reach here as part of a REG_NOTE. */
3719 if (newreg >= FIRST_PSEUDO_REGISTER)
3720 {
3721 in_rtx->used = 1;
3722 return;
3723 }
3724 newreg = LEAF_REG_REMAP (newreg);
3725 if (newreg < 0)
3726 abort ();
3727 regs_ever_live[REGNO (in_rtx)] = 0;
3728 regs_ever_live[newreg] = 1;
3729 REGNO (in_rtx) = newreg;
3730 in_rtx->used = 1;
3731 }
3732
3733 if (INSN_P (in_rtx))
3734 {
3735 /* Inside a SEQUENCE, we find insns.
3736 Renumber just the patterns of these insns,
3737 just as we do for the top-level insns. */
3738 leaf_renumber_regs_insn (PATTERN (in_rtx));
3739 return;
3740 }
3741
3742 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3743
3744 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3745 switch (*format_ptr++)
3746 {
3747 case 'e':
3748 leaf_renumber_regs_insn (XEXP (in_rtx, i));
3749 break;
3750
3751 case 'E':
3752 if (NULL != XVEC (in_rtx, i))
3753 {
3754 for (j = 0; j < XVECLEN (in_rtx, i); j++)
3755 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3756 }
3757 break;
3758
3759 case 'S':
3760 case 's':
3761 case '0':
3762 case 'i':
3763 case 'w':
3764 case 'n':
3765 case 'u':
3766 break;
3767
3768 default:
3769 abort ();
3770 }
3771 }
3772 #endif
3773
3774
3775 /* When -gused is used, emit debug info only for used symbols. But in
3776 addition to the standard intercepted debug_hooks there are some direct
3777 calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
3778 Those routines may also be called from a higher-level intercepted routine. So,
3779 to prevent recording data for an inner call to one of these during an intercept,
3780 we maintain an intercept nesting counter (debug_nesting). We only save the
3781 intercepted arguments if the nesting is 1. */
3782 int debug_nesting = 0;
3783
3784 static tree *symbol_queue;
3785 int symbol_queue_index = 0;
3786 static int symbol_queue_size = 0;
3787
3788 /* Generate the symbols for any queued up type symbols we encountered
3789 while generating the type info for some originally used symbol.
3790 This might generate additional entries in the queue. Only when
3791 the nesting depth goes to 0 is this routine called. */
3792
3793 void
3794 debug_flush_symbol_queue (void)
3795 {
3796 int i;
3797
3798 /* Make sure that additionally queued items are not flushed
3799 prematurely. */
3800
3801 ++debug_nesting;
3802
3803 for (i = 0; i < symbol_queue_index; ++i)
3804 {
3805 /* If we pushed queued symbols then such symbols must be
3806 output no matter what anyone else says. Specifically,
3807 we need to make sure dbxout_symbol() thinks the symbol was
3808 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3809 which may be set for outside reasons. */
3810 int saved_tree_used = TREE_USED (symbol_queue[i]);
3811 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3812 TREE_USED (symbol_queue[i]) = 1;
3813 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3814
3815 #ifdef DBX_DEBUGGING_INFO
3816 dbxout_symbol (symbol_queue[i], 0);
3817 #endif
3818
3819 TREE_USED (symbol_queue[i]) = saved_tree_used;
3820 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3821 }
3822
3823 symbol_queue_index = 0;
3824 --debug_nesting;
3825 }
3826
3827 /* Queue a type symbol needed as part of the definition of a decl
3828 symbol. These symbols are generated when debug_flush_symbol_queue()
3829 is called. */
3830
3831 void
3832 debug_queue_symbol (tree decl)
3833 {
3834 if (symbol_queue_index >= symbol_queue_size)
3835 {
3836 symbol_queue_size += 10;
3837 symbol_queue = xrealloc (symbol_queue,
3838 symbol_queue_size * sizeof (tree));
3839 }
3840
3841 symbol_queue[symbol_queue_index++] = decl;
3842 }
3843
3844 /* Free symbol queue. */
3845 void
3846 debug_free_queue (void)
3847 {
3848 if (symbol_queue)
3849 {
3850 free (symbol_queue);
3851 symbol_queue = NULL;
3852 symbol_queue_size = 0;
3853 }
3854 }